use std::{io, mem};

use bytes::{Bytes, BytesMut};
use futures::{Async, Poll};
use httparse;

use super::message::{MessageFlags, Request};
use super::settings::WorkerSettings;
use error::ParseError;
use http::header::{HeaderName, HeaderValue};
use http::{header, HttpTryFrom, Method, Uri, Version};
use uri::Url;

const MAX_BUFFER_SIZE: usize = 131_072;
const MAX_HEADERS: usize = 96;

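/// Streaming decoder for HTTP/1.x requests.
///
/// Holds the payload decoder for the request currently being read, if any.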
pub(crate) struct H1Decoder {
    decoder: Option<EncodingDecoder>,
}

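/// Item produced by `H1Decoder::decode`: a parsed request head, a payload
/// chunk, or the end of the current payload.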
pub(crate) enum Message {
    Message { msg: Request, payload: bool },
    Chunk(Bytes),
    Eof,
}

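/// Errors that can occur while decoding: I/O failures from the payload
/// decoder or HTTP parse errors.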
#[derive(Debug)]
pub(crate) enum DecoderError {
    Io(io::Error),
    Error(ParseError),
}

impl From<io::Error> for DecoderError {
    fn from(err: io::Error) -> DecoderError {
        DecoderError::Io(err)
    }
}

impl H1Decoder {
    pub fn new() -> H1Decoder {
        H1Decoder { decoder: None }
    }

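    /// Decode the next item from `src`.
    ///
    /// Returns `Ok(None)` when more data is needed; otherwise yields a
    /// request head, a payload chunk, or end-of-payload.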
    pub fn decode<H>(
        &mut self, src: &mut BytesMut, settings: &WorkerSettings<H>,
    ) -> Result<Option<Message>, DecoderError> {
        // read payload
        if self.decoder.is_some() {
            match self.decoder.as_mut().unwrap().decode(src)? {
                Async::Ready(Some(bytes)) => return Ok(Some(Message::Chunk(bytes))),
                Async::Ready(None) => {
                    self.decoder.take();
                    return Ok(Some(Message::Eof));
                }
                Async::NotReady => return Ok(None),
            }
        }

        match self
            .parse_message(src, settings)
            .map_err(DecoderError::Error)?
        {
            Async::Ready((msg, decoder)) => {
                self.decoder = decoder;
                Ok(Some(Message::Message {
                    msg,
                    payload: self.decoder.is_some(),
                }))
            }
            Async::NotReady => {
                if src.len() >= MAX_BUFFER_SIZE {
                    error!("MAX_BUFFER_SIZE unprocessed data reached, closing");
                    Err(DecoderError::Error(ParseError::TooLarge))
                } else {
                    Ok(None)
                }
            }
        }
    }

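    /// Parse a complete request head from `buf` and pick the payload decoder
    /// from the Content-Length, Transfer-Encoding and upgrade/CONNECT
    /// information (see RFC 7230, section 3.3.3).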
    fn parse_message<H>(
        &self, buf: &mut BytesMut, settings: &WorkerSettings<H>,
    ) -> Poll<(Request, Option<EncodingDecoder>), ParseError> {
        // Parse http message
        let mut has_upgrade = false;
        let mut chunked = false;
        let mut content_length = None;

        let msg = {
            // Unsafe: we only read this data after httparse has parsed the
            // headers into it. Skipping initialization is a performance bump
            // for pipeline benchmarks.
            let mut headers: [HeaderIndex; MAX_HEADERS] =
                unsafe { mem::uninitialized() };

            let (len, method, path, version, headers_len) = {
                let mut parsed: [httparse::Header; MAX_HEADERS] =
                    unsafe { mem::uninitialized() };

                let mut req = httparse::Request::new(&mut parsed);
                match req.parse(buf)? {
                    httparse::Status::Complete(len) => {
                        let method = Method::from_bytes(req.method.unwrap().as_bytes())
                            .map_err(|_| ParseError::Method)?;
                        let path = Url::new(Uri::try_from(req.path.unwrap())?);
                        let version = if req.version.unwrap() == 1 {
                            Version::HTTP_11
                        } else {
                            Version::HTTP_10
                        };
                        HeaderIndex::record(buf, req.headers, &mut headers);

                        (len, method, path, version, req.headers.len())
                    }
                    httparse::Status::Partial => return Ok(Async::NotReady),
                }
            };

            let slice = buf.split_to(len).freeze();

            // convert headers
            let mut msg = settings.get_request_context();
            {
                let inner = &mut msg.inner;
                inner
                    .flags
                    .get_mut()
                    .set(MessageFlags::KEEPALIVE, version != Version::HTTP_10);

                for idx in headers[..headers_len].iter() {
                    if let Ok(name) =
                        HeaderName::from_bytes(&slice[idx.name.0..idx.name.1])
                    {
                        has_upgrade = has_upgrade || name == header::UPGRADE;
                        // Unsafe: httparse checks the header value for valid utf-8
                        let value = unsafe {
                            HeaderValue::from_shared_unchecked(
                                slice.slice(idx.value.0, idx.value.1),
                            )
                        };
                        match name {
                            header::CONTENT_LENGTH => {
                                if let Ok(s) = value.to_str() {
                                    if let Ok(len) = s.parse::<u64>() {
                                        content_length = Some(len);
                                    } else {
                                        debug!("illegal Content-Length: {:?}", s);
                                        return Err(ParseError::Header);
                                    }
                                } else {
                                    debug!("illegal Content-Length: {:?}", value);
                                    return Err(ParseError::Header);
                                }
                            }
                            // transfer-encoding
                            header::TRANSFER_ENCODING => {
                                if let Ok(s) = value.to_str() {
                                    chunked = s.to_lowercase().contains("chunked");
                                } else {
                                    return Err(ParseError::Header);
                                }
                            }
                            // connection keep-alive state
                            header::CONNECTION => {
                                let ka = if let Ok(conn) = value.to_str() {
                                    if version == Version::HTTP_10
                                        && conn.contains("keep-alive")
                                    {
                                        true
                                    } else {
                                        version == Version::HTTP_11
                                            && !(conn.contains("close")
                                                || conn.contains("upgrade"))
                                    }
                                } else {
                                    false
                                };
                                inner.flags.get_mut().set(MessageFlags::KEEPALIVE, ka);
                            }
                            _ => (),
                        }

                        inner.headers.append(name, value);
                    } else {
                        return Err(ParseError::Header);
                    }
                }

                inner.url = path;
                inner.method = method;
                inner.version = version;
            }
            msg
        };

        // https://tools.ietf.org/html/rfc7230#section-3.3.3
        let decoder = if chunked {
            // Chunked encoding
            Some(EncodingDecoder::chunked())
        } else if let Some(len) = content_length {
            // Content-Length
            Some(EncodingDecoder::length(len))
        } else if has_upgrade || msg.inner.method == Method::CONNECT {
            // upgrade(websocket) or connect
            Some(EncodingDecoder::eof())
        } else {
            None
        };

        Ok(Async::Ready((msg, decoder)))
    }
}

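/// Byte offsets of a header name and value within the parsed request head.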
#[derive(Clone, Copy)]
pub(crate) struct HeaderIndex {
    pub(crate) name: (usize, usize),
    pub(crate) value: (usize, usize),
}

impl HeaderIndex {
    pub(crate) fn record(
        bytes: &[u8], headers: &[httparse::Header], indices: &mut [HeaderIndex],
    ) {
        let bytes_ptr = bytes.as_ptr() as usize;
        for (header, indices) in headers.iter().zip(indices.iter_mut()) {
            let name_start = header.name.as_ptr() as usize - bytes_ptr;
            let name_end = name_start + header.name.len();
            indices.name = (name_start, name_end);
            let value_start = header.value.as_ptr() as usize - bytes_ptr;
            let value_end = value_start + header.value.len();
            indices.value = (value_start, value_end);
        }
    }
}

/// Decoders to handle different Transfer-Encodings.
///
/// If a message body does not include a Transfer-Encoding, it *should*
/// include a Content-Length header.
#[derive(Debug, Clone, PartialEq)]
pub struct EncodingDecoder {
    kind: Kind,
}

impl EncodingDecoder {
    pub fn length(x: u64) -> EncodingDecoder {
        EncodingDecoder {
            kind: Kind::Length(x),
        }
    }

    pub fn chunked() -> EncodingDecoder {
        EncodingDecoder {
            kind: Kind::Chunked(ChunkedState::Size, 0),
        }
    }

    pub fn eof() -> EncodingDecoder {
        EncodingDecoder {
            kind: Kind::Eof(false),
        }
    }
}

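/// Strategy used to delimit the message body.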
#[derive(Debug, Clone, PartialEq)]
enum Kind {
    /// A Reader used when a Content-Length header is passed with a positive
    /// integer.
    Length(u64),
    /// A Reader used when Transfer-Encoding is `chunked`.
    Chunked(ChunkedState, u64),
    /// A Reader used for responses that don't indicate a length or chunked
    /// encoding.
    ///
    /// Note: This should only be used for `Response`s. It is illegal for a
    /// `Request` to be made with both `Content-Length` and
    /// `Transfer-Encoding: chunked` missing, as explained in the spec:
    ///
    /// > If a Transfer-Encoding header field is present in a response and
    /// > the chunked transfer coding is not the final encoding, the
    /// > message body length is determined by reading the connection until
    /// > it is closed by the server. If a Transfer-Encoding header field
    /// > is present in a request and the chunked transfer coding is not
    /// > the final encoding, the message body length cannot be determined
    /// > reliably; the server MUST respond with the 400 (Bad Request)
    /// > status code and then close the connection.
    Eof(bool),
}

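/// States of the chunked transfer-coding parser driven by
/// `ChunkedState::step`.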
#[derive(Debug, PartialEq, Clone)]
enum ChunkedState {
    Size,
    SizeLws,
    Extension,
    SizeLf,
    Body,
    BodyCr,
    BodyLf,
    EndCr,
    EndLf,
    End,
}

impl EncodingDecoder {
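    /// Extract the next payload chunk from `body`.
    ///
    /// `Async::Ready(None)` signals that the payload is complete.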
    pub fn decode(&mut self, body: &mut BytesMut) -> Poll<Option<Bytes>, io::Error> {
        match self.kind {
            Kind::Length(ref mut remaining) => {
                if *remaining == 0 {
                    Ok(Async::Ready(None))
                } else {
                    if body.is_empty() {
                        return Ok(Async::NotReady);
                    }
                    let len = body.len() as u64;
                    let buf;
                    if *remaining > len {
                        buf = body.take().freeze();
                        *remaining -= len;
                    } else {
                        buf = body.split_to(*remaining as usize).freeze();
                        *remaining = 0;
                    }
                    trace!("Length read: {}", buf.len());
                    Ok(Async::Ready(Some(buf)))
                }
            }
            Kind::Chunked(ref mut state, ref mut size) => {
                loop {
                    let mut buf = None;
                    // advances the chunked state
                    *state = try_ready!(state.step(body, size, &mut buf));
                    if *state == ChunkedState::End {
                        trace!("End of chunked stream");
                        return Ok(Async::Ready(None));
                    }
                    if let Some(buf) = buf {
                        return Ok(Async::Ready(Some(buf)));
                    }
                    if body.is_empty() {
                        return Ok(Async::NotReady);
                    }
                }
            }
            Kind::Eof(ref mut is_eof) => {
                if *is_eof {
                    Ok(Async::Ready(None))
                } else if !body.is_empty() {
                    Ok(Async::Ready(Some(body.take().freeze())))
                } else {
                    Ok(Async::NotReady)
                }
            }
        }
    }
}

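// Read a single byte from the front of the buffer, or return
// `Async::NotReady` from the enclosing function if the buffer is empty.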
macro_rules! byte (
    ($rdr:ident) => ({
        if $rdr.len() > 0 {
            let b = $rdr[0];
            $rdr.split_to(1);
            b
        } else {
            return Ok(Async::NotReady)
        }
    })
);

impl ChunkedState {
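    /// Advance the parser by one state using the next available input,
    /// returning `Async::NotReady` when more bytes are required.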
    fn step(
        &self, body: &mut BytesMut, size: &mut u64, buf: &mut Option<Bytes>,
    ) -> Poll<ChunkedState, io::Error> {
        use self::ChunkedState::*;
        match *self {
            Size => ChunkedState::read_size(body, size),
            SizeLws => ChunkedState::read_size_lws(body),
            Extension => ChunkedState::read_extension(body),
            SizeLf => ChunkedState::read_size_lf(body, size),
            Body => ChunkedState::read_body(body, size, buf),
            BodyCr => ChunkedState::read_body_cr(body),
            BodyLf => ChunkedState::read_body_lf(body),
            EndCr => ChunkedState::read_end_cr(body),
            EndLf => ChunkedState::read_end_lf(body),
            End => Ok(Async::Ready(ChunkedState::End)),
        }
    }
    fn read_size(rdr: &mut BytesMut, size: &mut u64) -> Poll<ChunkedState, io::Error> {
        let radix = 16;
        match byte!(rdr) {
            b @ b'0'...b'9' => {
                *size *= radix;
                *size += u64::from(b - b'0');
            }
            b @ b'a'...b'f' => {
                *size *= radix;
                *size += u64::from(b + 10 - b'a');
            }
            b @ b'A'...b'F' => {
                *size *= radix;
                *size += u64::from(b + 10 - b'A');
            }
            b'\t' | b' ' => return Ok(Async::Ready(ChunkedState::SizeLws)),
            b';' => return Ok(Async::Ready(ChunkedState::Extension)),
            b'\r' => return Ok(Async::Ready(ChunkedState::SizeLf)),
            _ => {
                return Err(io::Error::new(
                    io::ErrorKind::InvalidInput,
                    "Invalid chunk size line: Invalid Size",
                ));
            }
        }
        Ok(Async::Ready(ChunkedState::Size))
    }
    fn read_size_lws(rdr: &mut BytesMut) -> Poll<ChunkedState, io::Error> {
        trace!("read_size_lws");
        match byte!(rdr) {
            // LWS can follow the chunk size, but no more digits can come
            b'\t' | b' ' => Ok(Async::Ready(ChunkedState::SizeLws)),
            b';' => Ok(Async::Ready(ChunkedState::Extension)),
            b'\r' => Ok(Async::Ready(ChunkedState::SizeLf)),
            _ => Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "Invalid chunk size linear white space",
            )),
        }
    }
    fn read_extension(rdr: &mut BytesMut) -> Poll<ChunkedState, io::Error> {
        match byte!(rdr) {
            b'\r' => Ok(Async::Ready(ChunkedState::SizeLf)),
            _ => Ok(Async::Ready(ChunkedState::Extension)), // no supported extensions
        }
    }
    fn read_size_lf(
        rdr: &mut BytesMut, size: &mut u64,
    ) -> Poll<ChunkedState, io::Error> {
        match byte!(rdr) {
            b'\n' if *size > 0 => Ok(Async::Ready(ChunkedState::Body)),
            b'\n' if *size == 0 => Ok(Async::Ready(ChunkedState::EndCr)),
            _ => Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "Invalid chunk size LF",
            )),
        }
    }

    fn read_body(
        rdr: &mut BytesMut, rem: &mut u64, buf: &mut Option<Bytes>,
    ) -> Poll<ChunkedState, io::Error> {
        trace!("Chunked read, remaining={:?}", rem);

        let len = rdr.len() as u64;
        if len == 0 {
            Ok(Async::Ready(ChunkedState::Body))
        } else {
            let slice;
            if *rem > len {
                slice = rdr.take().freeze();
                *rem -= len;
            } else {
                slice = rdr.split_to(*rem as usize).freeze();
                *rem = 0;
            }
            *buf = Some(slice);
            if *rem > 0 {
                Ok(Async::Ready(ChunkedState::Body))
            } else {
                Ok(Async::Ready(ChunkedState::BodyCr))
            }
        }
    }

    fn read_body_cr(rdr: &mut BytesMut) -> Poll<ChunkedState, io::Error> {
        match byte!(rdr) {
            b'\r' => Ok(Async::Ready(ChunkedState::BodyLf)),
            _ => Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "Invalid chunk body CR",
            )),
        }
    }
    fn read_body_lf(rdr: &mut BytesMut) -> Poll<ChunkedState, io::Error> {
        match byte!(rdr) {
            b'\n' => Ok(Async::Ready(ChunkedState::Size)),
            _ => Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "Invalid chunk body LF",
            )),
        }
    }
    fn read_end_cr(rdr: &mut BytesMut) -> Poll<ChunkedState, io::Error> {
        match byte!(rdr) {
            b'\r' => Ok(Async::Ready(ChunkedState::EndLf)),
            _ => Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "Invalid chunk end CR",
            )),
        }
    }
    fn read_end_lf(rdr: &mut BytesMut) -> Poll<ChunkedState, io::Error> {
        match byte!(rdr) {
            b'\n' => Ok(Async::Ready(ChunkedState::End)),
            _ => Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "Invalid chunk end LF",
            )),
        }
    }
}