
add time parser year shift tests

Rob Ede 2021-02-12 00:15:25 +00:00
parent 31d9ed81c5
commit 81bef93e5e
GPG Key ID: C2A3B36E841A91E6
5 changed files with 116 additions and 51 deletions

View File

@@ -116,6 +116,9 @@ brotli2 = "0.3.2"
flate2 = "1.0.13"
criterion = "0.3"
[profile.dev]
debug = false
[profile.release]
lto = true
opt-level = 3
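# What these profile keys do: `debug = false` strips debug info from dev builds;
# `lto = true` enables link-time optimization and `opt-level = 3` selects the
# highest optimization level for release builds.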

View File

@@ -1,7 +1,11 @@
//! Stream decoders.
use std::task::{Context, Poll};
use std::{future::Future, io, io::Write as _, pin::Pin};
use std::{
future::Future,
io::{self, Write as _},
pin::Pin,
task::{Context, Poll},
};
use actix_rt::task::{spawn_blocking, JoinHandle};
use brotli2::write::BrotliDecoder;
@@ -9,11 +13,13 @@ use bytes::Bytes;
use flate2::write::{GzDecoder, ZlibDecoder};
use futures_core::{ready, Stream};
use super::Writer;
use crate::error::{BlockingError, PayloadError};
use crate::http::header::{ContentEncoding, HeaderMap, CONTENT_ENCODING};
use crate::{
encoding::Writer,
error::{BlockingError, PayloadError},
http::header::{ContentEncoding, HeaderMap, CONTENT_ENCODING},
};
const INPLACE: usize = 2049;
const MAX_CHUNK_SIZE_DECODE_IN_PLACE: usize = 2049;
pub struct Decoder<S> {
decoder: Option<ContentDecoder>,
@@ -41,6 +47,7 @@ where
))),
_ => None,
};
Decoder {
decoder,
stream,
@@ -53,15 +60,11 @@ where
#[inline]
pub fn from_headers(stream: S, headers: &HeaderMap) -> Decoder<S> {
// check content-encoding
let encoding = if let Some(enc) = headers.get(&CONTENT_ENCODING) {
if let Ok(enc) = enc.to_str() {
ContentEncoding::from(enc)
} else {
ContentEncoding::Identity
}
} else {
ContentEncoding::Identity
};
let encoding = headers
.get(&CONTENT_ENCODING)
.and_then(|val| val.to_str().ok())
.map(ContentEncoding::from)
.unwrap_or(ContentEncoding::Identity);
Self::new(stream, encoding)
}
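// Behaviour of the header lookup above, spelling out the three cases the removed
// if/else ladder handled explicitly:
//
//   no Content-Encoding header        -> ContentEncoding::Identity
//   header value not valid UTF-8      -> ContentEncoding::Identity
//   header value readable as &str     -> ContentEncoding::from(value)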
@@ -81,8 +84,10 @@ where
if let Some(ref mut fut) = self.fut {
let (chunk, decoder) =
ready!(Pin::new(fut).poll(cx)).map_err(|_| BlockingError)??;
self.decoder = Some(decoder);
self.fut.take();
if let Some(chunk) = chunk {
return Poll::Ready(Some(Ok(chunk)));
}
@@ -92,13 +97,15 @@ where
return Poll::Ready(None);
}
match Pin::new(&mut self.stream).poll_next(cx) {
Poll::Ready(Some(Err(err))) => return Poll::Ready(Some(Err(err))),
Poll::Ready(Some(Ok(chunk))) => {
match ready!(Pin::new(&mut self.stream).poll_next(cx)) {
Some(Err(err)) => return Poll::Ready(Some(Err(err))),
Some(Ok(chunk)) => {
if let Some(mut decoder) = self.decoder.take() {
if chunk.len() < INPLACE {
if chunk.len() < MAX_CHUNK_SIZE_DECODE_IN_PLACE {
let chunk = decoder.feed_data(chunk)?;
self.decoder = Some(decoder);
if let Some(chunk) = chunk {
return Poll::Ready(Some(Ok(chunk)));
}
@@ -108,13 +115,16 @@ where
Ok((chunk, decoder))
}));
}
continue;
} else {
return Poll::Ready(Some(Ok(chunk)));
}
}
Poll::Ready(None) => {
None => {
self.eof = true;
return if let Some(mut decoder) = self.decoder.take() {
match decoder.feed_eof() {
Ok(Some(res)) => Poll::Ready(Some(Ok(res))),
@@ -125,10 +135,8 @@ where
Poll::Ready(None)
};
}
Poll::Pending => break,
}
}
Poll::Pending
}
}
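// The switch to `ready!` above is what lets the match drop its explicit
// `Poll::Pending => break` arm and the trailing `Poll::Pending`: futures_core's
// `ready!` returns `Poll::Pending` from `poll_next` itself whenever the inner poll
// is pending. Roughly, `ready!(e)` expands to:
//
//     match e {
//         Poll::Ready(t) => t,
//         Poll::Pending => return Poll::Pending,
//     }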
@@ -144,6 +152,7 @@ impl ContentDecoder {
ContentDecoder::Br(ref mut decoder) => match decoder.flush() {
Ok(()) => {
let b = decoder.get_mut().take();
if !b.is_empty() {
Ok(Some(b))
} else {
@@ -152,9 +161,11 @@ impl ContentDecoder {
}
Err(e) => Err(e),
},
ContentDecoder::Gzip(ref mut decoder) => match decoder.try_finish() {
Ok(_) => {
let b = decoder.get_mut().take();
if !b.is_empty() {
Ok(Some(b))
} else {
@@ -163,6 +174,7 @@ impl ContentDecoder {
}
Err(e) => Err(e),
},
ContentDecoder::Deflate(ref mut decoder) => match decoder.try_finish() {
Ok(_) => {
let b = decoder.get_mut().take();
@@ -183,6 +195,7 @@ impl ContentDecoder {
Ok(_) => {
decoder.flush()?;
let b = decoder.get_mut().take();
if !b.is_empty() {
Ok(Some(b))
} else {
@@ -191,10 +204,12 @@ impl ContentDecoder {
}
Err(e) => Err(e),
},
ContentDecoder::Gzip(ref mut decoder) => match decoder.write_all(&data) {
Ok(_) => {
decoder.flush()?;
let b = decoder.get_mut().take();
if !b.is_empty() {
Ok(Some(b))
} else {
@@ -203,9 +218,11 @@ impl ContentDecoder {
}
Err(e) => Err(e),
},
ContentDecoder::Deflate(ref mut decoder) => match decoder.write_all(&data) {
Ok(_) => {
decoder.flush()?;
let b = decoder.get_mut().take();
if !b.is_empty() {
Ok(Some(b))

View File

@@ -1,7 +1,11 @@
//! Stream encoders.
use std::task::{Context, Poll};
use std::{future::Future, io, io::Write as _, pin::Pin};
use std::{
future::Future,
io::{self, Write as _},
pin::Pin,
task::{Context, Poll},
};
use actix_rt::task::{spawn_blocking, JoinHandle};
use brotli2::write::BrotliEncoder;
@@ -10,15 +14,19 @@ use flate2::write::{GzEncoder, ZlibEncoder};
use futures_core::ready;
use pin_project::pin_project;
use crate::body::{Body, BodySize, MessageBody, ResponseBody};
use crate::http::header::{ContentEncoding, CONTENT_ENCODING};
use crate::http::{HeaderValue, StatusCode};
use crate::{Error, ResponseHead};
use crate::{
body::{Body, BodySize, MessageBody, ResponseBody},
http::{
header::{ContentEncoding, CONTENT_ENCODING},
HeaderValue, StatusCode,
},
Error, ResponseHead,
};
use super::Writer;
use crate::error::BlockingError;
const INPLACE: usize = 1024;
const MAX_CHUNK_SIZE_ENCODE_IN_PLACE: usize = 1024;
#[pin_project]
pub struct Encoder<B> {
@@ -137,23 +145,28 @@ impl<B: MessageBody> MessageBody for Encoder<B> {
if let Some(ref mut fut) = this.fut {
let mut encoder =
ready!(Pin::new(fut).poll(cx)).map_err(|_| BlockingError)??;
let chunk = encoder.take();
*this.encoder = Some(encoder);
this.fut.take();
if !chunk.is_empty() {
return Poll::Ready(Some(Ok(chunk)));
}
}
let result = this.body.as_mut().poll_next(cx);
let result = ready!(this.body.as_mut().poll_next(cx));
match result {
Poll::Ready(Some(Ok(chunk))) => {
Some(Err(err)) => return Poll::Ready(Some(Err(err))),
Some(Ok(chunk)) => {
if let Some(mut encoder) = this.encoder.take() {
if chunk.len() < INPLACE {
if chunk.len() < MAX_CHUNK_SIZE_ENCODE_IN_PLACE {
encoder.write(&chunk)?;
let chunk = encoder.take();
*this.encoder = Some(encoder);
if !chunk.is_empty() {
return Poll::Ready(Some(Ok(chunk)));
}
@@ -167,7 +180,8 @@ impl<B: MessageBody> MessageBody for Encoder<B> {
return Poll::Ready(Some(Ok(chunk)));
}
}
Poll::Ready(None) => {
None => {
if let Some(encoder) = this.encoder.take() {
let chunk = encoder.finish()?;
if chunk.is_empty() {
@@ -180,7 +194,6 @@ impl<B: MessageBody> MessageBody for Encoder<B> {
return Poll::Ready(None);
}
}
val => return val,
}
}
}
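// Like the decoder, the encoder splits work by chunk size: chunks smaller than
// MAX_CHUNK_SIZE_ENCODE_IN_PLACE are compressed inline on the current task, while
// larger ones are handed off via spawn_blocking, with the returned JoinHandle kept
// in `fut` and polled at the top of poll_next. A rough sketch of that offload arm,
// assuming `encoder` and `chunk` are moved into the closure:
//
//     *this.fut = Some(spawn_blocking(move || {
//         encoder.write(&chunk)?;
//         Ok(encoder)
//     }));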

View File

@@ -13,8 +13,10 @@ use crate::http::{header, Method, StatusCode, Uri, Version};
pub enum ConnectionType {
/// Close connection after response
Close,
/// Keep connection alive after response
KeepAlive,
/// Connection is upgraded to a different type
Upgrade,
}

View File

@@ -8,35 +8,65 @@ pub fn parse_http_date(time: &str) -> Option<PrimitiveDateTime> {
}
/// Attempt to parse a `time` string as an RFC 1123 formatted date time string.
///
/// Eg: `Fri, 12 Feb 2021 00:14:29 GMT`
fn try_parse_rfc_1123(time: &str) -> Option<PrimitiveDateTime> {
time::parse(time, "%a, %d %b %Y %H:%M:%S").ok()
}
/// Attempt to parse a `time` string as an RFC 850 formatted date time string.
///
/// Eg: `Wednesday, 11-Jan-21 13:37:41 UTC`
fn try_parse_rfc_850(time: &str) -> Option<PrimitiveDateTime> {
match PrimitiveDateTime::parse(time, "%A, %d-%b-%y %H:%M:%S") {
Ok(dt) => {
// If the `time` string contains a two-digit year, then as per RFC 2616 § 19.3,
// we consider the year as part of this century if it's within the next 50 years,
// otherwise we consider it part of the previous century.
let now = OffsetDateTime::now_utc();
let century_start_year = (now.year() / 100) * 100;
let mut expanded_year = century_start_year + dt.year();
let dt = PrimitiveDateTime::parse(time, "%A, %d-%b-%y %H:%M:%S").ok()?;
if expanded_year > now.year() + 50 {
expanded_year -= 100;
}
// If the `time` string contains a two-digit year, then as per RFC 2616 § 19.3,
// we consider the year as part of this century if it's within the next 50 years,
// otherwise we consider it part of the previous century.
match Date::try_from_ymd(expanded_year, dt.month(), dt.day()) {
Ok(date) => Some(PrimitiveDateTime::new(date, dt.time())),
Err(_) => None,
}
}
Err(_) => None,
let now = OffsetDateTime::now_utc();
let century_start_year = (now.year() / 100) * 100;
let mut expanded_year = century_start_year + dt.year();
if expanded_year > now.year() + 50 {
expanded_year -= 100;
}
let date = Date::try_from_ymd(expanded_year, dt.month(), dt.day()).ok()?;
Some(PrimitiveDateTime::new(date, dt.time()))
}
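// Worked example of the year-shift rule above, assuming the current year is 2021
// (so century_start_year = 2000 and the cut-off is 2021 + 50 = 2071) and that the
// two-digit year parses as-is:
//
//   "82" -> 2000 + 82 = 2082  > 2071  -> shift back a century -> 1982
//   "62" -> 2000 + 62 = 2062 <= 2071  -> keep                 -> 2062
//   "21" -> 2000 + 21 = 2021 <= 2071  -> keep                 -> 2021
//
// The same rule is exercised by test_rfc_850_year_shift below.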
/// Attempt to parse a `time` string using ANSI C's `asctime` format.
///
/// Eg: `Wed Feb 13 15:46:11 2013`
fn try_parse_asctime(time: &str) -> Option<PrimitiveDateTime> {
time::parse(time, "%a %b %_d %H:%M:%S %Y").ok()
}
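// A minimal usage sketch of the three parsers above, reusing the example strings
// from their doc comments:
//
//     assert!(try_parse_rfc_1123("Fri, 12 Feb 2021 00:14:29 GMT").is_some());
//     assert!(try_parse_rfc_850("Wednesday, 11-Jan-21 13:37:41 UTC").is_some());
//     assert!(try_parse_asctime("Wed Feb 13 15:46:11 2013").is_some());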
#[cfg(test)]
mod tests {
use time::{date, time};
use super::*;
#[test]
fn test_rfc_850_year_shift() {
let date = try_parse_rfc_850("Friday, 19-Nov-82 16:14:55 EST").unwrap();
assert_eq!(date, date!(1982 - 11 - 19).with_time(time!(16:14:55)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-62 13:37:41 EST").unwrap();
assert_eq!(date, date!(2062 - 01 - 11).with_time(time!(13:37:41)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-21 13:37:41 EST").unwrap();
assert_eq!(date, date!(2021 - 01 - 11).with_time(time!(13:37:41)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-23 13:37:41 EST").unwrap();
assert_eq!(date, date!(2023 - 01 - 11).with_time(time!(13:37:41)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-99 13:37:41 EST").unwrap();
assert_eq!(date, date!(1999 - 01 - 11).with_time(time!(13:37:41)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-00 13:37:41 EST").unwrap();
assert_eq!(date, date!(2000 - 01 - 11).with_time(time!(13:37:41)));
}
}