1
0
mirror of https://github.com/fafhrd91/actix-web synced 2025-07-15 14:26:15 +02:00

Compare commits

...

17 Commits

Author SHA1 Message Date
cd9901c928 prepare release 2018-11-14 16:24:01 -08:00
1ef0eed0bd do not stop on keep-alive timer if sink is not completely flushed 2018-11-08 20:46:13 -08:00
61b1030882 Fix websockets connection drop if request contains content-length header #567 2018-11-08 20:35:47 -08:00
7065c540e1 set nodelay on socket #560 2018-11-08 16:29:43 -08:00
aed3933ae8 Merge branch 'master' of github.com:actix/actix-web 2018-11-08 16:15:45 -08:00
5b7740dee3 hide ChunkedReadFile 2018-11-08 16:12:16 -08:00
1a0bf32ec7 Fix unnecessary owned string and change htmlescape in favor of askama_escape (#584) 2018-11-08 16:08:06 -08:00
9ab586e24e update actix-net dep 2018-11-08 16:06:23 -08:00
62f1c90c8d update base64 dep 2018-11-07 21:18:40 -08:00
2677d325a7 fix keep-alive timer reset 2018-11-07 21:09:33 -08:00
8e354021d4 Add SameSite option to identity middleware cookie (#581) 2018-11-07 23:24:06 +03:00
3b536ee96c Use old clippy attributes syntax (#562) 2018-11-01 11:14:48 +03:00
cfd9a56ff7 Add async/await ref 2018-10-28 09:24:19 -07:00
5f91f5eda6 Correct IoStream::set_keepalive for UDS (#564)
Enable uds feature in tests
2018-10-26 10:59:06 +03:00
42d5d48e71 add a way to configure error treatment for Query and Path extractors (#550)
* add a way to configure error treatment for Query extractor

* allow error handler to be customized for Path extractor
2018-10-20 06:43:43 +03:00
960274ada8 Refactoring of server output to not exclude HTTP_10 (#552) 2018-10-19 07:52:10 +03:00
f383f618b5 Fix typo in error message (#554) 2018-10-18 21:27:31 +03:00
27 changed files with 288 additions and 83 deletions

View File

@ -35,7 +35,7 @@ script:
cargo check --features rust-tls cargo check --features rust-tls
cargo check --features ssl cargo check --features ssl
cargo check --features tls cargo check --features tls
cargo test --features="ssl,tls,rust-tls" -- --nocapture cargo test --features="ssl,tls,rust-tls,uds" -- --nocapture
fi fi
- | - |
if [[ "$TRAVIS_RUST_VERSION" == "nightly" ]]; then if [[ "$TRAVIS_RUST_VERSION" == "nightly" ]]; then

View File

@ -1,5 +1,25 @@
# Changes # Changes
## [0.7.14] - 2018-11-14
### Added
* Add method to configure custom error handler to `Query` and `Path` extractors.
* Add method to configure `SameSite` option in `CookieIdentityPolicy`.
### Fixed
* Fix websockets connection drop if request contains "content-length" header #567
* Fix keep-alive timer reset
* HttpServer now treats streaming bodies the same for HTTP/1.x protocols. #549
* Set nodelay for socket #560
## [0.7.13] - 2018-10-14 ## [0.7.13] - 2018-10-14
### Fixed ### Fixed

View File

@ -1,6 +1,6 @@
[package] [package]
name = "actix-web" name = "actix-web"
version = "0.7.13" version = "0.7.14"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Actix web is a simple, pragmatic and extremely fast web framework for Rust." description = "Actix web is a simple, pragmatic and extremely fast web framework for Rust."
readme = "README.md" readme = "README.md"
@ -61,14 +61,14 @@ flate2-rust = ["flate2/rust_backend"]
cell = ["actix-net/cell"] cell = ["actix-net/cell"]
[dependencies] [dependencies]
actix = "^0.7.5" actix = "0.7.6"
actix-net = "^0.1.1" actix-net = "0.2.2"
base64 = "0.9" askama_escape = "0.1.0"
base64 = "0.10"
bitflags = "1.0" bitflags = "1.0"
failure = "^0.1.2" failure = "^0.1.2"
h2 = "0.1" h2 = "0.1"
htmlescape = "0.3"
http = "^0.1.8" http = "^0.1.8"
httparse = "1.3" httparse = "1.3"
log = "0.4" log = "0.4"

View File

@ -14,6 +14,7 @@ Actix web is a simple, pragmatic and extremely fast web framework for Rust.
* Middlewares ([Logger, Session, CORS, CSRF, etc](https://actix.rs/docs/middleware/)) * Middlewares ([Logger, Session, CORS, CSRF, etc](https://actix.rs/docs/middleware/))
* Includes an asynchronous [HTTP client](https://actix.rs/actix-web/actix_web/client/index.html) * Includes an asynchronous [HTTP client](https://actix.rs/actix-web/actix_web/client/index.html)
* Built on top of [Actix actor framework](https://github.com/actix/actix) * Built on top of [Actix actor framework](https://github.com/actix/actix)
* Experimental [Async/Await](https://github.com/mehcode/actix-web-async-await) support.
## Documentation & community resources ## Documentation & community resources

View File

@ -287,7 +287,7 @@ impl Default for ClientConnector {
} }
}; };
#[cfg_attr(feature = "cargo-clippy", allow(clippy::let_unit_value))] #[cfg_attr(feature = "cargo-clippy", allow(let_unit_value))]
ClientConnector::with_connector_impl(connector) ClientConnector::with_connector_impl(connector)
} }
} }

View File

@ -56,7 +56,7 @@ impl HttpResponseParser {
return Ok(Async::Ready(msg)); return Ok(Async::Ready(msg));
} }
Async::NotReady => { Async::NotReady => {
if buf.capacity() >= MAX_BUFFER_SIZE { if buf.len() >= MAX_BUFFER_SIZE {
return Err(HttpResponseParserError::Error( return Err(HttpResponseParserError::Error(
ParseError::TooLarge, ParseError::TooLarge,
)); ));

View File

@ -1,6 +1,6 @@
#![cfg_attr( #![cfg_attr(
feature = "cargo-clippy", feature = "cargo-clippy",
allow(clippy::redundant_field_names) allow(redundant_field_names)
)] )]
use std::cell::RefCell; use std::cell::RefCell;

View File

@ -111,18 +111,64 @@ impl<T, S> FromRequest<S> for Path<T>
where where
T: DeserializeOwned, T: DeserializeOwned,
{ {
type Config = (); type Config = PathConfig<S>;
type Result = Result<Self, Error>; type Result = Result<Self, Error>;
#[inline] #[inline]
fn from_request(req: &HttpRequest<S>, _: &Self::Config) -> Self::Result { fn from_request(req: &HttpRequest<S>, cfg: &Self::Config) -> Self::Result {
let req = req.clone(); let req = req.clone();
let req2 = req.clone();
let err = Rc::clone(&cfg.ehandler);
de::Deserialize::deserialize(PathDeserializer::new(&req)) de::Deserialize::deserialize(PathDeserializer::new(&req))
.map_err(ErrorNotFound) .map_err(move |e| (*err)(e, &req2))
.map(|inner| Path { inner }) .map(|inner| Path { inner })
} }
} }
/// Path extractor configuration
///
/// ```rust
/// # extern crate actix_web;
/// use actix_web::{error, http, App, HttpResponse, Path, Result};
///
/// /// deserialize `Info` from request's body, max payload size is 4kb
/// fn index(info: Path<(u32, String)>) -> Result<String> {
/// Ok(format!("Welcome {}!", info.1))
/// }
///
/// fn main() {
/// let app = App::new().resource("/index.html/{id}/{name}", |r| {
/// r.method(http::Method::GET).with_config(index, |cfg| {
/// cfg.0.error_handler(|err, req| {
/// // <- create custom error response
/// error::InternalError::from_response(err, HttpResponse::Conflict().finish()).into()
/// });
/// })
/// });
/// }
/// ```
pub struct PathConfig<S> {
ehandler: Rc<Fn(serde_urlencoded::de::Error, &HttpRequest<S>) -> Error>,
}
impl<S> PathConfig<S> {
/// Set custom error handler
pub fn error_handler<F>(&mut self, f: F) -> &mut Self
where
F: Fn(serde_urlencoded::de::Error, &HttpRequest<S>) -> Error + 'static,
{
self.ehandler = Rc::new(f);
self
}
}
impl<S> Default for PathConfig<S> {
fn default() -> Self {
PathConfig {
ehandler: Rc::new(|e, _| ErrorNotFound(e)),
}
}
}
impl<T: fmt::Debug> fmt::Debug for Path<T> { impl<T: fmt::Debug> fmt::Debug for Path<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f) self.inner.fmt(f)
@ -200,17 +246,69 @@ impl<T, S> FromRequest<S> for Query<T>
where where
T: de::DeserializeOwned, T: de::DeserializeOwned,
{ {
type Config = (); type Config = QueryConfig<S>;
type Result = Result<Self, Error>; type Result = Result<Self, Error>;
#[inline] #[inline]
fn from_request(req: &HttpRequest<S>, _: &Self::Config) -> Self::Result { fn from_request(req: &HttpRequest<S>, cfg: &Self::Config) -> Self::Result {
let req2 = req.clone();
let err = Rc::clone(&cfg.ehandler);
serde_urlencoded::from_str::<T>(req.query_string()) serde_urlencoded::from_str::<T>(req.query_string())
.map_err(|e| e.into()) .map_err(move |e| (*err)(e, &req2))
.map(Query) .map(Query)
} }
} }
/// Query extractor configuration
///
/// ```rust
/// # extern crate actix_web;
/// #[macro_use] extern crate serde_derive;
/// use actix_web::{error, http, App, HttpResponse, Query, Result};
///
/// #[derive(Deserialize)]
/// struct Info {
/// username: String,
/// }
///
/// /// deserialize `Info` from request's body, max payload size is 4kb
/// fn index(info: Query<Info>) -> Result<String> {
/// Ok(format!("Welcome {}!", info.username))
/// }
///
/// fn main() {
/// let app = App::new().resource("/index.html", |r| {
/// r.method(http::Method::GET).with_config(index, |cfg| {
/// cfg.0.error_handler(|err, req| {
/// // <- create custom error response
/// error::InternalError::from_response(err, HttpResponse::Conflict().finish()).into()
/// });
/// })
/// });
/// }
/// ```
pub struct QueryConfig<S> {
ehandler: Rc<Fn(serde_urlencoded::de::Error, &HttpRequest<S>) -> Error>,
}
impl<S> QueryConfig<S> {
/// Set custom error handler
pub fn error_handler<F>(&mut self, f: F) -> &mut Self
where
F: Fn(serde_urlencoded::de::Error, &HttpRequest<S>) -> Error + 'static,
{
self.ehandler = Rc::new(f);
self
}
}
impl<S> Default for QueryConfig<S> {
fn default() -> Self {
QueryConfig {
ehandler: Rc::new(|e, _| e.into()),
}
}
}
impl<T: fmt::Debug> fmt::Debug for Query<T> { impl<T: fmt::Debug> fmt::Debug for Query<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f) self.0.fmt(f)
@ -951,15 +1049,15 @@ mod tests {
let info = router.recognize(&req, &(), 0); let info = router.recognize(&req, &(), 0);
let req = req.with_route_info(info); let req = req.with_route_info(info);
let s = Path::<MyStruct>::from_request(&req, &()).unwrap(); let s = Path::<MyStruct>::from_request(&req, &PathConfig::default()).unwrap();
assert_eq!(s.key, "name"); assert_eq!(s.key, "name");
assert_eq!(s.value, "user1"); assert_eq!(s.value, "user1");
let s = Path::<(String, String)>::from_request(&req, &()).unwrap(); let s = Path::<(String, String)>::from_request(&req, &PathConfig::default()).unwrap();
assert_eq!(s.0, "name"); assert_eq!(s.0, "name");
assert_eq!(s.1, "user1"); assert_eq!(s.1, "user1");
let s = Query::<Id>::from_request(&req, &()).unwrap(); let s = Query::<Id>::from_request(&req, &QueryConfig::default()).unwrap();
assert_eq!(s.id, "test"); assert_eq!(s.id, "test");
let mut router = Router::<()>::default(); let mut router = Router::<()>::default();
@ -968,11 +1066,11 @@ mod tests {
let info = router.recognize(&req, &(), 0); let info = router.recognize(&req, &(), 0);
let req = req.with_route_info(info); let req = req.with_route_info(info);
let s = Path::<Test2>::from_request(&req, &()).unwrap(); let s = Path::<Test2>::from_request(&req, &PathConfig::default()).unwrap();
assert_eq!(s.as_ref().key, "name"); assert_eq!(s.as_ref().key, "name");
assert_eq!(s.value, 32); assert_eq!(s.value, 32);
let s = Path::<(String, u8)>::from_request(&req, &()).unwrap(); let s = Path::<(String, u8)>::from_request(&req, &PathConfig::default()).unwrap();
assert_eq!(s.0, "name"); assert_eq!(s.0, "name");
assert_eq!(s.1, 32); assert_eq!(s.1, 32);
@ -989,7 +1087,7 @@ mod tests {
let req = TestRequest::with_uri("/32/").finish(); let req = TestRequest::with_uri("/32/").finish();
let info = router.recognize(&req, &(), 0); let info = router.recognize(&req, &(), 0);
let req = req.with_route_info(info); let req = req.with_route_info(info);
assert_eq!(*Path::<i8>::from_request(&req, &()).unwrap(), 32); assert_eq!(*Path::<i8>::from_request(&req, &&PathConfig::default()).unwrap(), 32);
} }
#[test] #[test]

View File

@ -11,10 +11,10 @@ use std::{cmp, io};
#[cfg(unix)] #[cfg(unix)]
use std::os::unix::fs::MetadataExt; use std::os::unix::fs::MetadataExt;
use askama_escape::{escape as escape_html_entity};
use bytes::Bytes; use bytes::Bytes;
use futures::{Async, Future, Poll, Stream}; use futures::{Async, Future, Poll, Stream};
use futures_cpupool::{CpuFuture, CpuPool}; use futures_cpupool::{CpuFuture, CpuPool};
use htmlescape::encode_minimal as escape_html_entity;
use mime; use mime;
use mime_guess::{get_mime_type, guess_mime_type}; use mime_guess::{get_mime_type, guess_mime_type};
use percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET}; use percent_encoding::{utf8_percent_encode, DEFAULT_ENCODE_SET};
@ -472,6 +472,7 @@ impl<C: StaticFileConfig> Responder for NamedFile<C> {
} }
} }
#[doc(hidden)]
/// A helper created from a `std::fs::File` which reads the file /// A helper created from a `std::fs::File` which reads the file
/// chunk-by-chunk on a `CpuPool`. /// chunk-by-chunk on a `CpuPool`.
pub struct ChunkedReadFile { pub struct ChunkedReadFile {
@ -561,8 +562,23 @@ impl Directory {
} }
} }
// show file url as relative to static path
macro_rules! encode_file_url {
($path:ident) => {
utf8_percent_encode(&$path.to_string_lossy(), DEFAULT_ENCODE_SET)
};
}
// " -- &quot; & -- &amp; ' -- &#x27; < -- &lt; > -- &gt; / -- &#x2f;
macro_rules! encode_file_name {
($entry:ident) => {
escape_html_entity(&$entry.file_name().to_string_lossy())
};
}
fn directory_listing<S>( fn directory_listing<S>(
dir: &Directory, req: &HttpRequest<S>, dir: &Directory,
req: &HttpRequest<S>,
) -> Result<HttpResponse, io::Error> { ) -> Result<HttpResponse, io::Error> {
let index_of = format!("Index of {}", req.path()); let index_of = format!("Index of {}", req.path());
let mut body = String::new(); let mut body = String::new();
@ -575,11 +591,6 @@ fn directory_listing<S>(
Ok(p) => base.join(p), Ok(p) => base.join(p),
Err(_) => continue, Err(_) => continue,
}; };
// show file url as relative to static path
let file_url = utf8_percent_encode(&p.to_string_lossy(), DEFAULT_ENCODE_SET)
.to_string();
// " -- &quot; & -- &amp; ' -- &#x27; < -- &lt; > -- &gt;
let file_name = escape_html_entity(&entry.file_name().to_string_lossy());
// if file is a directory, add '/' to the end of the name // if file is a directory, add '/' to the end of the name
if let Ok(metadata) = entry.metadata() { if let Ok(metadata) = entry.metadata() {
@ -587,13 +598,15 @@ fn directory_listing<S>(
let _ = write!( let _ = write!(
body, body,
"<li><a href=\"{}\">{}/</a></li>", "<li><a href=\"{}\">{}/</a></li>",
file_url, file_name encode_file_url!(p),
encode_file_name!(entry),
); );
} else { } else {
let _ = write!( let _ = write!(
body, body,
"<li><a href=\"{}\">{}</a></li>", "<li><a href=\"{}\">{}</a></li>",
file_url, file_name encode_file_url!(p),
encode_file_name!(entry),
); );
} }
} else { } else {
@ -656,7 +669,8 @@ impl<S: 'static> StaticFiles<S> {
/// Create new `StaticFiles` instance for specified base directory and /// Create new `StaticFiles` instance for specified base directory and
/// `CpuPool`. /// `CpuPool`.
pub fn with_pool<T: Into<PathBuf>>( pub fn with_pool<T: Into<PathBuf>>(
dir: T, pool: CpuPool, dir: T,
pool: CpuPool,
) -> Result<StaticFiles<S>, Error> { ) -> Result<StaticFiles<S>, Error> {
Self::with_config_pool(dir, pool, DefaultConfig) Self::with_config_pool(dir, pool, DefaultConfig)
} }
@ -667,7 +681,8 @@ impl<S: 'static, C: StaticFileConfig> StaticFiles<S, C> {
/// ///
/// Identical with `new` but allows to specify configuration to use. /// Identical with `new` but allows to specify configuration to use.
pub fn with_config<T: Into<PathBuf>>( pub fn with_config<T: Into<PathBuf>>(
dir: T, config: C, dir: T,
config: C,
) -> Result<StaticFiles<S, C>, Error> { ) -> Result<StaticFiles<S, C>, Error> {
// use default CpuPool // use default CpuPool
let pool = { DEFAULT_CPUPOOL.lock().clone() }; let pool = { DEFAULT_CPUPOOL.lock().clone() };
@ -678,7 +693,9 @@ impl<S: 'static, C: StaticFileConfig> StaticFiles<S, C> {
/// Create new `StaticFiles` instance for specified base directory with config and /// Create new `StaticFiles` instance for specified base directory with config and
/// `CpuPool`. /// `CpuPool`.
pub fn with_config_pool<T: Into<PathBuf>>( pub fn with_config_pool<T: Into<PathBuf>>(
dir: T, pool: CpuPool, _: C, dir: T,
pool: CpuPool,
_: C,
) -> Result<StaticFiles<S, C>, Error> { ) -> Result<StaticFiles<S, C>, Error> {
let dir = dir.into().canonicalize()?; let dir = dir.into().canonicalize()?;
@ -736,7 +753,8 @@ impl<S: 'static, C: StaticFileConfig> StaticFiles<S, C> {
} }
fn try_handle( fn try_handle(
&self, req: &HttpRequest<S>, &self,
req: &HttpRequest<S>,
) -> Result<AsyncResult<HttpResponse>, Error> { ) -> Result<AsyncResult<HttpResponse>, Error> {
let tail: String = req.match_info().query("tail")?; let tail: String = req.match_info().query("tail")?;
let relpath = PathBuf::from_param(tail.trim_left_matches('/'))?; let relpath = PathBuf::from_param(tail.trim_left_matches('/'))?;

View File

@ -694,7 +694,7 @@ impl HttpResponseBuilder {
} }
#[inline] #[inline]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::borrowed_box))] #[cfg_attr(feature = "cargo-clippy", allow(borrowed_box))]
fn parts<'a>( fn parts<'a>(
parts: &'a mut Option<Box<InnerHttpResponse>>, err: &Option<HttpError>, parts: &'a mut Option<Box<InnerHttpResponse>>, err: &Option<HttpError>,
) -> Option<&'a mut Box<InnerHttpResponse>> { ) -> Option<&'a mut Box<InnerHttpResponse>> {

View File

@ -18,7 +18,7 @@ impl ConnectionInfo {
/// Create *ConnectionInfo* instance for a request. /// Create *ConnectionInfo* instance for a request.
#[cfg_attr( #[cfg_attr(
feature = "cargo-clippy", feature = "cargo-clippy",
allow(clippy::cyclomatic_complexity) allow(cyclomatic_complexity)
)] )]
pub fn update(&mut self, req: &Request) { pub fn update(&mut self, req: &Request) {
let mut host = None; let mut host = None;

View File

@ -100,9 +100,9 @@ extern crate failure;
extern crate lazy_static; extern crate lazy_static;
#[macro_use] #[macro_use]
extern crate futures; extern crate futures;
extern crate askama_escape;
extern crate cookie; extern crate cookie;
extern crate futures_cpupool; extern crate futures_cpupool;
extern crate htmlescape;
extern crate http as modhttp; extern crate http as modhttp;
extern crate httparse; extern crate httparse;
extern crate language_tags; extern crate language_tags;

View File

@ -48,7 +48,7 @@ impl DefaultHeaders {
/// Set a header. /// Set a header.
#[inline] #[inline]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::match_wild_err_arm))] #[cfg_attr(feature = "cargo-clippy", allow(match_wild_err_arm))]
pub fn header<K, V>(mut self, key: K, value: V) -> Self pub fn header<K, V>(mut self, key: K, value: V) -> Self
where where
HeaderName: HttpTryFrom<K>, HeaderName: HttpTryFrom<K>,

View File

@ -48,7 +48,7 @@
//! ``` //! ```
use std::rc::Rc; use std::rc::Rc;
use cookie::{Cookie, CookieJar, Key}; use cookie::{Cookie, CookieJar, Key, SameSite};
use futures::future::{err as FutErr, ok as FutOk, FutureResult}; use futures::future::{err as FutErr, ok as FutOk, FutureResult};
use futures::Future; use futures::Future;
use time::Duration; use time::Duration;
@ -237,6 +237,7 @@ struct CookieIdentityInner {
domain: Option<String>, domain: Option<String>,
secure: bool, secure: bool,
max_age: Option<Duration>, max_age: Option<Duration>,
same_site: Option<SameSite>,
} }
impl CookieIdentityInner { impl CookieIdentityInner {
@ -248,6 +249,7 @@ impl CookieIdentityInner {
domain: None, domain: None,
secure: true, secure: true,
max_age: None, max_age: None,
same_site: None,
} }
} }
@ -268,6 +270,10 @@ impl CookieIdentityInner {
cookie.set_max_age(max_age); cookie.set_max_age(max_age);
} }
if let Some(same_site) = self.same_site {
cookie.set_same_site(same_site);
}
let mut jar = CookieJar::new(); let mut jar = CookieJar::new();
if some { if some {
jar.private(&self.key).add(cookie); jar.private(&self.key).add(cookie);
@ -370,6 +376,12 @@ impl CookieIdentityPolicy {
Rc::get_mut(&mut self.0).unwrap().max_age = Some(value); Rc::get_mut(&mut self.0).unwrap().max_age = Some(value);
self self
} }
/// Sets the `same_site` field in the session cookie being built.
pub fn same_site(mut self, same_site: SameSite) -> Self {
Rc::get_mut(&mut self.0).unwrap().same_site = Some(same_site);
self
}
} }
impl<S> IdentityPolicy<S> for CookieIdentityPolicy { impl<S> IdentityPolicy<S> for CookieIdentityPolicy {

View File

@ -551,12 +551,12 @@ impl<S: 'static, H> ProcessResponse<S, H> {
if self.resp.as_ref().unwrap().status().is_server_error() if self.resp.as_ref().unwrap().status().is_server_error()
{ {
error!( error!(
"Error occured during request handling, status: {} {}", "Error occurred during request handling, status: {} {}",
self.resp.as_ref().unwrap().status(), err self.resp.as_ref().unwrap().status(), err
); );
} else { } else {
warn!( warn!(
"Error occured during request handling: {}", "Error occurred during request handling: {}",
err err
); );
} }

View File

@ -61,7 +61,7 @@ pub struct Scope<S> {
#[cfg_attr( #[cfg_attr(
feature = "cargo-clippy", feature = "cargo-clippy",
allow(clippy::new_without_default_derive) allow(new_without_default_derive)
)] )]
impl<S: 'static> Scope<S> { impl<S: 'static> Scope<S> {
/// Create a new scope /// Create a new scope

View File

@ -9,14 +9,20 @@ use super::acceptor::{
}; };
use super::error::AcceptorError; use super::error::AcceptorError;
use super::handler::IntoHttpHandler; use super::handler::IntoHttpHandler;
use super::service::HttpService; use super::service::{HttpService, StreamConfiguration};
use super::settings::{ServerSettings, ServiceConfig}; use super::settings::{ServerSettings, ServiceConfig};
use super::KeepAlive; use super::KeepAlive;
pub(crate) trait ServiceProvider { pub(crate) trait ServiceProvider {
fn register( fn register(
&self, server: Server, lst: net::TcpListener, host: String, &self,
addr: net::SocketAddr, keep_alive: KeepAlive, secure: bool, client_timeout: u64, server: Server,
lst: net::TcpListener,
host: String,
addr: net::SocketAddr,
keep_alive: KeepAlive,
secure: bool,
client_timeout: u64,
client_shutdown: u64, client_shutdown: u64,
) -> Server; ) -> Server;
} }
@ -43,8 +49,13 @@ where
} }
fn finish( fn finish(
&self, host: String, addr: net::SocketAddr, keep_alive: KeepAlive, secure: bool, &self,
client_timeout: u64, client_shutdown: u64, host: String,
addr: net::SocketAddr,
keep_alive: KeepAlive,
secure: bool,
client_timeout: u64,
client_shutdown: u64,
) -> impl ServiceFactory { ) -> impl ServiceFactory {
let factory = self.factory.clone(); let factory = self.factory.clone();
let acceptor = self.acceptor.clone(); let acceptor = self.acceptor.clone();
@ -65,6 +76,7 @@ where
acceptor.create(), acceptor.create(),
)).map_err(|_| ()) )).map_err(|_| ())
.map_init_err(|_| ()) .map_init_err(|_| ())
.and_then(StreamConfiguration::new().nodelay(true))
.and_then( .and_then(
HttpService::new(settings) HttpService::new(settings)
.map_init_err(|_| ()) .map_init_err(|_| ())
@ -76,6 +88,7 @@ where
TcpAcceptor::new(acceptor.create().map_err(AcceptorError::Service)) TcpAcceptor::new(acceptor.create().map_err(AcceptorError::Service))
.map_err(|_| ()) .map_err(|_| ())
.map_init_err(|_| ()) .map_init_err(|_| ())
.and_then(StreamConfiguration::new().nodelay(true))
.and_then( .and_then(
HttpService::new(settings) HttpService::new(settings)
.map_init_err(|_| ()) .map_init_err(|_| ())
@ -95,8 +108,14 @@ where
H: IntoHttpHandler, H: IntoHttpHandler,
{ {
fn register( fn register(
&self, server: Server, lst: net::TcpListener, host: String, &self,
addr: net::SocketAddr, keep_alive: KeepAlive, secure: bool, client_timeout: u64, server: Server,
lst: net::TcpListener,
host: String,
addr: net::SocketAddr,
keep_alive: KeepAlive,
secure: bool,
client_timeout: u64,
client_shutdown: u64, client_shutdown: u64,
) -> Server { ) -> Server {
server.listen2( server.listen2(

View File

@ -87,7 +87,10 @@ where
H: HttpHandler + 'static, H: HttpHandler + 'static,
{ {
pub fn new( pub fn new(
settings: ServiceConfig<H>, stream: T, buf: BytesMut, is_eof: bool, settings: ServiceConfig<H>,
stream: T,
buf: BytesMut,
is_eof: bool,
keepalive_timer: Option<Delay>, keepalive_timer: Option<Delay>,
) -> Self { ) -> Self {
let addr = stream.peer_addr(); let addr = stream.peer_addr();
@ -123,8 +126,11 @@ where
} }
pub(crate) fn for_error( pub(crate) fn for_error(
settings: ServiceConfig<H>, stream: T, status: StatusCode, settings: ServiceConfig<H>,
mut keepalive_timer: Option<Delay>, buf: BytesMut, stream: T,
status: StatusCode,
mut keepalive_timer: Option<Delay>,
buf: BytesMut,
) -> Self { ) -> Self {
if let Some(deadline) = settings.client_timer_expire() { if let Some(deadline) = settings.client_timer_expire() {
let _ = keepalive_timer.as_mut().map(|delay| delay.reset(deadline)); let _ = keepalive_timer.as_mut().map(|delay| delay.reset(deadline));
@ -280,7 +286,7 @@ where
} }
if timer.deadline() >= self.ka_expire { if timer.deadline() >= self.ka_expire {
// check for any outstanding request handling // check for any outstanding request handling
if self.tasks.is_empty() { if self.tasks.is_empty() && self.flags.contains(Flags::FLUSHED) {
if !self.flags.contains(Flags::STARTED) { if !self.flags.contains(Flags::STARTED) {
// timeout on first request (slow request) return 408 // timeout on first request (slow request) return 408
trace!("Slow request timeout"); trace!("Slow request timeout");
@ -298,16 +304,19 @@ where
if let Some(deadline) = if let Some(deadline) =
self.settings.client_shutdown_timer() self.settings.client_shutdown_timer()
{ {
timer.reset(deadline) timer.reset(deadline);
let _ = timer.poll();
} else { } else {
return Ok(()); return Ok(());
} }
} }
} else if let Some(dl) = self.settings.keep_alive_expire() { } else if let Some(dl) = self.settings.keep_alive_expire() {
timer.reset(dl) timer.reset(dl);
let _ = timer.poll();
} }
} else { } else {
timer.reset(self.ka_expire) timer.reset(self.ka_expire);
let _ = timer.poll();
} }
} }
Ok(Async::NotReady) => (), Ok(Async::NotReady) => (),

View File

@ -43,7 +43,9 @@ impl H1Decoder {
} }
pub fn decode<H>( pub fn decode<H>(
&mut self, src: &mut BytesMut, settings: &ServiceConfig<H>, &mut self,
src: &mut BytesMut,
settings: &ServiceConfig<H>,
) -> Result<Option<Message>, DecoderError> { ) -> Result<Option<Message>, DecoderError> {
// read payload // read payload
if self.decoder.is_some() { if self.decoder.is_some() {
@ -80,7 +82,9 @@ impl H1Decoder {
} }
fn parse_message<H>( fn parse_message<H>(
&self, buf: &mut BytesMut, settings: &ServiceConfig<H>, &self,
buf: &mut BytesMut,
settings: &ServiceConfig<H>,
) -> Poll<(Request, Option<EncodingDecoder>), ParseError> { ) -> Poll<(Request, Option<EncodingDecoder>), ParseError> {
// Parse http message // Parse http message
let mut has_upgrade = false; let mut has_upgrade = false;
@ -178,6 +182,13 @@ impl H1Decoder {
} }
header::UPGRADE => { header::UPGRADE => {
has_upgrade = true; has_upgrade = true;
// check content-length, some clients (dart)
// sends "content-length: 0" with websocket upgrade
if let Ok(val) = value.to_str() {
if val == "websocket" {
content_length = None;
}
}
} }
_ => (), _ => (),
} }
@ -221,7 +232,9 @@ pub(crate) struct HeaderIndex {
impl HeaderIndex { impl HeaderIndex {
pub(crate) fn record( pub(crate) fn record(
bytes: &[u8], headers: &[httparse::Header], indices: &mut [HeaderIndex], bytes: &[u8],
headers: &[httparse::Header],
indices: &mut [HeaderIndex],
) { ) {
let bytes_ptr = bytes.as_ptr() as usize; let bytes_ptr = bytes.as_ptr() as usize;
for (header, indices) in headers.iter().zip(indices.iter_mut()) { for (header, indices) in headers.iter().zip(indices.iter_mut()) {
@ -369,7 +382,10 @@ macro_rules! byte (
impl ChunkedState { impl ChunkedState {
fn step( fn step(
&self, body: &mut BytesMut, size: &mut u64, buf: &mut Option<Bytes>, &self,
body: &mut BytesMut,
size: &mut u64,
buf: &mut Option<Bytes>,
) -> Poll<ChunkedState, io::Error> { ) -> Poll<ChunkedState, io::Error> {
use self::ChunkedState::*; use self::ChunkedState::*;
match *self { match *self {
@ -432,7 +448,8 @@ impl ChunkedState {
} }
} }
fn read_size_lf( fn read_size_lf(
rdr: &mut BytesMut, size: &mut u64, rdr: &mut BytesMut,
size: &mut u64,
) -> Poll<ChunkedState, io::Error> { ) -> Poll<ChunkedState, io::Error> {
match byte!(rdr) { match byte!(rdr) {
b'\n' if *size > 0 => Ok(Async::Ready(ChunkedState::Body)), b'\n' if *size > 0 => Ok(Async::Ready(ChunkedState::Body)),
@ -445,7 +462,9 @@ impl ChunkedState {
} }
fn read_body( fn read_body(
rdr: &mut BytesMut, rem: &mut u64, buf: &mut Option<Bytes>, rdr: &mut BytesMut,
rem: &mut u64,
buf: &mut Option<Bytes>,
) -> Poll<ChunkedState, io::Error> { ) -> Poll<ChunkedState, io::Error> {
trace!("Chunked read, remaining={:?}", rem); trace!("Chunked read, remaining={:?}", rem);

View File

@ -60,7 +60,10 @@ where
H: HttpHandler + 'static, H: HttpHandler + 'static,
{ {
pub fn new( pub fn new(
settings: ServiceConfig<H>, io: T, buf: Bytes, keepalive_timer: Option<Delay>, settings: ServiceConfig<H>,
io: T,
buf: Bytes,
keepalive_timer: Option<Delay>,
) -> Self { ) -> Self {
let addr = io.peer_addr(); let addr = io.peer_addr();
let extensions = io.extensions(); let extensions = io.extensions();
@ -284,10 +287,12 @@ where
if self.tasks.is_empty() { if self.tasks.is_empty() {
return Err(HttpDispatchError::ShutdownTimeout); return Err(HttpDispatchError::ShutdownTimeout);
} else if let Some(dl) = self.settings.keep_alive_expire() { } else if let Some(dl) = self.settings.keep_alive_expire() {
timer.reset(dl) timer.reset(dl);
let _ = timer.poll();
} }
} else { } else {
timer.reset(self.ka_expire) timer.reset(self.ka_expire);
let _ = timer.poll();
} }
} }
Ok(Async::NotReady) => (), Ok(Async::NotReady) => (),
@ -348,8 +353,11 @@ struct Entry<H: HttpHandler + 'static> {
impl<H: HttpHandler + 'static> Entry<H> { impl<H: HttpHandler + 'static> Entry<H> {
fn new( fn new(
parts: Parts, recv: RecvStream, resp: SendResponse<Bytes>, parts: Parts,
addr: Option<SocketAddr>, settings: ServiceConfig<H>, recv: RecvStream,
resp: SendResponse<Bytes>,
addr: Option<SocketAddr>,
settings: ServiceConfig<H>,
extensions: Option<Rc<Extensions>>, extensions: Option<Rc<Extensions>>,
) -> Entry<H> ) -> Entry<H>
where where

View File

@ -1,6 +1,6 @@
#![cfg_attr( #![cfg_attr(
feature = "cargo-clippy", feature = "cargo-clippy",
allow(clippy::redundant_field_names) allow(redundant_field_names)
)] )]
use std::{cmp, io}; use std::{cmp, io};

View File

@ -326,7 +326,7 @@ where
#[doc(hidden)] #[doc(hidden)]
#[cfg_attr( #[cfg_attr(
feature = "cargo-clippy", feature = "cargo-clippy",
allow(clippy::needless_pass_by_value) allow(needless_pass_by_value)
)] )]
pub fn bind_with<S, A>(mut self, addr: S, acceptor: A) -> io::Result<Self> pub fn bind_with<S, A>(mut self, addr: S, acceptor: A) -> io::Result<Self>
where where

View File

@ -334,7 +334,7 @@ impl IoStream for ::tokio_uds::UnixStream {
} }
#[inline] #[inline]
fn set_keepalive(&mut self, _nodelay: bool) -> io::Result<()> { fn set_keepalive(&mut self, _dur: Option<time::Duration>) -> io::Result<()> {
Ok(()) Ok(())
} }
} }

View File

@ -300,10 +300,10 @@ impl Output {
Some(true) => { Some(true) => {
// Enable transfer encoding // Enable transfer encoding
info.length = ResponseLength::Chunked; info.length = ResponseLength::Chunked;
if version == Version::HTTP_11 { if version == Version::HTTP_2 {
TransferEncoding::chunked(buf)
} else {
TransferEncoding::eof(buf) TransferEncoding::eof(buf)
} else {
TransferEncoding::chunked(buf)
} }
} }
Some(false) => TransferEncoding::eof(buf), Some(false) => TransferEncoding::eof(buf),
@ -337,10 +337,10 @@ impl Output {
} else { } else {
// Enable transfer encoding // Enable transfer encoding
info.length = ResponseLength::Chunked; info.length = ResponseLength::Chunked;
if version == Version::HTTP_11 { if version == Version::HTTP_2 {
TransferEncoding::chunked(buf)
} else {
TransferEncoding::eof(buf) TransferEncoding::eof(buf)
} else {
TransferEncoding::chunked(buf)
} }
} }
} }
@ -438,7 +438,7 @@ impl ContentEncoder {
} }
} }
#[cfg_attr(feature = "cargo-clippy", allow(clippy::inline_always))] #[cfg_attr(feature = "cargo-clippy", allow(inline_always))]
#[inline(always)] #[inline(always)]
pub fn write_eof(&mut self) -> Result<bool, io::Error> { pub fn write_eof(&mut self) -> Result<bool, io::Error> {
let encoder = let encoder =
@ -480,7 +480,7 @@ impl ContentEncoder {
} }
} }
#[cfg_attr(feature = "cargo-clippy", allow(clippy::inline_always))] #[cfg_attr(feature = "cargo-clippy", allow(inline_always))]
#[inline(always)] #[inline(always)]
pub fn write(&mut self, data: &[u8]) -> Result<(), io::Error> { pub fn write(&mut self, data: &[u8]) -> Result<(), io::Error> {
match *self { match *self {

View File

@ -46,7 +46,7 @@ impl Frame {
Frame::message(payload, OpCode::Close, true, genmask) Frame::message(payload, OpCode::Close, true, genmask)
} }
#[cfg_attr(feature = "cargo-clippy", allow(clippy::type_complexity))] #[cfg_attr(feature = "cargo-clippy", allow(type_complexity))]
fn read_copy_md<S>( fn read_copy_md<S>(
pl: &mut PayloadBuffer<S>, server: bool, max_size: usize, pl: &mut PayloadBuffer<S>, server: bool, max_size: usize,
) -> Poll<Option<(usize, bool, OpCode, usize, Option<u32>)>, ProtocolError> ) -> Poll<Option<(usize, bool, OpCode, usize, Option<u32>)>, ProtocolError>

View File

@ -1,5 +1,5 @@
//! This is code from [Tungstenite project](https://github.com/snapview/tungstenite-rs) //! This is code from [Tungstenite project](https://github.com/snapview/tungstenite-rs)
#![cfg_attr(feature = "cargo-clippy", allow(clippy::cast_ptr_alignment))] #![cfg_attr(feature = "cargo-clippy", allow(cast_ptr_alignment))]
use std::ptr::copy_nonoverlapping; use std::ptr::copy_nonoverlapping;
use std::slice; use std::slice;
@ -19,7 +19,7 @@ impl<'a> ShortSlice<'a> {
/// Faster version of `apply_mask()` which operates on 8-byte blocks. /// Faster version of `apply_mask()` which operates on 8-byte blocks.
#[inline] #[inline]
#[cfg_attr(feature = "cargo-clippy", allow(clippy::cast_lossless))] #[cfg_attr(feature = "cargo-clippy", allow(cast_lossless))]
pub(crate) fn apply_mask(buf: &mut [u8], mask_u32: u32) { pub(crate) fn apply_mask(buf: &mut [u8], mask_u32: u32) {
// Extend the mask to 64 bits // Extend the mask to 64 bits
let mut mask_u64 = ((mask_u32 as u64) << 32) | (mask_u32 as u64); let mut mask_u64 = ((mask_u32 as u64) << 32) | (mask_u32 as u64);
@ -52,7 +52,7 @@ pub(crate) fn apply_mask(buf: &mut [u8], mask_u32: u32) {
// a `ShortSlice` must be smaller than a u64. // a `ShortSlice` must be smaller than a u64.
#[cfg_attr( #[cfg_attr(
feature = "cargo-clippy", feature = "cargo-clippy",
allow(clippy::needless_pass_by_value) allow(needless_pass_by_value)
)] )]
fn xor_short(buf: ShortSlice, mask: u64) { fn xor_short(buf: ShortSlice, mask: u64) {
// Unsafe: we know that a `ShortSlice` fits in a u64 // Unsafe: we know that a `ShortSlice` fits in a u64

View File

@ -385,10 +385,11 @@ fn test_ws_stopped() {
{ {
let (reader, mut writer) = srv.ws().unwrap(); let (reader, mut writer) = srv.ws().unwrap();
writer.text("text"); writer.text("text");
writer.close(None);
let (item, _) = srv.execute(reader.into_future()).unwrap(); let (item, _) = srv.execute(reader.into_future()).unwrap();
assert_eq!(item, Some(ws::Message::Text("text".to_owned()))); assert_eq!(item, Some(ws::Message::Text("text".to_owned())));
} }
thread::sleep(time::Duration::from_millis(1000)); thread::sleep(time::Duration::from_millis(100));
assert_eq!(num.load(Ordering::Relaxed), 1); assert_eq!(num.load(Ordering::Relaxed), 1);
} }