
Merge remote-tracking branch 'origin/master' into remove-extensions-from-head

Rob Ede 2021-12-08 07:30:14 +00:00
commit fb5b4734a4
GPG Key ID: 97C636207D3EF933
62 changed files with 252 additions and 392 deletions

View File

@@ -262,9 +262,9 @@ impl Files {
         self
     }

-    /// See [`Files::method_guard`].
     #[doc(hidden)]
     #[deprecated(since = "0.6.0", note = "Renamed to `method_guard`.")]
+    /// See [`Files::method_guard`].
     pub fn use_guards<G: Guard + 'static>(self, guard: G) -> Self {
         self.method_guard(guard)
     }

View File

@ -11,8 +11,8 @@
//! .service(Files::new("/static", ".").prefer_utf8(true)); //! .service(Files::new("/static", ".").prefer_utf8(true));
//! ``` //! ```
#![deny(rust_2018_idioms)] #![deny(rust_2018_idioms, nonstandard_style)]
#![warn(missing_docs, missing_debug_implementations)] #![warn(future_incompatible, missing_docs, missing_debug_implementations)]
use actix_service::boxed::{BoxService, BoxServiceFactory}; use actix_service::boxed::{BoxService, BoxServiceFactory};
use actix_web::{ use actix_web::{

View File

@@ -1,6 +1,7 @@
 //! Various helpers for Actix applications to use during testing.

-#![deny(rust_2018_idioms)]
+#![deny(rust_2018_idioms, nonstandard_style)]
+#![warn(future_incompatible)]
 #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
 #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]

View File

@@ -189,11 +189,7 @@ mod _original {
                 n /= 100;
                 curr -= 2;
                 unsafe {
-                    ptr::copy_nonoverlapping(
-                        lut_ptr.offset(d1 as isize),
-                        buf_ptr.offset(curr),
-                        2,
-                    );
+                    ptr::copy_nonoverlapping(lut_ptr.offset(d1 as isize), buf_ptr.offset(curr), 2);
                 }

                 // decode last 1 or 2 chars
@@ -206,11 +202,7 @@ mod _original {
                 let d1 = n << 1;
                 curr -= 2;
                 unsafe {
-                    ptr::copy_nonoverlapping(
-                        lut_ptr.offset(d1 as isize),
-                        buf_ptr.offset(curr),
-                        2,
-                    );
+                    ptr::copy_nonoverlapping(lut_ptr.offset(d1 as isize), buf_ptr.offset(curr), 2);
                 }
             }

View File

@ -54,15 +54,10 @@ const EMPTY_HEADER_INDEX: HeaderIndex = HeaderIndex {
value: (0, 0), value: (0, 0),
}; };
const EMPTY_HEADER_INDEX_ARRAY: [HeaderIndex; MAX_HEADERS] = const EMPTY_HEADER_INDEX_ARRAY: [HeaderIndex; MAX_HEADERS] = [EMPTY_HEADER_INDEX; MAX_HEADERS];
[EMPTY_HEADER_INDEX; MAX_HEADERS];
impl HeaderIndex { impl HeaderIndex {
fn record( fn record(bytes: &[u8], headers: &[httparse::Header<'_>], indices: &mut [HeaderIndex]) {
bytes: &[u8],
headers: &[httparse::Header<'_>],
indices: &mut [HeaderIndex],
) {
let bytes_ptr = bytes.as_ptr() as usize; let bytes_ptr = bytes.as_ptr() as usize;
for (header, indices) in headers.iter().zip(indices.iter_mut()) { for (header, indices) in headers.iter().zip(indices.iter_mut()) {
let name_start = header.name.as_ptr() as usize - bytes_ptr; let name_start = header.name.as_ptr() as usize - bytes_ptr;

View File

@@ -25,10 +25,7 @@ async fn main() -> io::Result<()> {
             Ok::<_, Error>(
                 Response::build(StatusCode::OK)
-                    .insert_header((
-                        "x-head",
-                        HeaderValue::from_static("dummy value!"),
-                    ))
+                    .insert_header(("x-head", HeaderValue::from_static("dummy value!")))
                     .body(body),
             )
         })

View File

@ -1,8 +1,7 @@
use std::io; use std::io;
use actix_http::{ use actix_http::{
body::MessageBody, header::HeaderValue, Error, HttpService, Request, Response, body::MessageBody, header::HeaderValue, Error, HttpService, Request, Response, StatusCode,
StatusCode,
}; };
use actix_server::Server; use actix_server::Server;
use bytes::BytesMut; use bytes::BytesMut;

View File

@@ -21,10 +21,7 @@ async fn main() -> io::Result<()> {
                 log::info!("{:?}", req);

                 let mut res = Response::build(StatusCode::OK);
-                res.insert_header((
-                    "x-head",
-                    HeaderValue::from_static("dummy value!"),
-                ));
+                res.insert_header(("x-head", HeaderValue::from_static("dummy value!")));

                 let forty_two = req.extensions().get::<u32>().unwrap().to_string();
                 res.insert_header((

View File

@@ -60,10 +60,7 @@ impl Heartbeat {
 impl Stream for Heartbeat {
     type Item = Result<Bytes, Error>;

-    fn poll_next(
-        mut self: Pin<&mut Self>,
-        cx: &mut Context<'_>,
-    ) -> Poll<Option<Self::Item>> {
+    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
         log::trace!("poll");

         ready!(self.as_mut().interval.poll_tick(cx));

View File

@@ -1,5 +0,0 @@
-max_width = 89
-reorder_imports = true
-#wrap_comments = true
-#fn_args_density = "Compressed"
-#use_small_heuristics = false

View File

@@ -165,8 +165,7 @@ mod tests {
     #[actix_rt::test]
     async fn stream_delayed_error() {
-        let body =
-            BodyStream::new(stream::iter(vec![Ok(Bytes::from("1")), Err(StreamErr)]));
+        let body = BodyStream::new(stream::iter(vec![Ok(Bytes::from("1")), Err(StreamErr)]));
         assert!(matches!(to_bytes(body).await, Err(StreamErr)));

     pin_project! {

View File

@@ -24,9 +24,7 @@ impl BoxBody {
     }

     /// Returns a mutable pinned reference to the inner message body type.
-    pub fn as_pin_mut(
-        &mut self,
-    ) -> Pin<&mut (dyn MessageBody<Error = Box<dyn StdError>>)> {
+    pub fn as_pin_mut(&mut self) -> Pin<&mut (dyn MessageBody<Error = Box<dyn StdError>>)> {
         self.0.as_mut()
     }
 }

View File

@@ -68,8 +68,7 @@ mod test {
         let bytes = to_bytes(body).await.unwrap();
         assert_eq!(bytes, b"123"[..]);

-        let stream =
-            stream::iter(vec![Bytes::from_static(b"123"), Bytes::from_static(b"abc")])
+        let stream = stream::iter(vec![Bytes::from_static(b"123"), Bytes::from_static(b"abc")])
             .map(Ok::<_, Error>);
         let body = BodyStream::new(stream);
         let bytes = to_bytes(body).await.unwrap();

View File

@@ -214,8 +214,7 @@ where
             self.local_addr,
         );

-        H2Service::with_config(cfg, service.into_factory())
-            .on_connect_ext(self.on_connect_ext)
+        H2Service::with_config(cfg, service.into_factory()).on_connect_ext(self.on_connect_ext)
     }

     /// Finish service configuration and create `HttpService` instance.

View File

@@ -44,17 +44,17 @@ where
     pub fn new(stream: S, encoding: ContentEncoding) -> Decoder<S> {
         let decoder = match encoding {
             #[cfg(feature = "compress-brotli")]
-            ContentEncoding::Br => Some(ContentDecoder::Br(Box::new(
-                BrotliDecoder::new(Writer::new()),
-            ))),
+            ContentEncoding::Br => Some(ContentDecoder::Br(Box::new(BrotliDecoder::new(
+                Writer::new(),
+            )))),
             #[cfg(feature = "compress-gzip")]
             ContentEncoding::Deflate => Some(ContentDecoder::Deflate(Box::new(
                 ZlibDecoder::new(Writer::new()),
             ))),
             #[cfg(feature = "compress-gzip")]
-            ContentEncoding::Gzip => Some(ContentDecoder::Gzip(Box::new(
-                GzDecoder::new(Writer::new()),
-            ))),
+            ContentEncoding::Gzip => Some(ContentDecoder::Gzip(Box::new(GzDecoder::new(
+                Writer::new(),
+            )))),
             #[cfg(feature = "compress-zstd")]
             ContentEncoding::Zstd => Some(ContentDecoder::Zstd(Box::new(
                 ZstdDecoder::new(Writer::new()).expect(
@@ -93,10 +93,7 @@ where
 {
     type Item = Result<Bytes, PayloadError>;

-    fn poll_next(
-        mut self: Pin<&mut Self>,
-        cx: &mut Context<'_>,
-    ) -> Poll<Option<Self::Item>> {
+    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
         loop {
             if let Some(ref mut fut) = self.fut {
                 let (chunk, decoder) =

View File

@@ -53,11 +53,7 @@ impl<B: MessageBody> Encoder<B> {
         }
     }

-    pub fn response(
-        encoding: ContentEncoding,
-        head: &mut ResponseHead,
-        body: B,
-    ) -> Self {
+    pub fn response(encoding: ContentEncoding, head: &mut ResponseHead, body: B) -> Self {
         let can_encode = !(head.headers().contains_key(&CONTENT_ENCODING)
             || head.status == StatusCode::SWITCHING_PROTOCOLS
             || head.status == StatusCode::NO_CONTENT

View File

@@ -457,8 +457,7 @@ mod tests {
     #[test]
     fn test_payload_error() {
-        let err: PayloadError =
-            io::Error::new(io::ErrorKind::Other, "ParseError").into();
+        let err: PayloadError = io::Error::new(io::ErrorKind::Other, "ParseError").into();
         assert!(err.to_string().contains("ParseError"));

         let err = PayloadError::Incomplete(None);

View File

@@ -50,10 +50,7 @@ impl ChunkedState {
         }
     }

-    fn read_size(
-        rdr: &mut BytesMut,
-        size: &mut u64,
-    ) -> Poll<Result<ChunkedState, io::Error>> {
+    fn read_size(rdr: &mut BytesMut, size: &mut u64) -> Poll<Result<ChunkedState, io::Error>> {
         let radix = 16;

         let rem = match byte!(rdr) {
@@ -111,10 +108,7 @@ impl ChunkedState {
             _ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
         }
     }

-    fn read_size_lf(
-        rdr: &mut BytesMut,
-        size: u64,
-    ) -> Poll<Result<ChunkedState, io::Error>> {
+    fn read_size_lf(rdr: &mut BytesMut, size: u64) -> Poll<Result<ChunkedState, io::Error>> {
         match byte!(rdr) {
             b'\n' if size > 0 => Poll::Ready(Ok(ChunkedState::Body)),
             b'\n' if size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)),

View File

@@ -74,8 +74,7 @@ pub(crate) trait MessageType: Sized {
         let headers = self.headers_mut();

         for idx in raw_headers.iter() {
-            let name =
-                HeaderName::from_bytes(&slice[idx.name.0..idx.name.1]).unwrap();
+            let name = HeaderName::from_bytes(&slice[idx.name.0..idx.name.1]).unwrap();

             // SAFETY: httparse already checks header value is only visible ASCII bytes
             // from_maybe_shared_unchecked contains debug assertions so they are omitted here
@@ -605,8 +604,7 @@ mod tests {
     #[test]
     fn test_parse_body() {
-        let mut buf =
-            BytesMut::from("GET /test HTTP/1.1\r\nContent-Length: 4\r\n\r\nbody");
+        let mut buf = BytesMut::from("GET /test HTTP/1.1\r\nContent-Length: 4\r\n\r\nbody");

         let mut reader = MessageDecoder::<Request>::default();
         let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
@@ -622,8 +620,7 @@ mod tests {
     #[test]
     fn test_parse_body_crlf() {
-        let mut buf =
-            BytesMut::from("\r\nGET /test HTTP/1.1\r\nContent-Length: 4\r\n\r\nbody");
+        let mut buf = BytesMut::from("\r\nGET /test HTTP/1.1\r\nContent-Length: 4\r\n\r\nbody");

         let mut reader = MessageDecoder::<Request>::default();
         let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();

View File

@@ -27,6 +27,7 @@ use crate::{
 use super::{
     codec::Codec,
+    decoder::MAX_BUFFER_SIZE,
     payload::{Payload, PayloadSender, PayloadStatus},
     Message, MessageType,
 };
@@ -259,10 +260,7 @@ where
         }
     }

-    fn poll_flush(
-        self: Pin<&mut Self>,
-        cx: &mut Context<'_>,
-    ) -> Poll<Result<(), io::Error>> {
+    fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
         let InnerDispatcherProj { io, write_buf, .. } = self.project();
         let mut io = Pin::new(io.as_mut().unwrap());
@@ -272,10 +270,7 @@ where
         while written < len {
             match io.as_mut().poll_write(cx, &write_buf[written..])? {
                 Poll::Ready(0) => {
-                    return Poll::Ready(Err(io::Error::new(
-                        io::ErrorKind::WriteZero,
-                        "",
-                    )))
+                    return Poll::Ready(Err(io::Error::new(io::ErrorKind::WriteZero, "")))
                 }
                 Poll::Ready(n) => written += n,
                 Poll::Pending => {
@@ -418,15 +413,12 @@ where
                     while this.write_buf.len() < super::payload::MAX_BUFFER_SIZE {
                         match stream.as_mut().poll_next(cx) {
                             Poll::Ready(Some(Ok(item))) => {
-                                this.codec.encode(
-                                    Message::Chunk(Some(item)),
-                                    this.write_buf,
-                                )?;
+                                this.codec
+                                    .encode(Message::Chunk(Some(item)), this.write_buf)?;
                             }

                             Poll::Ready(None) => {
-                                this.codec
-                                    .encode(Message::Chunk(None), this.write_buf)?;
+                                this.codec.encode(Message::Chunk(None), this.write_buf)?;
                                 // payload stream finished.
                                 // set state to None and handle next message
                                 this.state.set(State::None);
@@ -453,15 +445,12 @@ where
                     while this.write_buf.len() < super::payload::MAX_BUFFER_SIZE {
                         match stream.as_mut().poll_next(cx) {
                             Poll::Ready(Some(Ok(item))) => {
-                                this.codec.encode(
-                                    Message::Chunk(Some(item)),
-                                    this.write_buf,
-                                )?;
+                                this.codec
+                                    .encode(Message::Chunk(Some(item)), this.write_buf)?;
                             }

                             Poll::Ready(None) => {
-                                this.codec
-                                    .encode(Message::Chunk(None), this.write_buf)?;
+                                this.codec.encode(Message::Chunk(None), this.write_buf)?;
                                 // payload stream finished.
                                 // set state to None and handle next message
                                 this.state.set(State::None);
@@ -567,9 +556,11 @@ where
                                 }
                             };
                         }
-                        _ => unreachable!(
-                            "State must be set to ServiceCall or ExceptCall in handle_request"
-                        ),
+                        _ => {
+                            unreachable!(
+                                "State must be set to ServiceCall or ExceptCall in handle_request"
+                            )
+                        }
                     }
                 }
             }
@@ -603,8 +594,7 @@ where
                     // everything remain in read buffer would be handed to
                     // upgraded Request.
                     MessageType::Stream if this.flow.upgrade.is_some() => {
-                        this.messages
-                            .push_back(DispatcherMessage::Upgrade(req));
+                        this.messages.push_back(DispatcherMessage::Upgrade(req));
                         break;
                     }
@@ -619,8 +609,7 @@ where
                         where the state can be collected and consumed.
                         */
                         let (ps, pl) = Payload::create(false);
-                        let (req1, _) =
-                            req.replace_payload(crate::Payload::H1(pl));
+                        let (req1, _) = req.replace_payload(crate::Payload::H1(pl));
                         req = req1;
                         *this.payload = Some(ps);
                     }
@@ -641,9 +630,7 @@ where
                     if let Some(ref mut payload) = this.payload {
                         payload.feed_data(chunk);
                     } else {
-                        error!(
-                            "Internal server error: unexpected payload chunk"
-                        );
+                        error!("Internal server error: unexpected payload chunk");
                         this.flags.insert(Flags::READ_DISCONNECT);
                         this.messages.push_back(DispatcherMessage::Error(
                             Response::internal_server_error().drop_body(),
@@ -681,12 +668,11 @@ where
                         payload.set_error(PayloadError::Overflow);
                     }
                     // Requests overflow buffer size should be responded with 431
-                    this.messages.push_back(DispatcherMessage::Error(
-                        Response::with_body(
-                            StatusCode::REQUEST_HEADER_FIELDS_TOO_LARGE,
-                            (),
-                        ),
-                    ));
+                    this.messages
+                        .push_back(DispatcherMessage::Error(Response::with_body(
+                            StatusCode::REQUEST_HEADER_FIELDS_TOO_LARGE,
+                            (),
+                        )));
                     this.flags.insert(Flags::READ_DISCONNECT);
                     *this.error = Some(ParseError::TooLarge.into());
                     break;
@@ -728,8 +714,7 @@ where
             None => {
                 // conditionally go into shutdown timeout
                 if this.flags.contains(Flags::SHUTDOWN) {
-                    if let Some(deadline) = this.codec.config().client_disconnect_timer()
-                    {
+                    if let Some(deadline) = this.codec.config().client_disconnect_timer() {
                         // write client disconnect time out and poll again to
                         // go into Some<Pin<&mut Sleep>> branch
                         this.ka_timer.set(Some(sleep_until(deadline)));
@@ -772,9 +757,7 @@ where
                         this.flags.insert(Flags::STARTED | Flags::SHUTDOWN);
                     }
                     // still have unfinished task. try to reset and register keep-alive.
-                } else if let Some(deadline) =
-                    this.codec.config().keep_alive_expire()
-                {
+                } else if let Some(deadline) = this.codec.config().keep_alive_expire() {
                     timer.as_mut().reset(deadline);
                     let _ = timer.poll(cx);
                 }
@@ -793,7 +776,6 @@ where
     /// Returns true when io stream can be disconnected after write to it.
     ///
     /// It covers these conditions:
-    ///
     /// - `std::io::ErrorKind::ConnectionReset` after partial read.
     /// - all data read done.
     #[inline(always)]
@@ -813,46 +795,39 @@ where
         loop {
             // Return early when read buf exceed decoder's max buffer size.
-            if this.read_buf.len() >= super::decoder::MAX_BUFFER_SIZE {
-                /*
-                At this point it's not known IO stream is still scheduled
-                to be waked up. so force wake up dispatcher just in case.
-
-                Reason:
-                AsyncRead mostly would only have guarantee wake up
-                when the poll_read return Poll::Pending.
-
-                Case:
-                When read_buf is beyond max buffer size the early return
-                could be successfully be parsed as a new Request.
-                This case would not generate ParseError::TooLarge
-                and at this point IO stream is not fully read to Pending
-                and would result in dispatcher stuck until timeout (KA)
-
-                Note:
-                This is a perf choice to reduce branch on
-                <Request as MessageType>::decode.
-
-                A Request head too large to parse is only checked on
-                httparse::Status::Partial condition.
-                */
+            if this.read_buf.len() >= MAX_BUFFER_SIZE {
+                // At this point it's not known IO stream is still scheduled to be waked up so
+                // force wake up dispatcher just in case.
+                //
+                // Reason:
+                // AsyncRead mostly would only have guarantee wake up when the poll_read
+                // return Poll::Pending.
+                //
+                // Case:
+                // When read_buf is beyond max buffer size the early return could be successfully
+                // be parsed as a new Request. This case would not generate ParseError::TooLarge and
+                // at this point IO stream is not fully read to Pending and would result in
+                // dispatcher stuck until timeout (KA)
+                //
+                // Note:
+                // This is a perf choice to reduce branch on <Request as MessageType>::decode.
+                //
+                // A Request head too large to parse is only checked on
+                // `httparse::Status::Partial` condition.

                 if this.payload.is_none() {
-                    /*
-                    When dispatcher has a payload the responsibility of
-                    wake up it would be shift to h1::payload::Payload.
-
-                    Reason:
-                    Self wake up when there is payload would waste poll
-                    and/or result in over read.
-
-                    Case:
-                    When payload is (partial) dropped by user there is
-                    no need to do read anymore.
-                    At this case read_buf could always remain beyond
-                    MAX_BUFFER_SIZE and self wake up would be busy poll
-                    dispatcher and waste resource.
-                    */
+                    // When dispatcher has a payload the responsibility of wake up it would be shift
+                    // to h1::payload::Payload.
+                    //
+                    // Reason:
+                    // Self wake up when there is payload would waste poll and/or result in
+                    // over read.
+                    //
+                    // Case:
+                    // When payload is (partial) dropped by user there is no need to do
+                    // read anymore. At this case read_buf could always remain beyond
+                    // MAX_BUFFER_SIZE and self wake up would be busy poll dispatcher and
+                    // waste resources.
                     cx.waker().wake_by_ref();
                 }
@@ -1060,14 +1035,12 @@ mod tests {
         }
     }

     fn ok_service(
-    ) -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error>
-    {
+    ) -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error> {
         fn_service(|_req: Request| ready(Ok::<_, Error>(Response::ok())))
     }

     fn echo_path_service(
-    ) -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error>
-    {
+    ) -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error> {
         fn_service(|req: Request| {
             let path = req.path().as_bytes();
             ready(Ok::<_, Error>(
@@ -1076,8 +1049,8 @@ mod tests {
         })
     }

-    fn echo_payload_service(
-    ) -> impl Service<Request, Response = Response<Bytes>, Error = Error> {
+    fn echo_payload_service() -> impl Service<Request, Response = Response<Bytes>, Error = Error>
+    {
         fn_service(|mut req: Request| {
             Box::pin(async move {
                 use futures_util::stream::StreamExt as _;

View File

@@ -103,9 +103,7 @@ pub(crate) trait MessageType: Sized {
                     dst.put_slice(b"\r\n");
                 }
             }
-            BodySize::Sized(0) if camel_case => {
-                dst.put_slice(b"\r\nContent-Length: 0\r\n")
-            }
+            BodySize::Sized(0) if camel_case => dst.put_slice(b"\r\nContent-Length: 0\r\n"),
             BodySize::Sized(0) => dst.put_slice(b"\r\ncontent-length: 0\r\n"),
             BodySize::Sized(len) => helpers::write_content_length(len, dst),
             BodySize::None => dst.put_slice(b"\r\n"),
@@ -307,11 +305,7 @@ impl MessageType for RequestHeadType {
                 Version::HTTP_11 => "HTTP/1.1",
                 Version::HTTP_2 => "HTTP/2.0",
                 Version::HTTP_3 => "HTTP/3.0",
-                _ =>
-                    return Err(io::Error::new(
-                        io::ErrorKind::Other,
-                        "unsupported version"
-                    )),
+                _ => return Err(io::Error::new(io::ErrorKind::Other, "unsupported version")),
             }
         )
         .map_err(|e| io::Error::new(io::ErrorKind::Other, e))
@@ -568,8 +562,7 @@ mod tests {
            ConnectionType::Close,
            &ServiceConfig::default(),
        );
-        let data =
-            String::from_utf8(Vec::from(bytes.split().freeze().as_ref())).unwrap();
+        let data = String::from_utf8(Vec::from(bytes.split().freeze().as_ref())).unwrap();
        assert!(data.contains("Content-Length: 0\r\n"));
        assert!(data.contains("Connection: close\r\n"));
@@ -583,8 +576,7 @@ mod tests {
            ConnectionType::KeepAlive,
            &ServiceConfig::default(),
        );
-        let data =
-            String::from_utf8(Vec::from(bytes.split().freeze().as_ref())).unwrap();
+        let data = String::from_utf8(Vec::from(bytes.split().freeze().as_ref())).unwrap();
        assert!(data.contains("Transfer-Encoding: chunked\r\n"));
        assert!(data.contains("Content-Type: plain/text\r\n"));
        assert!(data.contains("Date: date\r\n"));
@@ -605,8 +597,7 @@ mod tests {
            ConnectionType::KeepAlive,
            &ServiceConfig::default(),
        );
-        let data =
-            String::from_utf8(Vec::from(bytes.split().freeze().as_ref())).unwrap();
+        let data = String::from_utf8(Vec::from(bytes.split().freeze().as_ref())).unwrap();
        assert!(data.contains("transfer-encoding: chunked\r\n"));
        assert!(data.contains("content-type: xml\r\n"));
        assert!(data.contains("content-type: plain/text\r\n"));
@@ -639,8 +630,7 @@ mod tests {
            ConnectionType::Close,
            &ServiceConfig::default(),
        );
-        let data =
-            String::from_utf8(Vec::from(bytes.split().freeze().as_ref())).unwrap();
+        let data = String::from_utf8(Vec::from(bytes.split().freeze().as_ref())).unwrap();
        assert!(data.contains("content-length: 0\r\n"));
        assert!(data.contains("connection: close\r\n"));
        assert!(data.contains("authorization: another authorization\r\n"));
@@ -663,8 +653,7 @@ mod tests {
            ConnectionType::Upgrade,
            &ServiceConfig::default(),
        );
-        let data =
-            String::from_utf8(Vec::from(bytes.split().freeze().as_ref())).unwrap();
+        let data = String::from_utf8(Vec::from(bytes.split().freeze().as_ref())).unwrap();
        assert!(!data.contains("content-length: 0\r\n"));
        assert!(!data.contains("transfer-encoding: chunked\r\n"));
    }

View File

@@ -227,10 +227,7 @@ impl Inner {
         self.len
     }

-    fn readany(
-        &mut self,
-        cx: &mut Context<'_>,
-    ) -> Poll<Option<Result<Bytes, PayloadError>>> {
+    fn readany(&mut self, cx: &mut Context<'_>) -> Poll<Option<Result<Bytes, PayloadError>>> {
         if let Some(data) = self.items.pop_front() {
             self.len -= data.len();
             self.need_read = self.len < MAX_BUFFER_SIZE;

View File

@@ -266,8 +266,7 @@ where
     }
 }

-impl<T, S, B, X, U> ServiceFactory<(T, Option<net::SocketAddr>)>
-    for H1Service<T, S, B, X, U>
+impl<T, S, B, X, U> ServiceFactory<(T, Option<net::SocketAddr>)> for H1Service<T, S, B, X, U>
 where
     T: AsyncRead + AsyncWrite + Unpin + 'static,
@@ -310,9 +309,9 @@ where
         let upgrade = match upgrade {
             Some(upgrade) => {
-                let upgrade = upgrade.await.map_err(|e| {
-                    log::error!("Init http upgrade service error: {:?}", e)
-                })?;
+                let upgrade = upgrade
+                    .await
+                    .map_err(|e| log::error!("Init http upgrade service error: {:?}", e))?;
                 Some(upgrade)
             }
             None => None,
@@ -336,8 +335,7 @@ where
 /// `Service` implementation for HTTP/1 transport
 pub type H1ServiceHandler<T, S, B, X, U> = HttpServiceHandler<T, S, B, X, U>;

-impl<T, S, B, X, U> Service<(T, Option<net::SocketAddr>)>
-    for HttpServiceHandler<T, S, B, X, U>
+impl<T, S, B, X, U> Service<(T, Option<net::SocketAddr>)> for HttpServiceHandler<T, S, B, X, U>
 where
     T: AsyncRead + AsyncWrite + Unpin,

View File

@@ -70,12 +70,9 @@ where
                 .unwrap()
                 .is_write_buf_full()
             {
-                let next =
-                    match this.body.as_mut().as_pin_mut().unwrap().poll_next(cx) {
-                        Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)),
-                        Poll::Ready(Some(Err(err))) => {
-                            return Poll::Ready(Err(err.into()))
-                        }
-                        Poll::Ready(None) => Poll::Ready(None),
-                        Poll::Pending => Poll::Pending,
-                    };
+                let next = match this.body.as_mut().as_pin_mut().unwrap().poll_next(cx) {
+                    Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)),
+                    Poll::Ready(Some(Err(err))) => return Poll::Ready(Err(err.into())),
+                    Poll::Ready(None) => Poll::Ready(None),
+                    Poll::Pending => Poll::Pending,
+                };
@@ -88,9 +85,9 @@ where
                         let _ = this.body.take();
                     }
                     let framed = this.framed.as_mut().as_pin_mut().unwrap();
-                    framed.write(Message::Chunk(item)).map_err(|err| {
-                        Error::new_send_response().with_cause(err)
-                    })?;
+                    framed
+                        .write(Message::Chunk(item))
+                        .map_err(|err| Error::new_send_response().with_cause(err))?;
                 }
                 Poll::Pending => body_ready = false,
             }

View File

@@ -109,7 +109,7 @@ where
             Poll::Ready(Some((req, tx))) => {
                 let (parts, body) = req.into_parts();
                 let pl = crate::h2::Payload::new(body);
-                let pl = Payload::<crate::payload::PayloadStream>::H2(pl);
+                let pl = Payload::H2(pl);
                 let mut req = Request::with_payload(pl);

                 let head = req.head_mut();
@@ -160,16 +160,11 @@ where
                         Poll::Ready(_) => {
                             ping_pong.on_flight = false;

-                            let dead_line =
-                                this.config.keep_alive_expire().unwrap();
+                            let dead_line = this.config.keep_alive_expire().unwrap();
                             ping_pong.timer.as_mut().reset(dead_line);
                         }
                         Poll::Pending => {
-                            return ping_pong
-                                .timer
-                                .as_mut()
-                                .poll(cx)
-                                .map(|_| Ok(()))
+                            return ping_pong.timer.as_mut().poll(cx).map(|_| Ok(()))
                         }
                     }
                 } else {

View File

@@ -40,10 +40,7 @@ impl Payload {
 impl Stream for Payload {
     type Item = Result<Bytes, PayloadError>;

-    fn poll_next(
-        self: Pin<&mut Self>,
-        cx: &mut Context<'_>,
-    ) -> Poll<Option<Self::Item>> {
+    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
         let this = self.get_mut();

         match ready!(Pin::new(&mut this.stream).poll_data(cx)) {

View File

@@ -10,8 +10,7 @@ use std::{
 use actix_codec::{AsyncRead, AsyncWrite};
 use actix_rt::net::TcpStream;
 use actix_service::{
-    fn_factory, fn_service, IntoServiceFactory, Service, ServiceFactory,
-    ServiceFactoryExt as _,
+    fn_factory, fn_service, IntoServiceFactory, Service, ServiceFactory, ServiceFactoryExt as _,
 };
 use actix_utils::future::ready;
 use futures_core::{future::LocalBoxFuture, ready};
@@ -279,8 +278,7 @@ where
     }

     fn call(&self, (io, addr): (T, Option<net::SocketAddr>)) -> Self::Future {
-        let on_connect_data =
-            OnConnectData::from_io(&io, self.on_connect_ext.as_deref());
+        let on_connect_data = OnConnectData::from_io(&io, self.on_connect_ext.as_deref());

         H2ServiceHandlerResponse {
             state: State::Handshake(

View File

@@ -6,7 +6,7 @@ use http::header::{HeaderName, InvalidHeaderName};
 /// Sealed trait implemented for types that can be effectively borrowed as a [`HeaderValue`].
 ///
-/// [`HeaderValue`]: crate::http::HeaderValue
+/// [`HeaderValue`]: super::HeaderValue
 pub trait AsHeaderName: Sealed {}

 pub struct Seal;

View File

@@ -12,7 +12,7 @@ use super::{Header, IntoHeaderValue};
 /// An interface for types that can be converted into a [`HeaderName`]/[`HeaderValue`] pair for
 /// insertion into a [`HeaderMap`].
 ///
-/// [`HeaderMap`]: crate::http::HeaderMap
+/// [`HeaderMap`]: super::HeaderMap
 pub trait IntoHeaderPair: Sized {
     type Error: Into<HttpError>;

View File

@@ -123,8 +123,7 @@ impl HeaderMap {
         let mut map = HeaderMap::with_capacity(capacity);
         map.append(first_name.clone(), first_value);

-        let (map, _) =
-            drain.fold((map, first_name), |(mut map, prev_name), (name, value)| {
+        let (map, _) = drain.fold((map, first_name), |(mut map, prev_name), (name, value)| {
             let name = name.unwrap_or(prev_name);
             map.append(name.clone(), value);
             (map, name)

View File

@@ -11,22 +11,20 @@ pub use http::header::{
 pub use http::header::{
     ACCEPT, ACCEPT_CHARSET, ACCEPT_ENCODING, ACCEPT_LANGUAGE, ACCEPT_RANGES,
     ACCESS_CONTROL_ALLOW_CREDENTIALS, ACCESS_CONTROL_ALLOW_HEADERS,
-    ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN,
-    ACCESS_CONTROL_EXPOSE_HEADERS, ACCESS_CONTROL_MAX_AGE,
-    ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD, AGE, ALLOW, ALT_SVC,
-    AUTHORIZATION, CACHE_CONTROL, CONNECTION, CONTENT_DISPOSITION, CONTENT_ENCODING,
-    CONTENT_LANGUAGE, CONTENT_LENGTH, CONTENT_LOCATION, CONTENT_RANGE,
-    CONTENT_SECURITY_POLICY, CONTENT_SECURITY_POLICY_REPORT_ONLY, CONTENT_TYPE, COOKIE,
-    DATE, DNT, ETAG, EXPECT, EXPIRES, FORWARDED, FROM, HOST, IF_MATCH,
-    IF_MODIFIED_SINCE, IF_NONE_MATCH, IF_RANGE, IF_UNMODIFIED_SINCE, LAST_MODIFIED,
-    LINK, LOCATION, MAX_FORWARDS, ORIGIN, PRAGMA, PROXY_AUTHENTICATE,
-    PROXY_AUTHORIZATION, PUBLIC_KEY_PINS, PUBLIC_KEY_PINS_REPORT_ONLY, RANGE, REFERER,
-    REFERRER_POLICY, REFRESH, RETRY_AFTER, SEC_WEBSOCKET_ACCEPT,
-    SEC_WEBSOCKET_EXTENSIONS, SEC_WEBSOCKET_KEY, SEC_WEBSOCKET_PROTOCOL,
+    ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN, ACCESS_CONTROL_EXPOSE_HEADERS,
+    ACCESS_CONTROL_MAX_AGE, ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD, AGE,
+    ALLOW, ALT_SVC, AUTHORIZATION, CACHE_CONTROL, CONNECTION, CONTENT_DISPOSITION,
+    CONTENT_ENCODING, CONTENT_LANGUAGE, CONTENT_LENGTH, CONTENT_LOCATION, CONTENT_RANGE,
+    CONTENT_SECURITY_POLICY, CONTENT_SECURITY_POLICY_REPORT_ONLY, CONTENT_TYPE, COOKIE, DATE,
+    DNT, ETAG, EXPECT, EXPIRES, FORWARDED, FROM, HOST, IF_MATCH, IF_MODIFIED_SINCE,
+    IF_NONE_MATCH, IF_RANGE, IF_UNMODIFIED_SINCE, LAST_MODIFIED, LINK, LOCATION, MAX_FORWARDS,
+    ORIGIN, PRAGMA, PROXY_AUTHENTICATE, PROXY_AUTHORIZATION, PUBLIC_KEY_PINS,
+    PUBLIC_KEY_PINS_REPORT_ONLY, RANGE, REFERER, REFERRER_POLICY, REFRESH, RETRY_AFTER,
+    SEC_WEBSOCKET_ACCEPT, SEC_WEBSOCKET_EXTENSIONS, SEC_WEBSOCKET_KEY, SEC_WEBSOCKET_PROTOCOL,
     SEC_WEBSOCKET_VERSION, SERVER, SET_COOKIE, STRICT_TRANSPORT_SECURITY, TE, TRAILER,
-    TRANSFER_ENCODING, UPGRADE, UPGRADE_INSECURE_REQUESTS, USER_AGENT, VARY, VIA,
-    WARNING, WWW_AUTHENTICATE, X_CONTENT_TYPE_OPTIONS, X_DNS_PREFETCH_CONTROL,
-    X_FRAME_OPTIONS, X_XSS_PROTECTION,
+    TRANSFER_ENCODING, UPGRADE, UPGRADE_INSECURE_REQUESTS, USER_AGENT, VARY, VIA, WARNING,
+    WWW_AUTHENTICATE, X_CONTENT_TYPE_OPTIONS, X_DNS_PREFETCH_CONTROL, X_FRAME_OPTIONS,
+    X_XSS_PROTECTION,
 };

 use crate::{error::ParseError, HttpMessage};
@@ -43,8 +41,8 @@ pub use self::into_pair::IntoHeaderPair;
 pub use self::into_value::IntoHeaderValue;
 pub use self::map::HeaderMap;
 pub use self::shared::{
-    parse_extended_value, q, Charset, ContentEncoding, ExtendedValue, HttpDate,
-    LanguageTag, Quality, QualityItem,
+    parse_extended_value, q, Charset, ContentEncoding, ExtendedValue, HttpDate, LanguageTag,
+    Quality, QualityItem,
 };
 pub use self::utils::{
     fmt_comma_delimited, from_comma_delimited, from_one_raw_str, http_percent_encode,

View File

@@ -63,9 +63,7 @@ pub struct ExtendedValue {
 /// [RFC 2231 §7]: https://datatracker.ietf.org/doc/html/rfc2231#section-7
 /// [RFC 2978 §2.3]: https://datatracker.ietf.org/doc/html/rfc2978#section-2.3
 /// [RFC 3986 §2.1]: https://datatracker.ietf.org/doc/html/rfc5646#section-2.1
-pub fn parse_extended_value(
-    val: &str,
-) -> Result<ExtendedValue, crate::error::ParseError> {
+pub fn parse_extended_value(val: &str) -> Result<ExtendedValue, crate::error::ParseError> {
     // Break into three pieces separated by the single-quote character
     let mut parts = val.splitn(3, '\'');
@@ -100,8 +98,7 @@ pub fn parse_extended_value(
 impl fmt::Display for ExtendedValue {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let encoded_value =
-            percent_encoding::percent_encode(&self.value[..], HTTP_VALUE);
+        let encoded_value = percent_encoding::percent_encode(&self.value[..], HTTP_VALUE);
         if let Some(ref lang) = self.language_tag {
             write!(f, "{}'{}'{}", self.charset, lang, encoded_value)
         } else {
@@ -143,8 +140,8 @@ mod tests {
         assert!(extended_value.language_tag.is_none());
         assert_eq!(
             vec![
-                194, 163, b' ', b'a', b'n', b'd', b' ', 226, 130, 172, b' ', b'r', b'a',
-                b't', b'e', b's',
+                194, 163, b' ', b'a', b'n', b'd', b' ', 226, 130, 172, b' ', b'r', b'a', b't',
+                b'e', b's',
             ],
             extended_value.value
         );
@@ -185,8 +182,8 @@ mod tests {
             charset: Charset::Ext("UTF-8".to_string()),
             language_tag: None,
             value: vec![
-                194, 163, b' ', b'a', b'n', b'd', b' ', 226, 130, 172, b' ', b'r', b'a',
-                b't', b'e', b's',
+                194, 163, b' ', b'a', b'n', b'd', b' ', 226, 130, 172, b' ', b'r', b'a', b't',
+                b'e', b's',
             ],
         };
         assert_eq!(

View File

@@ -4,8 +4,7 @@ use bytes::BytesMut;
 use http::header::{HeaderValue, InvalidHeaderValue};

 use crate::{
-    config::DATE_VALUE_LENGTH, error::ParseError, header::IntoHeaderValue,
-    helpers::MutWriter,
+    config::DATE_VALUE_LENGTH, error::ParseError, header::IntoHeaderValue, helpers::MutWriter,
 };

 /// A timestamp with HTTP-style formatting and parsing.

View File

@@ -120,8 +120,7 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T> {
                 }

                 let q_value = q_val.parse::<f32>().map_err(|_| ParseError::Header)?;
-                let q_value =
-                    Quality::try_from(q_value).map_err(|_| ParseError::Header)?;
+                let q_value = Quality::try_from(q_value).map_err(|_| ParseError::Header)?;

                 quality = q_value;
                 raw_item = val;

View File

@@ -14,7 +14,8 @@
 //! [rustls]: https://crates.io/crates/rustls
 //! [trust-dns]: https://crates.io/crates/trust-dns

-#![deny(rust_2018_idioms, nonstandard_style, clippy::uninit_assumed_init)]
+#![deny(rust_2018_idioms, nonstandard_style)]
+#![warn(future_incompatible)]
 #![allow(
     clippy::type_complexity,
     clippy::too_many_arguments,
@@ -87,10 +88,7 @@ pub(crate) struct OnConnectData(Option<Extensions>);
 impl OnConnectData {
     /// Construct by calling the on-connect callback with the underlying transport I/O.
-    pub(crate) fn from_io<T>(
-        io: &T,
-        on_connect_ext: Option<&ConnectCallback<T>>,
-    ) -> Self {
+    pub(crate) fn from_io<T>(io: &T, on_connect_ext: Option<&ConnectCallback<T>>) -> Self {
         let ext = on_connect_ext.map(|handler| {
             let mut extensions = Extensions::default();
             handler(io, &mut extensions);

View File

@@ -56,10 +56,7 @@ where
     type Item = Result<Bytes, PayloadError>;

     #[inline]
-    fn poll_next(
-        self: Pin<&mut Self>,
-        cx: &mut Context<'_>,
-    ) -> Poll<Option<Self::Item>> {
+    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
         match self.get_mut() {
             Payload::None => Poll::Ready(None),
             Payload::H1(ref mut pl) => pl.readany(cx),

View File

@@ -231,9 +231,7 @@ impl<B: Default> Default for Response<B> {
     }
 }

-impl<I: Into<Response<BoxBody>>, E: Into<Error>> From<Result<I, E>>
-    for Response<BoxBody>
-{
+impl<I: Into<Response<BoxBody>>, E: Into<Error>> From<Result<I, E>> for Response<BoxBody> {
     fn from(res: Result<I, E>) -> Self {
         match res {
             Ok(val) => val.into(),

View File

@@ -47,7 +47,8 @@ impl ResponseBuilder {
     /// Create response builder
     ///
     /// # Examples
-    // /// use actix_http::{Response, ResponseBuilder, StatusCode};, / ``
+    /// ```
+    /// use actix_http::{Response, ResponseBuilder, StatusCode};
     /// let res: Response<_> = ResponseBuilder::default().finish();
     /// assert_eq!(res.status(), StatusCode::OK);
     /// ```
@@ -62,7 +63,8 @@ impl ResponseBuilder {
     /// Set HTTP status code of this response.
     ///
     /// # Examples
-    // /// use actix_http::{ResponseBuilder, StatusCode};, / ``
+    /// ```
+    /// use actix_http::{ResponseBuilder, StatusCode};
     /// let res = ResponseBuilder::default().status(StatusCode::NOT_FOUND).finish();
     /// assert_eq!(res.status(), StatusCode::NOT_FOUND);
     /// ```

View File

@@ -161,11 +161,7 @@ where
     X::Error: Into<Response<BoxBody>>,
     X::InitError: fmt::Debug,

-    U: ServiceFactory<
-        (Request, Framed<TcpStream, h1::Codec>),
-        Config = (),
-        Response = (),
-    >,
+    U: ServiceFactory<(Request, Framed<TcpStream, h1::Codec>), Config = (), Response = ()>,
     U::Future: 'static,
     U::Error: fmt::Display + Into<Response<BoxBody>>,
     U::InitError: fmt::Debug,
@@ -381,9 +377,9 @@ where
         let upgrade = match upgrade {
             Some(upgrade) => {
-                let upgrade = upgrade.await.map_err(|e| {
-                    log::error!("Init http upgrade service error: {:?}", e)
-                })?;
+                let upgrade = upgrade
+                    .await
+                    .map_err(|e| log::error!("Init http upgrade service error: {:?}", e))?;
                 Some(upgrade)
             }
             None => None,
@@ -626,8 +622,7 @@ where
             StateProj::H2Handshake { handshake: data } => {
                 match ready!(Pin::new(&mut data.as_mut().unwrap().0).poll(cx)) {
                     Ok((conn, timer)) => {
-                        let (_, config, flow, conn_data, peer_addr) =
-                            data.take().unwrap();
+                        let (_, config, flow, conn_data, peer_addr) = data.take().unwrap();

                         self.as_mut().project().state.set(State::H2 {
                             dispatcher: h2::Dispatcher::new(

View File

@@ -224,9 +224,7 @@ impl Decoder for Codec {
                 OpCode::Continue => {
                     if self.flags.contains(Flags::CONTINUATION) {
                         Ok(Some(Frame::Continuation(Item::Continue(
-                            payload
-                                .map(|pl| pl.freeze())
-                                .unwrap_or_else(Bytes::new),
+                            payload.map(|pl| pl.freeze()).unwrap_or_else(Bytes::new),
                         ))))
                     } else {
                         Err(ProtocolError::ContinuationNotStarted)
@@ -236,9 +234,7 @@ impl Decoder for Codec {
                     if !self.flags.contains(Flags::CONTINUATION) {
                         self.flags.insert(Flags::CONTINUATION);
                         Ok(Some(Frame::Continuation(Item::FirstBinary(
-                            payload
-                                .map(|pl| pl.freeze())
-                                .unwrap_or_else(Bytes::new),
+                            payload.map(|pl| pl.freeze()).unwrap_or_else(Bytes::new),
                         ))))
                     } else {
                         Err(ProtocolError::ContinuationStarted)
@@ -248,9 +244,7 @@ impl Decoder for Codec {
                     if !self.flags.contains(Flags::CONTINUATION) {
                         self.flags.insert(Flags::CONTINUATION);
                         Ok(Some(Frame::Continuation(Item::FirstText(
-                            payload
-                                .map(|pl| pl.freeze())
-                                .unwrap_or_else(Bytes::new),
+                            payload.map(|pl| pl.freeze()).unwrap_or_else(Bytes::new),
                         ))))
                     } else {
                         Err(ProtocolError::ContinuationStarted)

View File

@@ -304,8 +304,7 @@ mod inner {
                 let item = match this.framed.next_item(cx) {
                     Poll::Ready(Some(Ok(el))) => el,
                     Poll::Ready(Some(Err(err))) => {
-                        *this.state =
-                            State::FramedError(DispatcherError::Decoder(err));
+                        *this.state = State::FramedError(DispatcherError::Decoder(err));
                         return true;
                     }
                     Poll::Pending => return false,
@@ -348,8 +347,7 @@ mod inner {
             match Pin::new(&mut this.rx).poll_next(cx) {
                 Poll::Ready(Some(Ok(Message::Item(msg)))) => {
                     if let Err(err) = this.framed.as_mut().write(msg) {
-                        *this.state =
-                            State::FramedError(DispatcherError::Encoder(err));
+                        *this.state = State::FramedError(DispatcherError::Encoder(err));
                         return true;
                     }
                 }
@@ -371,8 +369,7 @@ mod inner {
                 Poll::Ready(Ok(_)) => {}
                 Poll::Ready(Err(err)) => {
                     debug!("Error sending data: {:?}", err);
-                    *this.state =
-                        State::FramedError(DispatcherError::Encoder(err));
+                    *this.state = State::FramedError(DispatcherError::Encoder(err));
                     return true;
                 }
             }
@@ -432,9 +429,7 @@ mod inner {
                     Poll::Ready(Ok(()))
                 }
             }
-            State::FramedError(_) => {
-                Poll::Ready(Err(this.state.take_framed_error()))
-            }
+            State::FramedError(_) => Poll::Ready(Err(this.state.take_framed_error())),
             State::Stopping => Poll::Ready(Ok(())),
         };
     }

View File

@@ -16,8 +16,7 @@ impl Parser {
         src: &[u8],
         server: bool,
         max_size: usize,
-    ) -> Result<Option<(usize, bool, OpCode, usize, Option<[u8; 4]>)>, ProtocolError>
-    {
+    ) -> Result<Option<(usize, bool, OpCode, usize, Option<[u8; 4]>)>, ProtocolError> {
         let chunk_len = src.len();

         let mut idx = 2;
@@ -228,15 +227,11 @@ mod tests {
         payload: Bytes,
     }

-    fn is_none(
-        frm: &Result<Option<(bool, OpCode, Option<BytesMut>)>, ProtocolError>,
-    ) -> bool {
+    fn is_none(frm: &Result<Option<(bool, OpCode, Option<BytesMut>)>, ProtocolError>) -> bool {
         matches!(*frm, Ok(None))
     }

-    fn extract(
-        frm: Result<Option<(bool, OpCode, Option<BytesMut>)>, ProtocolError>,
-    ) -> F {
+    fn extract(frm: Result<Option<(bool, OpCode, Option<BytesMut>)>, ProtocolError>) -> F {
         match frm {
             Ok(Some((finished, opcode, payload))) => F {
                 finished,

View File

@@ -54,8 +54,8 @@ mod tests {
         let mask = [0x6d, 0xb6, 0xb2, 0x80];
         let unmasked = vec![
-            0xf3, 0x00, 0x01, 0x02, 0x03, 0x80, 0x81, 0x82, 0xff, 0xfe, 0x00, 0x17,
-            0x74, 0xf9, 0x12, 0x03,
+            0xf3, 0x00, 0x01, 0x02, 0x03, 0x80, 0x81, 0x82, 0xff, 0xfe, 0x00, 0x17, 0x74, 0xf9,
+            0x12, 0x03,
         ];

         // Check masking with proper alignment.
@@ -85,8 +85,8 @@ mod tests {
     fn test_apply_mask() {
         let mask = [0x6d, 0xb6, 0xb2, 0x80];
         let unmasked = vec![
-            0xf3, 0x00, 0x01, 0x02, 0x03, 0x80, 0x81, 0x82, 0xff, 0xfe, 0x00, 0x17,
-            0x74, 0xf9, 0x12, 0x03,
+            0xf3, 0x00, 0x01, 0x02, 0x03, 0x80, 0x81, 0x82, 0xff, 0xfe, 0x00, 0x17, 0x74, 0xf9,
+            0x12, 0x03,
         ];

         for data_len in 0..=unmasked.len() {

View File

@@ -9,9 +9,7 @@ use derive_more::{Display, Error, From};
 use http::{header, Method, StatusCode};

 use crate::body::BoxBody;
-use crate::{
-    header::HeaderValue, message::RequestHead, response::Response, ResponseBuilder,
-};
+use crate::{header::HeaderValue, message::RequestHead, response::Response, ResponseBuilder};

 mod codec;
 mod dispatcher;

View File

@@ -1,8 +1,6 @@
 use std::convert::Infallible;

-use actix_http::{
-    body::BoxBody, HttpMessage, HttpService, Request, Response, StatusCode,
-};
+use actix_http::{body::BoxBody, HttpMessage, HttpService, Request, Response, StatusCode};
 use actix_http_test::test_server;
 use actix_service::ServiceFactoryExt;
 use actix_utils::future;

View File

@@ -170,7 +170,8 @@ async fn test_h2_headers() {
     let mut srv = test_server(move || {
         let data = data.clone();
-        HttpService::build().h2(move |_| {
+        HttpService::build()
+            .h2(move |_| {
                 let mut builder = Response::build(StatusCode::OK);
                 for idx in 0..90 {
                     builder.insert_header(
@@ -194,7 +195,8 @@ async fn test_h2_headers() {
             })
             .openssl(tls_config())
             .map_err(|_| ())
-    }).await;
+    })
+    .await;

     let response = srv.sget("/").send().await.unwrap();
     assert!(response.status().is_success());
@@ -315,9 +317,8 @@ async fn test_h2_body_length() {
     let mut srv = test_server(move || {
         HttpService::build()
             .h2(|_| async {
-                let body = once(async {
-                    Ok::<_, Infallible>(Bytes::from_static(STR.as_ref()))
-                });
+                let body =
+                    once(async { Ok::<_, Infallible>(Bytes::from_static(STR.as_ref())) });

                 Ok::<_, Infallible>(
                     Response::ok().set_body(SizedStream::new(STR.len() as u64, body)),

View File

@@ -238,7 +238,8 @@ async fn test_h2_headers() {
     let mut srv = test_server(move || {
         let data = data.clone();
-        HttpService::build().h2(move |_| {
+        HttpService::build()
+            .h2(move |_| {
                 let mut config = Response::build(StatusCode::OK);
                 for idx in 0..90 {
                     config.insert_header((
@@ -261,7 +262,8 @@ async fn test_h2_headers() {
                 ok::<_, Infallible>(config.body(data.clone()))
             })
             .rustls(tls_config())
-    }).await;
+    })
+    .await;

     let response = srv.sget("/").send().await.unwrap();
     assert!(response.status().is_success());

View File

@ -154,9 +154,7 @@ async fn test_chunked_payload() {
}) })
.fold(0usize, |acc, chunk| ready(acc + chunk.len())) .fold(0usize, |acc, chunk| ready(acc + chunk.len()))
.map(|req_size| { .map(|req_size| {
Ok::<_, Error>( Ok::<_, Error>(Response::ok().set_body(format!("size={}", req_size)))
Response::ok().set_body(format!("size={}", req_size)),
)
}) })
})) }))
.tcp() .tcp()
@ -165,8 +163,7 @@ async fn test_chunked_payload() {
let returned_size = { let returned_size = {
let mut stream = net::TcpStream::connect(srv.addr()).unwrap(); let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream let _ = stream.write_all(b"POST /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n");
.write_all(b"POST /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n");
for chunk_size in chunk_sizes.iter() { for chunk_size in chunk_sizes.iter() {
let mut bytes = Vec::new(); let mut bytes = Vec::new();
@ -293,8 +290,7 @@ async fn test_http1_keepalive_close() {
.await; .await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap(); let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = let _ = stream.write_all(b"GET /test/tests/test HTTP/1.1\r\nconnection: close\r\n\r\n");
stream.write_all(b"GET /test/tests/test HTTP/1.1\r\nconnection: close\r\n\r\n");
let mut data = vec![0; 1024]; let mut data = vec![0; 1024];
let _ = stream.read(&mut data); let _ = stream.read(&mut data);
assert_eq!(&data[..17], b"HTTP/1.1 200 OK\r\n"); assert_eq!(&data[..17], b"HTTP/1.1 200 OK\r\n");
@ -338,8 +334,8 @@ async fn test_http10_keepalive() {
.await; .await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap(); let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream let _ =
.write_all(b"GET /test/tests/test HTTP/1.0\r\nconnection: keep-alive\r\n\r\n"); stream.write_all(b"GET /test/tests/test HTTP/1.0\r\nconnection: keep-alive\r\n\r\n");
let mut data = vec![0; 1024]; let mut data = vec![0; 1024];
let _ = stream.read(&mut data); let _ = stream.read(&mut data);
assert_eq!(&data[..17], b"HTTP/1.0 200 OK\r\n"); assert_eq!(&data[..17], b"HTTP/1.0 200 OK\r\n");
@ -436,7 +432,8 @@ async fn test_h1_headers() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
let data = data.clone(); let data = data.clone();
HttpService::build().h1(move |_| { HttpService::build()
.h1(move |_| {
let mut builder = Response::build(StatusCode::OK); let mut builder = Response::build(StatusCode::OK);
for idx in 0..90 { for idx in 0..90 {
builder.insert_header(( builder.insert_header((
@ -457,8 +454,10 @@ async fn test_h1_headers() {
)); ));
} }
ok::<_, Infallible>(builder.body(data.clone())) ok::<_, Infallible>(builder.body(data.clone()))
}).tcp() })
}).await; .tcp()
})
.await;
let response = srv.get("/").send().await.unwrap(); let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success()); assert!(response.status().is_success());
@ -655,9 +654,7 @@ async fn test_h1_body_chunked_implicit() {
HttpService::build() HttpService::build()
.h1(|_| { .h1(|_| {
let body = once(ok::<_, Error>(Bytes::from_static(STR.as_ref()))); let body = once(ok::<_, Error>(Bytes::from_static(STR.as_ref())));
ok::<_, Infallible>( ok::<_, Infallible>(Response::build(StatusCode::OK).body(BodyStream::new(body)))
Response::build(StatusCode::OK).body(BodyStream::new(body)),
)
}) })
.tcp() .tcp()
}) })
@ -776,10 +773,8 @@ async fn test_not_modified_spec_h1() {
.h1(|req: Request| { .h1(|req: Request| {
let res: Response<BoxBody> = match req.path() { let res: Response<BoxBody> = match req.path() {
// with no content-length // with no content-length
"/none" => { "/none" => Response::with_body(StatusCode::NOT_MODIFIED, body::None::new())
Response::with_body(StatusCode::NOT_MODIFIED, body::None::new()) .map_into_boxed_body(),
.map_into_boxed_body()
}
// with no content-length // with no content-length
"/body" => Response::with_body(StatusCode::NOT_MODIFIED, "1234") "/body" => Response::with_body(StatusCode::NOT_MODIFIED, "1234")
@ -787,10 +782,8 @@ async fn test_not_modified_spec_h1() {
// with manual content-length header and specific None body // with manual content-length header and specific None body
"/cl-none" => { "/cl-none" => {
let mut res = Response::with_body( let mut res =
StatusCode::NOT_MODIFIED, Response::with_body(StatusCode::NOT_MODIFIED, body::None::new());
body::None::new(),
);
res.headers_mut() res.headers_mut()
.insert(CL.clone(), header::HeaderValue::from_static("24")); .insert(CL.clone(), header::HeaderValue::from_static("24"));
res.map_into_boxed_body() res.map_into_boxed_body()
@ -798,8 +791,7 @@ async fn test_not_modified_spec_h1() {
// with manual content-length header and ignore-able body // with manual content-length header and ignore-able body
"/cl-body" => { "/cl-body" => {
let mut res = let mut res = Response::with_body(StatusCode::NOT_MODIFIED, "1234");
Response::with_body(StatusCode::NOT_MODIFIED, "1234");
res.headers_mut() res.headers_mut()
.insert(CL.clone(), header::HeaderValue::from_static("4")); .insert(CL.clone(), header::HeaderValue::from_static("4"));
res.map_into_boxed_body() res.map_into_boxed_body()
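Editor's note: the `test_not_modified_spec_h1` hunks above are pure re-wraps, but the pattern they touch is worth spelling out: a 304 response can advertise a `Content-Length` while writing no payload at all by pairing a manual header with `body::None`. A rough stand-alone sketch of that pattern, assuming the actix-http 3 API used elsewhere in this diff:

```rust
use actix_http::{body, header, Response, StatusCode};

fn not_modified_with_length() -> Response<body::BoxBody> {
    // `body::None` means nothing is written on the wire, yet the manually
    // inserted Content-Length header is still sent — the 304 semantics the test checks.
    let mut res = Response::with_body(StatusCode::NOT_MODIFIED, body::None::new());
    res.headers_mut()
        .insert(header::CONTENT_LENGTH, header::HeaderValue::from_static("24"));
    res.map_into_boxed_body()
}
```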

View File

@ -56,8 +56,9 @@ impl From<WsServiceError> for Response<BoxBody> {
WsServiceError::Http(err) => err.into(), WsServiceError::Http(err) => err.into(),
WsServiceError::Ws(err) => err.into(), WsServiceError::Ws(err) => err.into(),
WsServiceError::Io(_err) => unreachable!(), WsServiceError::Io(_err) => unreachable!(),
WsServiceError::Dispatcher => Response::internal_server_error() WsServiceError::Dispatcher => {
.set_body(BoxBody::new(format!("{}", err))), Response::internal_server_error().set_body(BoxBody::new(format!("{}", err)))
}
} }
} }
} }
@ -97,9 +98,7 @@ where
async fn service(msg: Frame) -> Result<Message, Error> { async fn service(msg: Frame) -> Result<Message, Error> {
let msg = match msg { let msg = match msg {
Frame::Ping(msg) => Message::Pong(msg), Frame::Ping(msg) => Message::Pong(msg),
Frame::Text(text) => { Frame::Text(text) => Message::Text(String::from_utf8_lossy(&text).into_owned().into()),
Message::Text(String::from_utf8_lossy(&text).into_owned().into())
}
Frame::Binary(bin) => Message::Binary(bin), Frame::Binary(bin) => Message::Binary(bin),
Frame::Continuation(item) => Message::Continuation(item), Frame::Continuation(item) => Message::Continuation(item),
Frame::Close(reason) => Message::Close(reason), Frame::Close(reason) => Message::Close(reason),

View File

@ -1,6 +1,7 @@
//! Multipart form support for Actix Web. //! Multipart form support for Actix Web.
#![deny(rust_2018_idioms)] #![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![allow(clippy::borrow_interior_mutable_const)] #![allow(clippy::borrow_interior_mutable_const)]
mod error; mod error;

View File

@ -1,6 +1,7 @@
//! Resource path matching and router. //! Resource path matching and router.
#![deny(rust_2018_idioms, nonstandard_style)] #![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")] #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")] #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]

View File

@ -168,7 +168,7 @@ const REGEX_FLAGS: &str = "(?s-m)";
/// extracted in the same way as non-tail dynamic segments. /// extracted in the same way as non-tail dynamic segments.
/// ///
/// ## Examples /// ## Examples
/// ```rust /// ```
/// # use actix_router::{Path, ResourceDef}; /// # use actix_router::{Path, ResourceDef};
/// let resource = ResourceDef::new("/blob/{tail}*"); /// let resource = ResourceDef::new("/blob/{tail}*");
/// assert!(resource.is_match("/blob/HEAD/Cargo.toml")); /// assert!(resource.is_match("/blob/HEAD/Cargo.toml"));
@ -191,7 +191,7 @@ const REGEX_FLAGS: &str = "(?s-m)";
/// expectations in the router using these definitions and cause runtime panics. /// expectations in the router using these definitions and cause runtime panics.
/// ///
/// ## Examples /// ## Examples
/// ```rust /// ```
/// # use actix_router::ResourceDef; /// # use actix_router::ResourceDef;
/// let resource = ResourceDef::new(["/home", "/index"]); /// let resource = ResourceDef::new(["/home", "/index"]);
/// assert!(resource.is_match("/home")); /// assert!(resource.is_match("/home"));
@ -206,7 +206,7 @@ const REGEX_FLAGS: &str = "(?s-m)";
/// resource-path pairs that would not be compatible. /// resource-path pairs that would not be compatible.
/// ///
/// ## Examples /// ## Examples
/// ```rust /// ```
/// # use actix_router::ResourceDef; /// # use actix_router::ResourceDef;
/// assert!(!ResourceDef::new("/root").is_match("/root/")); /// assert!(!ResourceDef::new("/root").is_match("/root/"));
/// assert!(!ResourceDef::new("/root/").is_match("/root")); /// assert!(!ResourceDef::new("/root/").is_match("/root"));

View File

@ -26,6 +26,9 @@
//! } //! }
//! ``` //! ```
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#[cfg(feature = "openssl")] #[cfg(feature = "openssl")]
extern crate tls_openssl as openssl; extern crate tls_openssl as openssl;
#[cfg(feature = "rustls")] #[cfg(feature = "rustls")]

View File

@ -1,7 +1,7 @@
//! Actix actors support for Actix Web. //! Actix actors support for Actix Web.
#![deny(rust_2018_idioms)] #![deny(rust_2018_idioms, nonstandard_style)]
#![allow(clippy::borrow_interior_mutable_const)] #![warn(future_incompatible)]
mod context; mod context;
pub mod ws; pub mod ws;

View File

@ -57,6 +57,8 @@
//! [DELETE]: macro@delete //! [DELETE]: macro@delete
#![recursion_limit = "512"] #![recursion_limit = "512"]
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
use proc_macro::TokenStream; use proc_macro::TokenStream;
use quote::quote; use quote::quote;

View File

@ -1,7 +1,4 @@
extern crate proc_macro; use std::{collections::HashSet, convert::TryFrom};
use std::collections::HashSet;
use std::convert::TryFrom;
use actix_router::ResourceDef; use actix_router::ResourceDef;
use proc_macro::TokenStream; use proc_macro::TokenStream;

View File

@ -95,7 +95,8 @@
//! # } //! # }
//! ``` //! ```
#![deny(rust_2018_idioms)] #![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![allow( #![allow(
clippy::type_complexity, clippy::type_complexity,
clippy::borrow_interior_mutable_const, clippy::borrow_interior_mutable_const,

View File

@ -128,7 +128,7 @@ macro_rules! error_helper {
InternalError::new(err, StatusCode::$status).into() InternalError::new(err, StatusCode::$status).into()
} }
} }
} };
} }
error_helper!(ErrorBadRequest, BAD_REQUEST); error_helper!(ErrorBadRequest, BAD_REQUEST);
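Editor's note: the change above only adds the missing semicolon after the macro arm. For context, the helpers it expands to are used like this (a small sketch, not taken from the diff):

```rust
use actix_web::{error::ErrorBadRequest, http::StatusCode, Error};

fn validate(name: &str) -> Result<(), Error> {
    if name.is_empty() {
        // `ErrorBadRequest` wraps any Debug + Display value into an `Error`
        // whose response carries the corresponding status code.
        return Err(ErrorBadRequest("`name` must not be empty"));
    }
    Ok(())
}

fn main() {
    let err = validate("").unwrap_err();
    assert_eq!(err.as_response_error().status_code(), StatusCode::BAD_REQUEST);
}
```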

View File

@ -1,8 +1,9 @@
//! Error and Result module //! Error and Result module
// This is meant to be a glob import of the whole error module except for `Error`. Rustdoc can't yet
/// This is meant to be a glob import of the whole error module, but rustdoc can't handle // correctly resolve the conflicting `Error` type defined in this module, so these re-exports are
/// shadowing `Error` type, so it is expanded manually. // expanded manually.
/// See https://github.com/rust-lang/rust/issues/83375 //
// See <https://github.com/rust-lang/rust/issues/83375>
pub use actix_http::error::{ pub use actix_http::error::{
BlockingError, ContentTypeError, DispatchError, HttpError, ParseError, PayloadError, BlockingError, ContentTypeError, DispatchError, HttpError, ParseError, PayloadError,
}; };
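Editor's note: for readers unfamiliar with the workaround described in that comment, the sketch below contrasts the glob the module would like to use with the hand-written list it uses instead; the explanation in the comments paraphrases the diff and rust-lang/rust#83375, and the re-export list is just the excerpt already shown above:

```rust
// A wildcard import would bring in an `Error` that shadows the `Error` type
// this module defines, and rustdoc cannot yet resolve that clash:
//
//     pub use actix_http::error::*;
//
// so the re-exports are spelled out explicitly instead:
pub use actix_http::error::{
    BlockingError, ContentTypeError, DispatchError, HttpError, ParseError, PayloadError,
};
```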

View File

@ -65,6 +65,7 @@
//! * `secure-cookies` - secure cookies support //! * `secure-cookies` - secure cookies support
#![deny(rust_2018_idioms, nonstandard_style)] #![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![allow(clippy::needless_doctest_main, clippy::type_complexity)] #![allow(clippy::needless_doctest_main, clippy::type_complexity)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")] #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")] #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]

View File

@ -173,7 +173,7 @@ impl HttpRequest {
/// let opt_t = req.conn_data::<PeerCertificate>(); /// let opt_t = req.conn_data::<PeerCertificate>();
/// ``` /// ```
/// ///
/// [on-connect]: crate::HttpServiceBuilder::on_connect_ext /// [on-connect]: crate::HttpServer::on_connect
pub fn conn_data<T: 'static>(&self) -> Option<&T> { pub fn conn_data<T: 'static>(&self) -> Option<&T> {
self.inner self.inner
.conn_data .conn_data
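Editor's note: the doc-link fix above points `conn_data` at `HttpServer::on_connect`. To make the relationship concrete, here is a minimal, hedged sketch of the two sides working together — the `ConnInfo` type and the stored value are purely illustrative, and the closure signature assumes the actix-web 4 `on_connect(Fn(&dyn Any, &mut Extensions))` API:

```rust
use std::any::Any;

use actix_web::{web, App, HttpRequest, HttpResponse, HttpServer};

// Hypothetical per-connection data; in practice this might be a peer
// certificate or the client socket address.
struct ConnInfo(&'static str);

async fn handler(req: HttpRequest) -> HttpResponse {
    // Whatever `on_connect` stored for this connection is readable here.
    match req.conn_data::<ConnInfo>() {
        Some(info) => HttpResponse::Ok().body(info.0),
        None => HttpResponse::Ok().body("no connection data"),
    }
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().route("/", web::get().to(handler)))
        // Runs once per connection; anything inserted into the Extensions map
        // becomes visible to every request on that connection.
        .on_connect(|_conn: &dyn Any, ext| {
            ext.insert(ConnInfo("example"));
        })
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```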

View File

@ -109,6 +109,7 @@ impl HttpResponseBuilder {
} }
/// Replaced with [`Self::insert_header()`]. /// Replaced with [`Self::insert_header()`].
#[doc(hidden)]
#[deprecated( #[deprecated(
since = "4.0.0", since = "4.0.0",
note = "Replaced with `insert_header((key, value))`. Will be removed in v5." note = "Replaced with `insert_header((key, value))`. Will be removed in v5."
@ -133,6 +134,7 @@ impl HttpResponseBuilder {
} }
/// Replaced with [`Self::append_header()`]. /// Replaced with [`Self::append_header()`].
#[doc(hidden)]
#[deprecated( #[deprecated(
since = "4.0.0", since = "4.0.0",
note = "Replaced with `append_header((key, value))`. Will be removed in v5." note = "Replaced with `append_header((key, value))`. Will be removed in v5."