Mirror of https://github.com/fafhrd91/actix-web (synced 2025-07-05 02:16:33 +02:00)

Compare commits: web-v3.3.0 ... web-v3.3.3 (9 commits)

Commits included:
0669ed0f06, c9c36679e4, 655d7b4f05, 24d525d978, 1f70ef155d, 7981e0068a, 32d59ca904, ea8bf36104, 0b5b463cfa
CHANGES.md (22 lines changed)
@@ -3,6 +3,28 @@
 ## Unreleased - 2020-xx-xx
 
 
+## 3.3.3 - 2021-12-18
+### Changed
+* Soft-deprecate `NormalizePath::default()`, noting upcoming behavior change in v4. [#2529]
+
+[#2529]: https://github.com/actix/actix-web/pull/2529
+
+
+## 3.3.2 - 2020-12-01
+### Fixed
+* Removed an occasional `unwrap` on `None` panic in `NormalizePathNormalization`. [#1762]
+* Fix `match_pattern()` returning `None` for scope with empty path resource. [#1798]
+* Increase minimum `socket2` version. [#1803]
+
+[#1762]: https://github.com/actix/actix-web/pull/1762
+[#1798]: https://github.com/actix/actix-web/pull/1798
+[#1803]: https://github.com/actix/actix-web/pull/1803
+
+
+## 3.3.1 - 2020-11-29
+* Ensure `actix-http` dependency uses same `serde_urlencoded`.
+
+
 ## 3.3.0 - 2020-11-25
 ### Added
 * Add `Either<A, B>` extractor helper. [#1788]
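
The `NormalizePath::default()` soft-deprecation above means the trailing-slash behavior should be stated explicitly before moving to v4. A minimal migration sketch against the 3.x module paths (the `/ping` route, handler, and bind address are illustrative, not taken from this diff):

```rust
use actix_web::middleware::normalize::{NormalizePath, TrailingSlash};
use actix_web::{web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Spell out the trailing-slash behavior instead of relying on
            // `NormalizePath::default()`, whose meaning changes in v4
            // (`Always` today, `Trim` in v4).
            .wrap(NormalizePath::new(TrailingSlash::Trim))
            .route("/ping", web::get().to(|| async { HttpResponse::Ok().finish() }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```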
Cargo.toml (12 lines changed)
@@ -1,8 +1,8 @@
 [package]
 name = "actix-web"
-version = "3.3.0"
+version = "3.3.3"
 authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
-description = "Actix web is a powerful, pragmatic, and extremely fast web framework for Rust"
+description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
 readme = "README.md"
 keywords = ["actix", "http", "web", "framework", "async"]
 homepage = "https://actix.rs"
@@ -85,11 +85,11 @@ actix-threadpool = "0.3.1"
 actix-tls = "2.0.0"
 
 actix-web-codegen = "0.4.0"
-actix-http = "2.1.0"
+actix-http = "2.2.0"
-awc = { version = "2.0.0", default-features = false }
+awc = { version = "2.0.3", default-features = false }
 
 bytes = "0.5.3"
-derive_more = "0.99.2"
+derive_more = "0.99.5"
 encoding_rs = "0.8"
 futures-channel = { version = "0.3.5", default-features = false }
 futures-core = { version = "0.3.5", default-features = false }
@@ -97,7 +97,7 @@ futures-util = { version = "0.3.5", default-features = false }
 fxhash = "0.2.1"
 log = "0.4"
 mime = "0.3"
-socket2 = "0.3"
+socket2 = "0.3.16"
 pin-project = "1.0.0"
 regex = "1.4"
 serde = { version = "1.0", features = ["derive"] }
README.md

@@ -1,15 +1,15 @@
 <div align="center">
 <h1>Actix web</h1>
 <p>
-<strong>Actix web is a powerful, pragmatic, and extremely fast web framework for Rust</strong>
+<strong>Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust</strong>
 </p>
 <p>
 
 [](https://crates.io/crates/actix-web)
-[](https://docs.rs/actix-web/3.3.0)
+[](https://docs.rs/actix-web/3.3.3)
 [](https://blog.rust-lang.org/2020/03/12/Rust-1.42.html)
 
-[](https://deps.rs/crate/actix-web/3.3.0)
+[](https://deps.rs/crate/actix-web/3.3.3)
 <br />
 [](https://travis-ci.org/actix/actix-web)
 [](https://codecov.io/gh/actix/actix-web)
actix-http/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "actix-http"
-version = "2.2.0"
+version = "2.2.1"
 authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
 description = "HTTP primitives for the Actix ecosystem"
 readme = "README.md"
actix-http/README.md

@@ -3,9 +3,9 @@
 > HTTP primitives for the Actix ecosystem.
 
 [](https://crates.io/crates/actix-http)
-[](https://docs.rs/actix-http/2.2.0)
+[](https://docs.rs/actix-http/2.2.1)
 
-[](https://deps.rs/crate/actix-http/2.2.0)
+[](https://deps.rs/crate/actix-http/2.2.1)
 [](https://gitter.im/actix/actix-web?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 
 ## Documentation & Resources
actix-http/src/error.rs

@@ -55,7 +55,7 @@ impl Error {
 
     /// Similar to `as_response_error` but downcasts.
     pub fn as_error<T: ResponseError + 'static>(&self) -> Option<&T> {
-        ResponseError::downcast_ref(self.cause.as_ref())
+        <dyn ResponseError>::downcast_ref(self.cause.as_ref())
     }
 }
 
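
The fully qualified `<dyn ResponseError>::downcast_ref` call pins the downcast to the trait object; the public `as_error` API itself is unchanged. A small sketch of how that downcast is used from the caller's side, via the `actix_web` re-exports of these types (`MyError` is an illustrative type, not part of this diff):

```rust
use std::fmt;

use actix_web::{Error, ResponseError};

#[derive(Debug)]
struct MyError;

impl fmt::Display for MyError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("my error")
    }
}

// The default `ResponseError` impl produces a 500 response.
impl ResponseError for MyError {}

fn main() {
    // `Error` boxes any `ResponseError`; `as_error` downcasts back to the
    // concrete type when the caller needs to inspect it.
    let err: Error = MyError.into();
    assert!(err.as_error::<MyError>().is_some());
}
```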
actix-http/src/h1/decoder.rs

@@ -67,6 +67,7 @@ pub(crate) trait MessageType: Sized {
         let mut has_upgrade_websocket = false;
         let mut expect = false;
         let mut chunked = false;
+        let mut seen_te = false;
         let mut content_length = None;
 
         {
@@ -85,8 +86,17 @@ pub(crate) trait MessageType: Sized {
                 };
 
                 match name {
-                    header::CONTENT_LENGTH => {
-                        if let Ok(s) = value.to_str() {
+                    header::CONTENT_LENGTH if content_length.is_some() => {
+                        debug!("multiple Content-Length");
+                        return Err(ParseError::Header);
+                    }
+
+                    header::CONTENT_LENGTH => match value.to_str() {
+                        Ok(s) if s.trim().starts_with("+") => {
+                            debug!("illegal Content-Length: {:?}", s);
+                            return Err(ParseError::Header);
+                        }
+                        Ok(s) => {
                             if let Ok(len) = s.parse::<u64>() {
                                 if len != 0 {
                                     content_length = Some(len);
@@ -95,15 +105,31 @@ pub(crate) trait MessageType: Sized {
                                 debug!("illegal Content-Length: {:?}", s);
                                 return Err(ParseError::Header);
                             }
-                        } else {
+                        }
+                        Err(_) => {
                             debug!("illegal Content-Length: {:?}", value);
                             return Err(ParseError::Header);
                         }
-                    }
+                    },
+
                     // transfer-encoding
+                    header::TRANSFER_ENCODING if seen_te => {
+                        debug!("multiple Transfer-Encoding not allowed");
+                        return Err(ParseError::Header);
+                    }
+
                     header::TRANSFER_ENCODING => {
+                        seen_te = true;
+
                         if let Ok(s) = value.to_str().map(|s| s.trim()) {
-                            chunked = s.eq_ignore_ascii_case("chunked");
+                            if s.eq_ignore_ascii_case("chunked") {
+                                chunked = true;
+                            } else if s.eq_ignore_ascii_case("identity") {
+                                // allow silently since multiple TE headers are already checked
+                            } else {
+                                debug!("illegal Transfer-Encoding: {:?}", s);
+                                return Err(ParseError::Header);
+                            }
                         } else {
                             return Err(ParseError::Header);
                         }
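
These parser changes reject the ambiguous framing that enables HTTP request smuggling: a second `Content-Length`, a `Content-Length` with a leading `+`, a repeated `Transfer-Encoding`, and any transfer coding other than `chunked`/`identity` now fail with `ParseError::Header`. A standalone sketch of the same decision rules over plain header pairs (not the actix-http code, just the logic):

```rust
#[derive(Debug, PartialEq)]
enum Framing {
    Length(u64),
    Chunked,
    None,
}

/// Decide message framing from raw (name, value) header pairs, rejecting the
/// ambiguous combinations used in request smuggling: duplicate Content-Length,
/// a leading '+', more than one Transfer-Encoding header, and unknown
/// transfer codings.
fn framing(headers: &[(&str, &str)]) -> Result<Framing, &'static str> {
    let mut content_length: Option<u64> = None;
    let mut seen_te = false;
    let mut chunked = false;

    for (name, value) in headers {
        match name.to_ascii_lowercase().as_str() {
            "content-length" => {
                if content_length.is_some() {
                    return Err("multiple Content-Length");
                }
                let v = value.trim();
                if v.starts_with('+') {
                    return Err("illegal Content-Length");
                }
                content_length = Some(v.parse().map_err(|_| "illegal Content-Length")?);
            }
            "transfer-encoding" => {
                if seen_te {
                    return Err("multiple Transfer-Encoding");
                }
                seen_te = true;
                match value.trim() {
                    v if v.eq_ignore_ascii_case("chunked") => chunked = true,
                    v if v.eq_ignore_ascii_case("identity") => {}
                    _ => return Err("illegal Transfer-Encoding"),
                }
            }
            _ => {}
        }
    }

    Ok(if chunked {
        Framing::Chunked
    } else if let Some(len) = content_length.filter(|&l| l != 0) {
        Framing::Length(len)
    } else {
        Framing::None
    })
}

fn main() {
    assert_eq!(framing(&[("Content-Length", "4")]), Ok(Framing::Length(4)));
    assert!(framing(&[("Content-Length", "4"), ("Content-Length", "5")]).is_err());
    assert!(framing(&[("Transfer-Encoding", "chunked"), ("Transfer-Encoding", "chunked")]).is_err());
}
```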
@@ -510,19 +536,11 @@ impl ChunkedState {
         size: &mut u64,
     ) -> Poll<Result<ChunkedState, io::Error>> {
         let radix = 16;
-        match byte!(rdr) {
-            b @ b'0'..=b'9' => {
-                *size *= radix;
-                *size += u64::from(b - b'0');
-            }
-            b @ b'a'..=b'f' => {
-                *size *= radix;
-                *size += u64::from(b + 10 - b'a');
-            }
-            b @ b'A'..=b'F' => {
-                *size *= radix;
-                *size += u64::from(b + 10 - b'A');
-            }
+
+        let rem = match byte!(rdr) {
+            b @ b'0'..=b'9' => b - b'0',
+            b @ b'a'..=b'f' => b + 10 - b'a',
+            b @ b'A'..=b'F' => b + 10 - b'A',
             b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)),
             b';' => return Poll::Ready(Ok(ChunkedState::Extension)),
             b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)),
@@ -532,8 +550,23 @@ impl ChunkedState {
                     "Invalid chunk size line: Invalid Size",
                 )));
             }
-        }
-        Poll::Ready(Ok(ChunkedState::Size))
+        };
+
+        match size.checked_mul(radix) {
+            Some(n) => {
+                *size = n as u64;
+                *size += rem as u64;
+
+                Poll::Ready(Ok(ChunkedState::Size))
+            }
+            None => {
+                debug!("chunk size would overflow");
+                Poll::Ready(Err(io::Error::new(
+                    io::ErrorKind::InvalidInput,
+                    "Invalid chunk size line: Invalid Size",
+                )))
+            }
+        }
     }
 
     fn read_size_lws(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
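
The rewrite above accumulates chunk-size digits through `checked_mul`, so an attacker-supplied size can no longer silently wrap a `u64`. The same idea as a standalone helper (illustrative, not the actix-http parser):

```rust
/// Parse a chunk-size field (hex digits only, extensions already stripped)
/// with overflow detection, in the spirit of the `checked_mul` change above.
fn parse_chunk_size(hex: &str) -> Result<u64, &'static str> {
    let mut size: u64 = 0;
    for b in hex.bytes() {
        let digit = match b {
            b'0'..=b'9' => b - b'0',
            b'a'..=b'f' => b + 10 - b'a',
            b'A'..=b'F' => b + 10 - b'A',
            _ => return Err("invalid chunk size character"),
        };
        size = size
            .checked_mul(16)
            .and_then(|s| s.checked_add(u64::from(digit)))
            .ok_or("chunk size would overflow")?;
    }
    Ok(size)
}

fn main() {
    assert_eq!(parse_chunk_size("1a"), Ok(26));
    // 17 hex digits cannot fit in a u64, so this reports overflow
    // instead of silently wrapping.
    assert!(parse_chunk_size("fffffffffffffffff").is_err());
}
```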
@@ -552,6 +585,11 @@ impl ChunkedState {
     fn read_extension(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
         match byte!(rdr) {
             b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
+            // strictly 0x20 (space) should be disallowed but we don't parse quoted strings here
+            0x00..=0x08 | 0x0a..=0x1f | 0x7f => Poll::Ready(Err(io::Error::new(
+                io::ErrorKind::InvalidInput,
+                "Invalid character in chunk extension",
+            ))),
             _ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
         }
     }
@@ -977,13 +1015,7 @@ mod tests {
             "GET /test HTTP/1.1\r\n\
              transfer-encoding: chnked\r\n\r\n",
         );
-        let req = parse_ready!(&mut buf);
-
-        if let Ok(val) = req.chunked() {
-            assert!(!val);
-        } else {
-            unreachable!("Error");
-        }
+        expect_parse_err!(&mut buf);
     }
 
     #[test]
actix-http/src/h1/encoder.rs

@@ -80,6 +80,7 @@ pub(crate) trait MessageType: Sized {
         match length {
             BodySize::Stream => {
                 if chunked {
+                    skip_len = true;
                     if camel_case {
                         dst.put_slice(b"\r\nTransfer-Encoding: chunked\r\n")
                     } else {
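
Setting `skip_len` for chunked stream bodies keeps a `Content-Length` header from being written alongside `Transfer-Encoding: chunked`, so the response framing stays unambiguous. A hedged sketch of a handler that exercises this path in actix-web 3 (assumes `bytes` 0.5 and `futures-util` are available as dependencies; the route and address are illustrative):

```rust
use actix_web::{web, App, Error, HttpResponse, HttpServer};
use bytes::Bytes;
use futures_util::stream;

// A streaming body of unknown length is framed with
// `Transfer-Encoding: chunked`; per the encoder change above, no conflicting
// `Content-Length` is emitted for it.
async fn stream_handler() -> HttpResponse {
    let body = stream::iter(vec![
        Ok::<Bytes, Error>(Bytes::from_static(b"hello ")),
        Ok(Bytes::from_static(b"world")),
    ]);
    HttpResponse::Ok().streaming(body)
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().route("/stream", web::get().to(stream_handler)))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```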
awc/CHANGES.md

@@ -3,6 +3,11 @@
 ## Unreleased - 2020-xx-xx
 
 
+## 2.0.3 - 2020-11-29
+### Fixed
+* Ensure `actix-http` dependency uses same `serde_urlencoded`.
+
+
 ## 2.0.2 - 2020-11-25
 ### Changed
 * Upgrade `serde_urlencoded` to `0.7`. [#1773]
awc/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "awc"
-version = "2.0.2"
+version = "2.0.3"
 authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
 description = "Async HTTP and WebSocket client library built on the Actix ecosystem"
 readme = "README.md"
@@ -39,7 +39,7 @@ compress = ["actix-http/compress"]
 [dependencies]
 actix-codec = "0.3.0"
 actix-service = "1.0.6"
-actix-http = "2.0.0"
+actix-http = "2.2.0"
 actix-rt = "1.0.0"
 
 base64 = "0.13"
awc/README.md

@@ -3,9 +3,9 @@
 > Async HTTP and WebSocket client library.
 
 [](https://crates.io/crates/awc)
-[](https://docs.rs/awc/2.0.2)
+[](https://docs.rs/awc/2.0.3)
 
-[](https://deps.rs/crate/awc/2.0.2)
+[](https://deps.rs/crate/awc/2.0.3)
 [](https://gitter.im/actix/actix-web?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 
 ## Documentation & Resources
Dependency graph (dot file, workspace view)

@@ -17,7 +17,7 @@ digraph {
   "multipart" -> { "web" "service" "utils" }
   "http" -> { "service" "codec" "connect" "utils" "rt" "threadpool" }
   "http" -> { "actix" "tls" }[color=blue] // optional
-  "files" -> { "web" "http" }
+  "files" -> { "web" }
   "http-test" -> { "service" "codec" "connect" "utils" "rt" "server" "testing" "awc" }
 
   // net
Dependency graph (dot file, crate names)

@@ -11,11 +11,11 @@ digraph {
   "actix-http-test"
   }
 
   "actix-web" -> { "actix-web-codegen" "actix-http" "awc" }
   "awc" -> { "actix-http" }
   "actix-web-actors" -> { "actix" "actix-web" "actix-http" }
   "actix-multipart" -> { "actix-web" }
   "actix-http" -> { "actix" }[color=blue] // optional
-  "actix-files" -> { "actix-web" "actix-http" }
+  "actix-files" -> { "actix-web" }
   "actix-http-test" -> { "awc" }
 }
src/lib.rs

@@ -1,4 +1,4 @@
-//! Actix web is a powerful, pragmatic, and extremely fast web framework for Rust.
+//! Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust.
 //!
 //! ## Example
 //!
src/middleware/normalize.rs

@@ -31,7 +31,7 @@ impl Default for TrailingSlash {
     }
 }
 
-#[derive(Default, Clone, Copy)]
+#[derive(Clone, Copy)]
 /// `Middleware` to normalize request's URI in place
 ///
 /// Performs following:
@@ -56,6 +56,18 @@ impl Default for TrailingSlash {
 
 pub struct NormalizePath(TrailingSlash);
 
+impl Default for NormalizePath {
+    fn default() -> Self {
+        log::warn!(
+            "`NormalizePath::default()` is deprecated. The default trailing slash behavior will \
+            change in v4 from `Always` to `Trim`. Update your call to `NormalizePath::new(...)` to \
+            avoid inaccessible routes when upgrading."
+        );
+
+        Self(TrailingSlash::default())
+    }
+}
+
 impl NormalizePath {
     /// Create new `NormalizePath` middleware with the specified trailing slash style.
     pub fn new(trailing_slash_style: TrailingSlash) -> Self {
@@ -137,9 +149,9 @@ where
         // so the change can not be deduced from the length comparison
         if path != original_path {
             let mut parts = head.uri.clone().into_parts();
-            let pq = parts.path_and_query.as_ref().unwrap();
+            let query = parts.path_and_query.as_ref().and_then(|pq| pq.query());
 
-            let path = if let Some(q) = pq.query() {
+            let path = if let Some(q) = query {
                 Bytes::from(format!("{}?{}", path, q))
             } else {
                 Bytes::copy_from_slice(path.as_bytes())
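
The `unwrap` on `path_and_query` (the panic noted in the 3.3.2 changelog) is replaced by an `and_then` over the optional query. A test-style sketch of the user-visible behavior, modeled on actix-web's own test helpers and assuming `actix-rt` 1.x is available for the test macro (the route and query values are illustrative):

```rust
// Check that a query string survives path normalization.
#[actix_rt::test]
async fn query_survives_normalization() {
    use actix_web::middleware::normalize::{NormalizePath, TrailingSlash};
    use actix_web::test::{call_service, init_service, TestRequest};
    use actix_web::{web, App, HttpRequest, HttpResponse};

    let mut srv = init_service(
        App::new()
            .wrap(NormalizePath::new(TrailingSlash::Trim))
            .route(
                "/v1/something",
                web::get().to(|req: HttpRequest| async move {
                    // The trailing slash is trimmed, but the query is kept.
                    assert_eq!(req.query_string(), "query=test");
                    HttpResponse::Ok().finish()
                }),
            ),
    )
    .await;

    let req = TestRequest::get()
        .uri("/v1/something/?query=test")
        .to_request();
    let res = call_service(&mut srv, req).await;
    assert!(res.status().is_success());
}
```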
src/request.rs

@@ -675,4 +675,40 @@ mod tests {
         let res = call_service(&mut srv, req).await;
         assert_eq!(res.status(), StatusCode::OK);
     }
+
+    #[actix_rt::test]
+    async fn extract_path_pattern_complex() {
+        let mut srv = init_service(
+            App::new()
+                .service(web::scope("/user").service(web::scope("/{id}").service(
+                    web::resource("").to(move |req: HttpRequest| {
+                        assert_eq!(req.match_pattern(), Some("/user/{id}".to_owned()));
+
+                        HttpResponse::Ok().finish()
+                    }),
+                )))
+                .service(web::resource("/").to(move |req: HttpRequest| {
+                    assert_eq!(req.match_pattern(), Some("/".to_owned()));
+
+                    HttpResponse::Ok().finish()
+                }))
+                .default_service(web::to(move |req: HttpRequest| {
+                    assert!(req.match_pattern().is_none());
+                    HttpResponse::Ok().finish()
+                })),
+        )
+        .await;
+
+        let req = TestRequest::get().uri("/user/test").to_request();
+        let res = call_service(&mut srv, req).await;
+        assert_eq!(res.status(), StatusCode::OK);
+
+        let req = TestRequest::get().uri("/").to_request();
+        let res = call_service(&mut srv, req).await;
+        assert_eq!(res.status(), StatusCode::OK);
+
+        let req = TestRequest::get().uri("/not-exist").to_request();
+        let res = call_service(&mut srv, req).await;
+        assert_eq!(res.status(), StatusCode::OK);
+    }
 }
src/rmap.rs

@@ -86,7 +86,7 @@ impl ResourceMap {
             if let Some(plen) = pattern.is_prefix_match(path) {
                 return rmap.has_resource(&path[plen..]);
             }
-        } else if pattern.is_match(path) {
+        } else if pattern.is_match(path) || pattern.pattern() == "" && path == "/" {
             return true;
         }
     }
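
The added `pattern.pattern() == "" && path == "/"` condition treats an empty pattern as matching the root path, fixing `match_pattern()` returning `None` for a scope with an empty path resource (per the 3.3.2 changelog). A typical use of `match_pattern()` is labeling logs or metrics by route template rather than by concrete path; a small illustrative sketch whose route layout mirrors the new test above (the printed label is an assumed usage, not from this diff):

```rust
use actix_web::{web, App, HttpRequest, HttpResponse, HttpServer};

// `match_pattern()` yields the matched route template (e.g. "/user/{id}")
// rather than the concrete path, which is what you want for a log or metric
// label; the rmap fix above makes it return `Some(..)` for scope roots
// declared with an empty-path resource as well.
async fn user(req: HttpRequest) -> HttpResponse {
    let pattern = req
        .match_pattern()
        .unwrap_or_else(|| "<unmatched>".to_owned());
    println!("handling {} (pattern {})", req.path(), pattern);
    HttpResponse::Ok().finish()
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new().service(
            web::scope("/user")
                .service(web::scope("/{id}").service(web::resource("").to(user))),
        )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```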
|
Reference in New Issue
Block a user