Mirror of https://github.com/fafhrd91/actix-web, synced 2025-07-06 10:50:17 +02:00.

Compare commits: 56 commits between http-v3.0. and http-v3.2.

Commits (SHA1):
f3f41a0cc7  987067698b  b62f1b4ef7  df5257c373  226ea696ce  e524fc86ea  7e990e423f
8f9a12ed5d  c6eba2da9b  06c7945801  0dba6310c6  f7d7d92984  3d6ea7fe9b  8dbf7da89f
de92b3be2e  5d0e8138ee  6b7196225e  265fa0d050  062127a210  3926416580  43671ae4aa
264a703d94  498fb954b3  2253eae2bb  8e76a1c775  dce57a79c9  6a5b370206  b1c85ba85b
9aab911600  017e40f733  45592b37b6  8abcb94512  f2cacc4c9d  56b9c0d08e  de9e41484a
2fed978597  40048a5811  e942d3e3b1  09cffc093c  c58f287044  7b27493e4c  478b33b8a3
592b40f914  fe5279c77a  80d222aa78  a03a2a0076  745e738955  1fd90f0b10  a35804b89f
5611b98c0d  dce9438518  be986d96b3  8ddb24b49b  87f627cd5d  03456b8a33  8c2fad3164
.github/workflows/ci-post-merge.yml (vendored) — 48 changed lines
@@ -23,6 +23,7 @@ jobs:
 CI: 1
 CARGO_INCREMENTAL: 0
 VCPKGRS_DYNAMIC: 1
+CARGO_UNSTABLE_SPARSE_REGISTRY: true

 steps:
 - uses: actions/checkout@v2

@@ -78,15 +79,6 @@ jobs:
 cargo test --lib --tests -p=actix-multipart --all-features
 cargo test --lib --tests -p=actix-web-actors --all-features

-- name: tests (io-uring)
-if: matrix.target.os == 'ubuntu-latest'
-timeout-minutes: 60
-run: >
-sudo bash -c "ulimit -Sl 512
-&& ulimit -Hl 512
-&& PATH=$PATH:/usr/share/rust/.cargo/bin
-&& RUSTUP_TOOLCHAIN=${{ matrix.version }} cargo test --lib --tests -p=actix-files --all-features"
-
 - name: Clear the cargo caches
 run: |
 cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean

@@ -95,6 +87,11 @@ jobs:
 ci_feature_powerset_check:
 name: Verify Feature Combinations
 runs-on: ubuntu-latest

+env:
+CI: 1
+CARGO_INCREMENTAL: 0
+
 steps:
 - uses: actions/checkout@v2

@@ -125,37 +122,14 @@ jobs:
 uses: actions-rs/cargo@v1
 with: { command: ci-check-all-feature-powerset-linux }

-# job currently (1st Feb 2022) segfaults
-# coverage:
-# name: coverage
-# runs-on: ubuntu-latest
-# steps:
-# - uses: actions/checkout@v2
-
-# - name: Install stable
-# uses: actions-rs/toolchain@v1
-# with:
-# toolchain: stable-x86_64-unknown-linux-gnu
-# profile: minimal
-# override: true
-
-# - name: Generate Cargo.lock
-# uses: actions-rs/cargo@v1
-# with: { command: generate-lockfile }
-# - name: Cache Dependencies
-# uses: Swatinem/rust-cache@v1.2.0
-
-# - name: Generate coverage file
-# run: |
-# cargo install cargo-tarpaulin --vers "^0.13"
-# cargo tarpaulin --workspace --features=rustls,openssl --out Xml --verbose
-# - name: Upload to Codecov
-# uses: codecov/codecov-action@v1
-# with: { file: cobertura.xml }
-
 nextest:
 name: nextest
 runs-on: ubuntu-latest

+env:
+CI: 1
+CARGO_INCREMENTAL: 0

 steps:
 - uses: actions/checkout@v2

.github/workflows/ci.yml (vendored) — 34 changed lines
@@ -16,7 +16,7 @@ jobs:
 - { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
 - { name: Windows, os: windows-2022, triple: x86_64-pc-windows-msvc }
 version:
-- 1.54.0 # MSRV
+- 1.57.0 # MSRV
 - stable

 name: ${{ matrix.target.name }} / ${{ matrix.version }}

@@ -81,19 +81,37 @@ jobs:
 cargo test --lib --tests -p=actix-multipart --all-features
 cargo test --lib --tests -p=actix-web-actors --all-features

+- name: Clear the cargo caches
+run: |
+cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
+cargo-cache
+
+io-uring:
+name: io-uring tests
+runs-on: ubuntu-latest
+steps:
+- uses: actions/checkout@v2
+
+- name: Install Rust
+uses: actions-rs/toolchain@v1
+with:
+toolchain: stable-x86_64-unknown-linux-gnu
+profile: minimal
+override: true
+
+- name: Generate Cargo.lock
+uses: actions-rs/cargo@v1
+with: { command: generate-lockfile }
+- name: Cache Dependencies
+uses: Swatinem/rust-cache@v1.3.0
+
 - name: tests (io-uring)
-if: matrix.target.os == 'ubuntu-latest'
 timeout-minutes: 60
 run: >
 sudo bash -c "ulimit -Sl 512
 && ulimit -Hl 512
 && PATH=$PATH:/usr/share/rust/.cargo/bin
-&& RUSTUP_TOOLCHAIN=${{ matrix.version }} cargo test --lib --tests -p=actix-files --all-features"
+&& RUSTUP_TOOLCHAIN=stable cargo test --lib --tests -p=actix-files --all-features"

-- name: Clear the cargo caches
-run: |
-cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
-cargo-cache
-
 rustdoc:
 name: doc tests

.github/workflows/coverage.yml (vendored, new file) — 36 lines
@@ -0,0 +1,36 @@
+# disabled because `cargo tarpaulin` currently segfaults
+
+name: Coverage
+
+on:
+push:
+branches: [master]
+
+jobs:
+# job currently (1st Feb 2022) segfaults
+coverage:
+name: coverage
+runs-on: ubuntu-latest
+steps:
+- uses: actions/checkout@v2
+
+- name: Install stable
+uses: actions-rs/toolchain@v1
+with:
+toolchain: stable-x86_64-unknown-linux-gnu
+profile: minimal
+override: true
+
+- name: Generate Cargo.lock
+uses: actions-rs/cargo@v1
+with: { command: generate-lockfile }
+- name: Cache Dependencies
+uses: Swatinem/rust-cache@v1.2.0
+
+- name: Generate coverage file
+run: |
+cargo install cargo-tarpaulin --vers "^0.13"
+cargo tarpaulin --workspace --features=rustls,openssl --out Xml --verbose
+- name: Upload to Codecov
+uses: codecov/codecov-action@v1
+with: { file: cobertura.xml }

actix-files CHANGES.md:

@@ -1,8 +1,16 @@
 # Changes

-## Unreleased - 2021-xx-xx
+## Unreleased - 2022-xx-xx
-- Add support for streaming audio files by setting the `content-disposition` header `inline` instead of `attachement`. [#2645]
+- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.


+## 0.6.1 - 2022-06-11
+- Add `NamedFile::{modified, metadata, content_type, content_disposition, encoding}()` getters. [#2021]
+- Update `tokio-uring` dependency to `0.3`.
+- Audio files now use `Content-Disposition: inline` instead of `attachment`. [#2645]
+- Minimum supported Rust version (MSRV) is now 1.56 due to transitive `hashbrown` dependency.
+
+[#2021]: https://github.com/actix/actix-web/pull/2021
 [#2645]: https://github.com/actix/actix-web/pull/2645

actix-files Cargo.toml:

@@ -1,6 +1,6 @@
 [package]
 name = "actix-files"
-version = "0.6.0"
+version = "0.6.1"
 authors = [
 "Nikolay Kim <fafhrd91@gmail.com>",
 "fakeshadow <24548779@qq.com>",

@@ -39,10 +39,13 @@ mime_guess = "2.0.1"
 percent-encoding = "2.1"
 pin-project-lite = "0.2.7"

-tokio-uring = { version = "0.2", optional = true, features = ["bytes"] }
+# experimental-io-uring
+[target.'cfg(target_os = "linux")'.dependencies]
+tokio-uring = { version = "0.3", optional = true, features = ["bytes"] }
+actix-server = { version = "2.1", optional = true } # ensure matching tokio-uring versions

 [dev-dependencies]
-actix-rt = "2.2"
+actix-rt = "2.7"
 actix-test = "0.1.0-beta.13"
-actix-web = "4.0.0"
+actix-web = "4"
 tempfile = "3.2"

actix-files README.md:

@@ -3,11 +3,11 @@
 > Static file serving for Actix Web

 [](https://crates.io/crates/actix-files)
-[](https://docs.rs/actix-files/0.6.0)
+[](https://docs.rs/actix-files/0.6.1)
-[](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
+(rustc version badge, link removed)
 (license badge)
 <br />
-[](https://deps.rs/crate/actix-files/0.6.0)
+[](https://deps.rs/crate/actix-files/0.6.1)
 [](https://crates.io/crates/actix-files)
 [](https://discord.gg/NWpN5mmg3x)

actix-files tests:

@@ -364,20 +364,43 @@ mod tests {
 );
 }

+#[allow(deprecated)]
 #[actix_rt::test]
-async fn test_named_file_status_code_text() {
-let mut file = NamedFile::open_async("Cargo.toml")
+async fn status_code_customize_same_output() {
+let file1 = NamedFile::open_async("Cargo.toml")
 .await
 .unwrap()
 .set_status_code(StatusCode::NOT_FOUND);
+
+let file2 = NamedFile::open_async("Cargo.toml")
+.await
+.unwrap()
+.customize()
+.with_status(StatusCode::NOT_FOUND);
+
+let req = TestRequest::default().to_http_request();
+let res1 = file1.respond_to(&req);
+let res2 = file2.respond_to(&req);
+
+assert_eq!(res1.status(), StatusCode::NOT_FOUND);
+assert_eq!(res2.status(), StatusCode::NOT_FOUND);
+}
+
+#[actix_rt::test]
+async fn test_named_file_status_code_text() {
+let mut file = NamedFile::open_async("Cargo.toml").await.unwrap();
+
 {
 file.file();
 let _f: &File = &file;
 }

 {
 let _f: &mut File = &mut file;
 }

+let file = file.customize().with_status(StatusCode::NOT_FOUND);
+
 let req = TestRequest::default().to_http_request();
 let resp = file.respond_to(&req);
 assert_eq!(

actix-files named.rs (`NamedFile`):

@@ -23,6 +23,7 @@ use actix_web::{
 use bitflags::bitflags;
 use derive_more::{Deref, DerefMut};
 use futures_core::future::LocalBoxFuture;
+use mime::Mime;
 use mime_guess::from_path;

 use crate::{encoding::equiv_utf8_text, range::HttpRange};

@@ -76,8 +77,8 @@ pub struct NamedFile {
 pub(crate) md: Metadata,
 pub(crate) flags: Flags,
 pub(crate) status_code: StatusCode,
-pub(crate) content_type: mime::Mime,
+pub(crate) content_type: Mime,
-pub(crate) content_disposition: header::ContentDisposition,
+pub(crate) content_disposition: ContentDisposition,
 pub(crate) encoding: Option<ContentEncoding>,
 }

@@ -209,11 +210,10 @@ impl NamedFile {
 Self::from_file(file, path)
 }

-#[allow(rustdoc::broken_intra_doc_links)]
 /// Attempts to open a file asynchronously in read-only mode.
 ///
-/// When the `experimental-io-uring` crate feature is enabled, this will be async.
-/// Otherwise, it will be just like [`open`][Self::open].
+/// When the `experimental-io-uring` crate feature is enabled, this will be async. Otherwise, it
+/// will behave just like `open`.
 ///
 /// # Examples
 /// ```

@@ -238,13 +238,13 @@ impl NamedFile {
 Self::from_file(file, path)
 }

-/// Returns reference to the underlying `File` object.
+/// Returns reference to the underlying file object.
 #[inline]
 pub fn file(&self) -> &File {
 &self.file
 }

-/// Retrieve the path of this file.
+/// Returns the filesystem path to this file.
 ///
 /// # Examples
 /// ```

@@ -262,16 +262,53 @@ impl NamedFile {
 self.path.as_path()
 }

-/// Set response **Status Code**
+/// Returns the time the file was last modified.
+///
+/// Returns `None` only on unsupported platforms; see [`std::fs::Metadata::modified()`].
+/// Therefore, it is usually safe to unwrap this.
+#[inline]
+pub fn modified(&self) -> Option<SystemTime> {
+self.modified
+}
+
+/// Returns the filesystem metadata associated with this file.
+#[inline]
+pub fn metadata(&self) -> &Metadata {
+&self.md
+}
+
+/// Returns the `Content-Type` header that will be used when serving this file.
+#[inline]
+pub fn content_type(&self) -> &Mime {
+&self.content_type
+}
+
+/// Returns the `Content-Disposition` that will be used when serving this file.
+#[inline]
+pub fn content_disposition(&self) -> &ContentDisposition {
+&self.content_disposition
+}
+
+/// Returns the `Content-Encoding` that will be used when serving this file.
+///
+/// A return value of `None` indicates that the content is not already using a compressed
+/// representation and may be subject to compression downstream.
+#[inline]
+pub fn content_encoding(&self) -> Option<ContentEncoding> {
+self.encoding
+}
+
+/// Set response status code.
+#[deprecated(since = "0.7.0", note = "Prefer `Responder::customize()`.")]
 pub fn set_status_code(mut self, status: StatusCode) -> Self {
 self.status_code = status;
 self
 }

-/// Set the MIME Content-Type for serving this file. By default the Content-Type is inferred
-/// from the filename extension.
+/// Sets the `Content-Type` header that will be used when serving this file. By default the
+/// `Content-Type` is inferred from the filename extension.
 #[inline]
-pub fn set_content_type(mut self, mime_type: mime::Mime) -> Self {
+pub fn set_content_type(mut self, mime_type: Mime) -> Self {
 self.content_type = mime_type;
 self
 }

@@ -284,15 +321,15 @@ impl NamedFile {
 /// filename is taken from the path provided in the `open` method after converting it to UTF-8
 /// (using `to_string_lossy`).
 #[inline]
-pub fn set_content_disposition(mut self, cd: header::ContentDisposition) -> Self {
+pub fn set_content_disposition(mut self, cd: ContentDisposition) -> Self {
 self.content_disposition = cd;
 self.flags.insert(Flags::CONTENT_DISPOSITION);
 self
 }

-/// Disable `Content-Disposition` header.
+/// Disables `Content-Disposition` header.
 ///
-/// By default Content-Disposition` header is enabled.
+/// By default, the `Content-Disposition` header is sent.
 #[inline]
 pub fn disable_content_disposition(mut self) -> Self {
 self.flags.remove(Flags::CONTENT_DISPOSITION);

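The `NamedFile` changes above add read-only getters and deprecate `set_status_code()` in favour of `Responder::customize()`. A minimal sketch of how the two pieces fit together, assuming actix-files 0.6.1 and actix-web 4 (the handler name and file path are made up for illustration):

```rust
use actix_files::NamedFile;
use actix_web::{http::StatusCode, Responder};

// Hypothetical handler: serve a static page but force a 503 status.
async fn maintenance_page() -> actix_web::Result<impl Responder> {
    let file = NamedFile::open_async("./static/maintenance.html").await?;

    // New 0.6.1 getters: inspect what will be sent without mutating the file.
    println!(
        "serving {} as {} ({} bytes, encoding: {:?})",
        file.path().display(),
        file.content_type(),
        file.metadata().len(),
        file.content_encoding(),
    );

    // Replacement for the deprecated `file.set_status_code(StatusCode::SERVICE_UNAVAILABLE)`.
    Ok(file.customize().with_status(StatusCode::SERVICE_UNAVAILABLE))
}
```

The deprecated builder still works (see the `#[allow(deprecated)]` test above), but `customize()` keeps `NamedFile` itself immutable and matches how every other responder is adjusted in actix-web 4.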
actix-http-test CHANGES.md:

@@ -1,6 +1,7 @@
 # Changes

-## Unreleased - 2021-xx-xx
+## Unreleased - 2022-xx-xx
+- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.


 ## 3.0.0-beta.13 - 2022-02-16

actix-http-test Cargo.toml:

@@ -29,13 +29,13 @@ default = []
 openssl = ["tls-openssl", "awc/openssl"]

 [dependencies]
-actix-service = "2.0.0"
+actix-service = "2"
 actix-codec = "0.5"
 actix-tls = "3"
-actix-utils = "3.0.0"
+actix-utils = "3"
 actix-rt = "2.2"
 actix-server = "2"
-awc = { version = "3.0.0-beta.21", default-features = false }
+awc = { version = "3", default-features = false }

 base64 = "0.13"
 bytes = "1"

@@ -51,5 +51,5 @@ tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
 tokio = { version = "1.8.4", features = ["sync"] }

 [dev-dependencies]
-actix-web = { version = "4.0.0", default-features = false, features = ["cookies"] }
+actix-web = { version = "4", default-features = false, features = ["cookies"] }
-actix-http = "3.0.0"
+actix-http = "3"

actix-http-test README.md:

@@ -4,7 +4,7 @@
 [](https://crates.io/crates/actix-http-test)
 [](https://docs.rs/actix-http-test/3.0.0-beta.13)
-[](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
+(rustc version badge, link removed)
 (license badge)
 <br>
 [](https://deps.rs/crate/actix-http-test/3.0.0-beta.13)

actix-http CHANGES.md:

@@ -1,6 +1,49 @@
 # Changes

-## Unreleased - 2021-xx-xx
+## Unreleased - 2022-xx-xx


+## 3.2.1 - 2022-07-02
+### Fixed
+- Fix parsing ambiguity in Transfer-Encoding and Content-Length headers for HTTP/1.0 requests. [#2794]
+
+[#2794]: https://github.com/actix/actix-web/pull/2794
+
+
+## 3.2.0 - 2022-06-30
+### Changed
+- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
+
+### Fixed
+- Websocket parser no longer throws endless overflow errors after receiving an oversized frame. [#2790]
+- Retain previously set Vary headers when using compression encoder. [#2798]
+
+[#2790]: https://github.com/actix/actix-web/pull/2790
+[#2798]: https://github.com/actix/actix-web/pull/2798
+
+
+## 3.1.0 - 2022-06-11
+### Changed
+- Minimum supported Rust version (MSRV) is now 1.56 due to transitive `hashbrown` dependency.
+
+### Fixed
+- Revert broken fix in [#2624] that caused erroneous 500 error responses. Temporarily re-introduces [#2357] bug. [#2779]
+
+[#2357]: https://github.com/actix/actix-web/issues/2357
+[#2624]: https://github.com/actix/actix-web/issues/2624
+[#2779]: https://github.com/actix/actix-web/issues/2779
+
+
+## 3.0.4 - 2022-03-09
+### Fixed
+- Document on docs.rs with `ws` feature enabled.
+
+
+## 3.0.3 - 2022-03-08
+### Fixed
+- Allow spaces between header name and colon when parsing responses. [#2684]
+
+[#2684]: https://github.com/actix/actix-web/issues/2684
+
+
 ## 3.0.2 - 2022-03-05

actix-http Cargo.toml:

@@ -1,6 +1,6 @@
 [package]
 name = "actix-http"
-version = "3.0.2"
+version = "3.2.1"
 authors = [
 "Nikolay Kim <fafhrd91@gmail.com>",
 "Rob Ede <robjtede@icloud.com>",

@@ -20,7 +20,7 @@ edition = "2018"
 [package.metadata.docs.rs]
 # features that docs.rs will build with
-features = ["http2", "openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"]
+features = ["http2", "ws", "openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"]

 [lib]
 name = "actix_http"

@@ -37,7 +37,7 @@ ws = [
 "local-channel",
 "base64",
 "rand",
-"sha-1",
+"sha1",
 ]

 # TLS via OpenSSL

@@ -73,11 +73,11 @@ httparse = "1.5.1"
 httpdate = "1.0.1"
 itoa = "1"
 language-tags = "0.3"
-log = "0.4"
 mime = "0.3"
 percent-encoding = "2.1"
 pin-project-lite = "0.2"
 smallvec = "1.6.1"
+tracing = { version = "0.1.30", default-features = false, features = ["log"] }

 # http2
 h2 = { version = "0.3.9", optional = true }

@@ -86,7 +86,7 @@ h2 = { version = "0.3.9", optional = true }
 local-channel = { version = "0.1", optional = true }
 base64 = { version = "0.13", optional = true }
 rand = { version = "0.8", optional = true }
-sha-1 = { version = "0.10", optional = true }
+sha1 = { version = "0.10", optional = true }

 # openssl/rustls
 actix-tls = { version = "3", default-features = false, optional = true }

@@ -94,13 +94,13 @@ actix-tls = { version = "3", default-features = false, optional = true }
 # compress-*
 brotli = { version = "3.3.3", optional = true }
 flate2 = { version = "1.0.13", optional = true }
-zstd = { version = "0.10", optional = true }
+zstd = { version = "0.11", optional = true }

 [dev-dependencies]
 actix-http-test = { version = "3.0.0-beta.13", features = ["openssl"] }
 actix-server = "2"
 actix-tls = { version = "3", features = ["openssl"] }
-actix-web = "4.0.0"
+actix-web = "4"

 async-stream = "0.3"
 criterion = { version = "0.3", features = ["html_reports"] }

@@ -108,9 +108,10 @@ env_logger = "0.9"
 futures-util = { version = "0.3.7", default-features = false, features = ["alloc"] }
 memchr = "2.4"
 once_cell = "1.9"
-rcgen = "0.8"
+rcgen = "0.9"
 regex = "1.3"
-rustls-pemfile = "0.2"
+rustversion = "1"
+rustls-pemfile = "1"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 static_assertions = "1"

@@ -120,7 +121,7 @@ tokio = { version = "1.8.4", features = ["net", "rt", "macros"] }
 [[example]]
 name = "ws"
-required-features = ["rustls"]
+required-features = ["ws", "rustls"]

 [[bench]]
 name = "write-camel-case"

actix-http README.md:

@@ -3,11 +3,11 @@
 > HTTP primitives for the Actix ecosystem.

 [](https://crates.io/crates/actix-http)
-[](https://docs.rs/actix-http/3.0.2)
+[](https://docs.rs/actix-http/3.2.1)
-[](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
+(rustc version badge, link removed)
 (license badge)
 <br />
-[](https://deps.rs/crate/actix-http/3.0.2)
+[](https://deps.rs/crate/actix-http/3.2.1)
 [](https://crates.io/crates/actix-http)
 [](https://discord.gg/NWpN5mmg3x)

@@ -25,7 +25,7 @@ use actix_http::{HttpService, Response};
 use actix_server::Server;
 use futures_util::future;
 use http::header::HeaderValue;
-use log::info;
+use tracing::info;

 #[actix_rt::main]
 async fn main() -> io::Result<()> {

actix-http benchmark (`_original` header parser):

@@ -114,11 +114,12 @@ mod _original {
 use std::mem::MaybeUninit;

 pub fn parse_headers(src: &mut BytesMut) -> usize {
-#![allow(clippy::uninit_assumed_init)]
+#![allow(invalid_value, clippy::uninit_assumed_init)]

 let mut headers: [HeaderIndex; MAX_HEADERS] =
 unsafe { MaybeUninit::uninit().assume_init() };

+#[allow(invalid_value)]
 let mut parsed: [httparse::Header<'_>; MAX_HEADERS] =
 unsafe { MaybeUninit::uninit().assume_init() };

example (serving an actix-web App on `HttpService`):

@@ -18,7 +18,8 @@ async fn main() -> std::io::Result<()> {
 HttpService::build()
 // pass the app to service builder
 // map_config is used to map App's configuration to ServiceBuilder
-.finish(map_config(app, |_| AppConfig::default()))
+// h1 will configure server to only use HTTP/1.1
+.h1(map_config(app, |_| AppConfig::default()))
 .tcp()
 })?
 .run()

example (echo request body):

@@ -5,6 +5,7 @@ use actix_server::Server;
 use bytes::BytesMut;
 use futures_util::StreamExt as _;
 use http::header::HeaderValue;
+use tracing::info;

 #[actix_rt::main]
 async fn main() -> io::Result<()> {

@@ -22,7 +23,7 @@ async fn main() -> io::Result<()> {
 body.extend_from_slice(&item?);
 }

-log::info!("request body: {:?}", body);
+info!("request body: {:?}", body);

 let res = Response::build(StatusCode::OK)
 .insert_header(("x-head", HeaderValue::from_static("dummy value!")))

example (connection data via `on_connect_ext`):

@@ -1,9 +1,8 @@
 use std::{convert::Infallible, io, time::Duration};

-use actix_http::{
-header::HeaderValue, HttpMessage, HttpService, Request, Response, StatusCode,
-};
+use actix_http::{header::HeaderValue, HttpService, Request, Response, StatusCode};
 use actix_server::Server;
+use tracing::info;

 #[actix_rt::main]
 async fn main() -> io::Result<()> {

@@ -18,12 +17,12 @@ async fn main() -> io::Result<()> {
 ext.insert(42u32);
 })
 .finish(|req: Request| async move {
-log::info!("{:?}", req);
+info!("{:?}", req);

 let mut res = Response::build(StatusCode::OK);
 res.insert_header(("x-head", HeaderValue::from_static("dummy value!")));

-let forty_two = req.extensions().get::<u32>().unwrap().to_string();
+let forty_two = req.conn_data::<u32>().unwrap().to_string();
 res.insert_header((
 "x-forty-two",
 HeaderValue::from_str(&forty_two).unwrap(),

example (streaming error):

@@ -12,6 +12,7 @@ use actix_http::{body::BodyStream, HttpService, Response};
 use actix_server::Server;
 use async_stream::stream;
 use bytes::Bytes;
+use tracing::info;

 #[actix_rt::main]
 async fn main() -> io::Result<()> {

@@ -21,7 +22,7 @@ async fn main() -> io::Result<()> {
 .bind("streaming-error", ("127.0.0.1", 8080), || {
 HttpService::build()
 .finish(|req| async move {
-log::info!("{:?}", req);
+info!("{:?}", req);
 let res = Response::ok();

 Ok::<_, Infallible>(res.set_body(BodyStream::new(stream! {

example (websocket heartbeat):

@@ -17,6 +17,7 @@ use actix_server::Server;
 use bytes::{Bytes, BytesMut};
 use bytestring::ByteString;
 use futures_core::{ready, Stream};
+use tracing::{info, trace};

 #[actix_rt::main]
 async fn main() -> io::Result<()> {

@@ -34,13 +35,13 @@ async fn main() -> io::Result<()> {
 }

 async fn handler(req: Request) -> Result<Response<BodyStream<Heartbeat>>, Error> {
-log::info!("handshaking");
+info!("handshaking");
 let mut res = ws::handshake(req.head())?;

 // handshake will always fail under HTTP/2

-log::info!("responding");
+info!("responding");
-Ok(res.message_body(BodyStream::new(Heartbeat::new(ws::Codec::new())))?)
+res.message_body(BodyStream::new(Heartbeat::new(ws::Codec::new())))
 }

 struct Heartbeat {

@@ -61,7 +62,7 @@ impl Stream for Heartbeat {
 type Item = Result<Bytes, Error>;

 fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
-log::trace!("poll");
+trace!("poll");

 ready!(self.as_mut().interval.poll_tick(cx));

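The connection-data example above only shows the changed line; a self-contained sketch of the full pairing, assuming actix-http 3.x with actix-server 2 and actix-rt (binding name, port, and header name are arbitrary), ties `on_connect_ext` to the new `conn_data` accessor:

```rust
use std::{convert::Infallible, io};

use actix_http::{header::HeaderValue, HttpService, Request, Response, StatusCode};
use actix_server::Server;

#[actix_rt::main]
async fn main() -> io::Result<()> {
    Server::build()
        .bind("conn-data", ("127.0.0.1", 8080), || {
            HttpService::build()
                // runs once per connection; stores data in connection-level extensions
                .on_connect_ext(|_stream, ext| {
                    ext.insert(42u32);
                })
                .finish(|req: Request| async move {
                    // read the per-connection value back with the 3.x `conn_data` accessor
                    let forty_two = req.conn_data::<u32>().copied().unwrap_or_default();

                    let mut res = Response::build(StatusCode::OK);
                    res.insert_header(("x-forty-two", HeaderValue::from(forty_two)));
                    Ok::<_, Infallible>(res.body("hello"))
                })
                .tcp()
        })?
        .run()
        .await
}
```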
actix-http body tests:

@@ -481,6 +481,7 @@ mod tests {
 assert_poll_next_none!(pl);
 }

+#[allow(clippy::let_unit_value)]
 #[actix_rt::test]
 async fn test_unit() {
 let pl = ();

actix-http content encoder:

@@ -17,6 +17,7 @@ use pin_project_lite::pin_project;
 #[cfg(feature = "compress-gzip")]
 use flate2::write::{GzEncoder, ZlibEncoder};

+use tracing::trace;
 #[cfg(feature = "compress-zstd")]
 use zstd::stream::write::Encoder as ZstdEncoder;

@@ -256,7 +257,7 @@ fn update_head(encoding: ContentEncoding, head: &mut ResponseHead) {
 head.headers_mut()
 .insert(header::CONTENT_ENCODING, encoding.to_header_value());
 head.headers_mut()
-.insert(header::VARY, HeaderValue::from_static("accept-encoding"));
+.append(header::VARY, HeaderValue::from_static("accept-encoding"));

 head.no_chunking(false);
 }

@@ -356,7 +357,7 @@ impl ContentEncoder {
 ContentEncoder::Brotli(ref mut encoder) => match encoder.write_all(data) {
 Ok(_) => Ok(()),
 Err(err) => {
-log::trace!("Error decoding br encoding: {}", err);
+trace!("Error decoding br encoding: {}", err);
 Err(err)
 }
 },

@@ -365,7 +366,7 @@ impl ContentEncoder {
 ContentEncoder::Gzip(ref mut encoder) => match encoder.write_all(data) {
 Ok(_) => Ok(()),
 Err(err) => {
-log::trace!("Error decoding gzip encoding: {}", err);
+trace!("Error decoding gzip encoding: {}", err);
 Err(err)
 }
 },

@@ -374,7 +375,7 @@ impl ContentEncoder {
 ContentEncoder::Deflate(ref mut encoder) => match encoder.write_all(data) {
 Ok(_) => Ok(()),
 Err(err) => {
-log::trace!("Error decoding deflate encoding: {}", err);
+trace!("Error decoding deflate encoding: {}", err);
 Err(err)
 }
 },

@@ -383,7 +384,7 @@ impl ContentEncoder {
 ContentEncoder::Zstd(ref mut encoder) => match encoder.write_all(data) {
 Ok(_) => Ok(()),
 Err(err) => {
-log::trace!("Error decoding ztsd encoding: {}", err);
+trace!("Error decoding ztsd encoding: {}", err);
 Err(err)
 }
 },

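The one-word change from `insert` to `append` on the `Vary` header is what implements [#2798]: `insert` replaces any previously set values, while `append` adds another one, so a `Vary: Origin` set by, say, CORS middleware survives the compression layer. A quick sketch using the `http` crate's `HeaderMap` (chosen for brevity; actix-http's own header map has the same insert/append semantics):

```rust
use http::header::{HeaderMap, HeaderValue, VARY};

fn main() {
    // `insert` wipes earlier values: only "accept-encoding" remains.
    let mut with_insert = HeaderMap::new();
    with_insert.insert(VARY, HeaderValue::from_static("Origin"));
    with_insert.insert(VARY, HeaderValue::from_static("accept-encoding"));
    assert_eq!(with_insert.get_all(VARY).iter().count(), 1);

    // `append` keeps earlier values: both "Origin" and "accept-encoding" are sent.
    let mut with_append = HeaderMap::new();
    with_append.insert(VARY, HeaderValue::from_static("Origin"));
    with_append.append(VARY, HeaderValue::from_static("accept-encoding"));
    assert_eq!(with_append.get_all(VARY).iter().count(), 2);
}
```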
HTTP/1 chunked transfer decoder:

@@ -1,6 +1,7 @@
 use std::{io, task::Poll};

 use bytes::{Buf as _, Bytes, BytesMut};
+use tracing::{debug, trace};

 macro_rules! byte (
 ($rdr:ident) => ({

@@ -76,7 +77,7 @@ impl ChunkedState {
 Poll::Ready(Ok(ChunkedState::Size))
 }
 None => {
-log::debug!("chunk size would overflow u64");
+debug!("chunk size would overflow u64");
 Poll::Ready(Err(io::Error::new(
 io::ErrorKind::InvalidInput,
 "Invalid chunk size line: Size is too big",

@@ -124,7 +125,7 @@ impl ChunkedState {
 rem: &mut u64,
 buf: &mut Option<Bytes>,
 ) -> Poll<Result<ChunkedState, io::Error>> {
-log::trace!("Chunked read, remaining={:?}", rem);
+trace!("Chunked read, remaining={:?}", rem);

 let len = rdr.len() as u64;
 if len == 0 {

HTTP/1 message decoder:

@@ -6,7 +6,7 @@ use http::{
 header::{self, HeaderName, HeaderValue},
 Method, StatusCode, Uri, Version,
 };
-use log::{debug, error, trace};
+use tracing::{debug, error, trace};

 use super::chunked::ChunkedState;
 use crate::{error::ParseError, header::HeaderMap, ConnectionType, Request, ResponseHead};

@@ -46,6 +46,23 @@ pub(crate) enum PayloadLength {
 None,
 }

+impl PayloadLength {
+/// Returns true if variant is `None`.
+fn is_none(&self) -> bool {
+matches!(self, Self::None)
+}
+
+/// Returns true if variant is represents zero-length (not none) payload.
+fn is_zero(&self) -> bool {
+matches!(
+self,
+PayloadLength::Payload(PayloadType::Payload(PayloadDecoder {
+kind: Kind::Length(0)
+}))
+)
+}
+}
+
 pub(crate) trait MessageType: Sized {
 fn set_connection_type(&mut self, conn_type: Option<ConnectionType>);

@@ -59,6 +76,7 @@ pub(crate) trait MessageType: Sized {
 &mut self,
 slice: &Bytes,
 raw_headers: &[HeaderIndex],
+version: Version,
 ) -> Result<PayloadLength, ParseError> {
 let mut ka = None;
 let mut has_upgrade_websocket = false;

@@ -87,21 +105,23 @@ pub(crate) trait MessageType: Sized {
 return Err(ParseError::Header);
 }

-header::CONTENT_LENGTH => match value.to_str() {
-Ok(s) if s.trim().starts_with('+') => {
-debug!("illegal Content-Length: {:?}", s);
+header::CONTENT_LENGTH => match value.to_str().map(str::trim) {
+Ok(val) if val.starts_with('+') => {
+debug!("illegal Content-Length: {:?}", val);
 return Err(ParseError::Header);
 }
-Ok(s) => {
-if let Ok(len) = s.parse::<u64>() {
-if len != 0 {
-content_length = Some(len);
-}
+Ok(val) => {
+if let Ok(len) = val.parse::<u64>() {
+// accept 0 lengths here and remove them in `decode` after all
+// headers have been processed to prevent request smuggling issues
+content_length = Some(len);
 } else {
-debug!("illegal Content-Length: {:?}", s);
+debug!("illegal Content-Length: {:?}", val);
 return Err(ParseError::Header);
 }
 }

 Err(_) => {
 debug!("illegal Content-Length: {:?}", value);
 return Err(ParseError::Header);

@@ -114,22 +134,23 @@ pub(crate) trait MessageType: Sized {
 return Err(ParseError::Header);
 }

-header::TRANSFER_ENCODING => {
+header::TRANSFER_ENCODING if version == Version::HTTP_11 => {
 seen_te = true;

-if let Ok(s) = value.to_str().map(str::trim) {
-if s.eq_ignore_ascii_case("chunked") {
+if let Ok(val) = value.to_str().map(str::trim) {
+if val.eq_ignore_ascii_case("chunked") {
 chunked = true;
-} else if s.eq_ignore_ascii_case("identity") {
+} else if val.eq_ignore_ascii_case("identity") {
 // allow silently since multiple TE headers are already checked
 } else {
-debug!("illegal Transfer-Encoding: {:?}", s);
+debug!("illegal Transfer-Encoding: {:?}", val);
 return Err(ParseError::Header);
 }
 } else {
 return Err(ParseError::Header);
 }
 }

 // connection keep-alive state
 header::CONNECTION => {
 ka = if let Ok(conn) = value.to_str().map(str::trim) {

@@ -146,6 +167,7 @@ pub(crate) trait MessageType: Sized {
 None
 };
 }
+
 header::UPGRADE => {
 if let Ok(val) = value.to_str().map(str::trim) {
 if val.eq_ignore_ascii_case("websocket") {

@@ -153,19 +175,23 @@ pub(crate) trait MessageType: Sized {
 }
 }
 }

 header::EXPECT => {
 let bytes = value.as_bytes();
 if bytes.len() >= 4 && &bytes[0..4] == b"100-" {
 expect = true;
 }
 }

 _ => {}
 }

 headers.append(name, value);
 }
 }

 self.set_connection_type(ka);

 if expect {
 self.set_expect()
 }

@@ -249,7 +275,22 @@ impl MessageType for Request {
 let mut msg = Request::new();

 // convert headers
-let length = msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len])?;
+let mut length =
+msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len], ver)?;
+
+// disallow HTTP/1.0 POST requests that do not contain a Content-Length headers
+// see https://datatracker.ietf.org/doc/html/rfc1945#section-7.2.2
+if ver == Version::HTTP_10 && method == Method::POST && length.is_none() {
+debug!("no Content-Length specified for HTTP/1.0 POST request");
+return Err(ParseError::Header);
+}
+
+// Remove CL value if 0 now that all headers and HTTP/1.0 special cases are processed.
+// Protects against some request smuggling attacks.
+// See https://github.com/actix/actix-web/issues/2767.
+if length.is_zero() {
+length = PayloadLength::None;
+}

 // payload decoder
 let decoder = match length {

@@ -293,22 +334,35 @@ impl MessageType for ResponseHead {
 let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;

 let (len, ver, status, h_len) = {
-let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;
+// SAFETY:
+// Create an uninitialized array of `MaybeUninit`. The `assume_init` is safe because the
+// type we are claiming to have initialized here is a bunch of `MaybeUninit`s, which
+// do not require initialization.
+let mut parsed = unsafe {
+MaybeUninit::<[MaybeUninit<httparse::Header<'_>>; MAX_HEADERS]>::uninit()
+.assume_init()
+};

-let mut res = httparse::Response::new(&mut parsed);
-match res.parse(src)? {
+let mut res = httparse::Response::new(&mut []);
+
+let mut config = httparse::ParserConfig::default();
+config.allow_spaces_after_header_name_in_responses(true);
+
+match config.parse_response_with_uninit_headers(&mut res, src, &mut parsed)? {
 httparse::Status::Complete(len) => {
 let version = if res.version.unwrap() == 1 {
 Version::HTTP_11
 } else {
 Version::HTTP_10
 };

 let status = StatusCode::from_u16(res.code.unwrap())
 .map_err(|_| ParseError::Status)?;
 HeaderIndex::record(src, res.headers, &mut headers);

 (len, version, status, res.headers.len())
 }

 httparse::Status::Partial => {
 return if src.len() >= MAX_BUFFER_SIZE {
 error!("MAX_BUFFER_SIZE unprocessed data reached, closing");

@@ -324,7 +378,15 @@ impl MessageType for ResponseHead {
 msg.version = ver;

 // convert headers
-let length = msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len])?;
+let mut length =
+msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len], ver)?;
+
+// Remove CL value if 0 now that all headers and HTTP/1.0 special cases are processed.
+// Protects against some request smuggling attacks.
+// See https://github.com/actix/actix-web/issues/2767.
+if length.is_zero() {
+length = PayloadLength::None;
+}

 // message payload
 let decoder = if let PayloadLength::Payload(pl) = length {

@@ -360,9 +422,6 @@ pub(crate) const EMPTY_HEADER_INDEX: HeaderIndex = HeaderIndex {
 pub(crate) const EMPTY_HEADER_INDEX_ARRAY: [HeaderIndex; MAX_HEADERS] =
 [EMPTY_HEADER_INDEX; MAX_HEADERS];

-pub(crate) const EMPTY_HEADER_ARRAY: [httparse::Header<'static>; MAX_HEADERS] =
-[httparse::EMPTY_HEADER; MAX_HEADERS];
-
 impl HeaderIndex {
 pub(crate) fn record(
 bytes: &[u8],

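The decoder changes above defer the `Content-Length: 0` special case until after every header has been seen, and reject HTTP/1.0 POSTs that declare no length at all. A simplified, self-contained sketch of that ordering (these are stand-in types and a made-up `finalize` helper, not the crate's internal `PayloadLength`/`PayloadDecoder`):

```rust
// Stand-in for the real PayloadLength: either "Content-Length: n" or no length info at all.
#[derive(Clone, Copy, Debug, PartialEq)]
enum PayloadLength {
    Payload(u64),
    None,
}

impl PayloadLength {
    fn is_none(self) -> bool {
        self == PayloadLength::None
    }
    fn is_zero(self) -> bool {
        self == PayloadLength::Payload(0)
    }
}

fn finalize(is_http10_post: bool, mut length: PayloadLength) -> Result<PayloadLength, &'static str> {
    // RFC 1945 §7.2.2: an HTTP/1.0 POST must declare its body length.
    if is_http10_post && length.is_none() {
        return Err("no Content-Length specified for HTTP/1.0 POST request");
    }
    // Only now is a zero length collapsed to "no payload"; doing this earlier would hide
    // a conflicting second Content-Length or Transfer-Encoding header from the checks above.
    if length.is_zero() {
        length = PayloadLength::None;
    }
    Ok(length)
}

fn main() {
    assert!(finalize(true, PayloadLength::None).is_err());
    assert_eq!(finalize(false, PayloadLength::Payload(0)), Ok(PayloadLength::None));
    assert_eq!(finalize(false, PayloadLength::Payload(3)), Ok(PayloadLength::Payload(3)));
}
```

The tests that follow exercise exactly these cases against the real decoder with raw request bytes.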
HTTP/1 message decoder tests:

@@ -596,14 +655,100 @@ mod tests {
 }

 #[test]
-fn test_parse_post() {
-let mut buf = BytesMut::from("POST /test2 HTTP/1.0\r\n\r\n");
+fn parse_h09_reject() {
+let mut buf = BytesMut::from(
+"GET /test1 HTTP/0.9\r\n\
+\r\n",
+);
+
+let mut reader = MessageDecoder::<Request>::default();
+reader.decode(&mut buf).unwrap_err();
+
+let mut buf = BytesMut::from(
+"POST /test2 HTTP/0.9\r\n\
+Content-Length: 3\r\n\
+\r\n
+abc",
+);
+
+let mut reader = MessageDecoder::<Request>::default();
+reader.decode(&mut buf).unwrap_err();
+}
+
+#[test]
+fn parse_h10_get() {
+let mut buf = BytesMut::from(
+"GET /test1 HTTP/1.0\r\n\
+\r\n",
+);
+
+let mut reader = MessageDecoder::<Request>::default();
+let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
+assert_eq!(req.version(), Version::HTTP_10);
+assert_eq!(*req.method(), Method::GET);
+assert_eq!(req.path(), "/test1");
+
+let mut buf = BytesMut::from(
+"GET /test2 HTTP/1.0\r\n\
+Content-Length: 0\r\n\
+\r\n",
+);
+
+let mut reader = MessageDecoder::<Request>::default();
+let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
+assert_eq!(req.version(), Version::HTTP_10);
+assert_eq!(*req.method(), Method::GET);
+assert_eq!(req.path(), "/test2");
+
+let mut buf = BytesMut::from(
+"GET /test3 HTTP/1.0\r\n\
+Content-Length: 3\r\n\
+\r\n
+abc",
+);
+
+let mut reader = MessageDecoder::<Request>::default();
+let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
+assert_eq!(req.version(), Version::HTTP_10);
+assert_eq!(*req.method(), Method::GET);
+assert_eq!(req.path(), "/test3");
+}
+
+#[test]
+fn parse_h10_post() {
+let mut buf = BytesMut::from(
+"POST /test1 HTTP/1.0\r\n\
+Content-Length: 3\r\n\
+\r\n\
+abc",
+);
+
+let mut reader = MessageDecoder::<Request>::default();
+let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
+assert_eq!(req.version(), Version::HTTP_10);
+assert_eq!(*req.method(), Method::POST);
+assert_eq!(req.path(), "/test1");
+
+let mut buf = BytesMut::from(
+"POST /test2 HTTP/1.0\r\n\
+Content-Length: 0\r\n\
+\r\n",
+);
+
 let mut reader = MessageDecoder::<Request>::default();
 let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
 assert_eq!(req.version(), Version::HTTP_10);
 assert_eq!(*req.method(), Method::POST);
 assert_eq!(req.path(), "/test2");
+
+let mut buf = BytesMut::from(
+"POST /test3 HTTP/1.0\r\n\
+\r\n",
+);
+
+let mut reader = MessageDecoder::<Request>::default();
+let err = reader.decode(&mut buf).unwrap_err();
+assert!(err.to_string().contains("Header"))
 }

 #[test]

@@ -971,6 +1116,17 @@ mod tests {
 );

 expect_parse_err!(&mut buf);
+
+let mut buf = BytesMut::from(
+"GET / HTTP/1.1\r\n\
+Host: example.com\r\n\
+Content-Length: 0\r\n\
+Content-Length: 2\r\n\
+\r\n\
+ab",
+);
+
+expect_parse_err!(&mut buf);
 }

 #[test]

@@ -986,6 +1142,40 @@ mod tests {
 expect_parse_err!(&mut buf);
 }

+#[test]
+fn hrs_te_http10() {
+// in HTTP/1.0 transfer encoding is ignored and must therefore contain a CL header
+
+let mut buf = BytesMut::from(
+"POST / HTTP/1.0\r\n\
+Host: example.com\r\n\
+Transfer-Encoding: chunked\r\n\
+\r\n\
+3\r\n\
+aaa\r\n\
+0\r\n\
+",
+);
+
+expect_parse_err!(&mut buf);
+}
+
+#[test]
+fn hrs_cl_and_te_http10() {
+// in HTTP/1.0 transfer encoding is simply ignored so it's fine to have both
+
+let mut buf = BytesMut::from(
+"GET / HTTP/1.0\r\n\
+Host: example.com\r\n\
+Content-Length: 3\r\n\
+Transfer-Encoding: chunked\r\n\
+\r\n\
+000",
+);
+
+parse_ready!(&mut buf);
+}
+
 #[test]
 fn hrs_unknown_transfer_encoding() {
 let mut buf = BytesMut::from(

@@ -15,13 +15,14 @@ use bitflags::bitflags;
 use bytes::{Buf, BytesMut};
 use futures_core::ready;
 use pin_project_lite::pin_project;
+use tracing::{error, trace};

 use crate::{
     body::{BodySize, BoxBody, MessageBody},
     config::ServiceConfig,
     error::{DispatchError, ParseError, PayloadError},
     service::HttpFlow,
-    ConnectionType, Error, Extensions, OnConnectData, Request, Response, StatusCode,
+    Error, Extensions, OnConnectData, Request, Response, StatusCode,
 };

 use super::{
@@ -336,7 +337,7 @@ where
         while written < len {
             match io.as_mut().poll_write(cx, &write_buf[written..])? {
                 Poll::Ready(0) => {
-                    log::error!("write zero; closing");
+                    error!("write zero; closing");
                     return Poll::Ready(Err(io::Error::new(io::ErrorKind::WriteZero, "")));
                 }

@@ -568,7 +569,7 @@ where
             }

             StateProj::ExpectCall { fut } => {
-                log::trace!(" calling expect service");
+                trace!(" calling expect service");

                 match fut.poll(cx) {
                     // expect resolved. write continue to buffer and set InnerDispatcher state
@@ -690,74 +691,12 @@ where
         let can_not_read = !self.can_read(cx);

         // limit amount of non-processed requests
-        if pipeline_queue_full {
+        if pipeline_queue_full || can_not_read {
             return Ok(false);
         }

         let mut this = self.as_mut().project();

-        if can_not_read {
-            log::debug!("cannot read request payload");
-
-            if let Some(sender) = &this.payload {
-                // ...maybe handler does not want to read any more payload...
-                if let PayloadStatus::Dropped = sender.need_read(cx) {
-                    log::debug!("handler dropped payload early; attempt to clean connection");
-                    // ...in which case poll request payload a few times
-                    loop {
-                        match this.codec.decode(this.read_buf)? {
-                            Some(msg) => {
-                                match msg {
-                                    // payload decoded did not yield EOF yet
-                                    Message::Chunk(Some(_)) => {
-                                        // if non-clean connection, next loop iter will detect empty
-                                        // read buffer and close connection
-                                    }
-
-                                    // connection is in clean state for next request
-                                    Message::Chunk(None) => {
-                                        log::debug!("connection successfully cleaned");
-
-                                        // reset dispatcher state
-                                        let _ = this.payload.take();
-                                        this.state.set(State::None);
-
-                                        // break out of payload decode loop
-                                        break;
-                                    }
-
-                                    // Either whole payload is read and loop is broken or more data
-                                    // was expected in which case connection is closed. In both
-                                    // situations dispatcher cannot get here.
-                                    Message::Item(_) => {
-                                        unreachable!("dispatcher is in payload receive state")
-                                    }
-                                }
-                            }
-
-                            // not enough info to decide if connection is going to be clean or not
-                            None => {
-                                log::error!(
-                                    "handler did not read whole payload and dispatcher could not \
-                                    drain read buf; return 500 and close connection"
-                                );
-
-                                this.flags.insert(Flags::SHUTDOWN);
-                                let mut res = Response::internal_server_error().drop_body();
-                                res.head_mut().set_connection_type(ConnectionType::Close);
-                                this.messages.push_back(DispatcherMessage::Error(res));
-                                *this.error = Some(DispatchError::HandlerDroppedPayload);
-                                return Ok(true);
-                            }
-                        }
-                    }
-                }
-            } else {
-                // can_not_read and no request payload
-                return Ok(false);
-            }
-        }
-
         let mut updated = false;

         // decode from read buf as many full requests as possible
@@ -813,7 +752,7 @@ where
                     if let Some(ref mut payload) = this.payload {
                         payload.feed_data(chunk);
                     } else {
-                        log::error!("Internal server error: unexpected payload chunk");
+                        error!("Internal server error: unexpected payload chunk");
                         this.flags.insert(Flags::READ_DISCONNECT);
                         this.messages.push_back(DispatcherMessage::Error(
                             Response::internal_server_error().drop_body(),
@@ -827,7 +766,7 @@ where
                     if let Some(mut payload) = this.payload.take() {
                         payload.feed_eof();
                     } else {
-                        log::error!("Internal server error: unexpected eof");
+                        error!("Internal server error: unexpected eof");
                         this.flags.insert(Flags::READ_DISCONNECT);
                         this.messages.push_back(DispatcherMessage::Error(
                             Response::internal_server_error().drop_body(),
@@ -844,7 +783,7 @@ where
                 Ok(None) => break,

                 Err(ParseError::Io(err)) => {
-                    log::trace!("I/O error: {}", &err);
+                    trace!("I/O error: {}", &err);
                     self.as_mut().client_disconnected();
                     this = self.as_mut().project();
                     *this.error = Some(DispatchError::Io(err));
@@ -852,7 +791,7 @@ where
                 }

                 Err(ParseError::TooLarge) => {
-                    log::trace!("request head was too big; returning 431 response");
+                    trace!("request head was too big; returning 431 response");

                     if let Some(mut payload) = this.payload.take() {
                         payload.set_error(PayloadError::Overflow);
@@ -872,7 +811,7 @@ where
                 }

                 Err(err) => {
-                    log::trace!("parse error {}", &err);
+                    trace!("parse error {}", &err);

                     if let Some(mut payload) = this.payload.take() {
                         payload.set_error(PayloadError::EncodingCorrupted);
@@ -903,10 +842,7 @@ where
                 if timer.as_mut().poll(cx).is_ready() {
                     // timeout on first request (slow request) return 408

-                    log::trace!(
-                        "timed out on slow request; \
-                        replying with 408 and closing connection"
-                    );
+                    trace!("timed out on slow request; replying with 408 and closing connection");

                     let _ = self.as_mut().send_error_response(
                         Response::with_body(StatusCode::REQUEST_TIMEOUT, ()),
@@ -949,7 +885,7 @@ where
             // keep-alive timer has timed out
             if timer.as_mut().poll(cx).is_ready() {
                 // no tasks at hand
-                log::trace!("timer timed out; closing connection");
+                trace!("timer timed out; closing connection");
                 this.flags.insert(Flags::SHUTDOWN);

                 if let Some(deadline) = this.config.client_disconnect_deadline() {
@@ -979,7 +915,7 @@ where

                 // timed-out during shutdown; drop connection
                 if timer.as_mut().poll(cx).is_ready() {
-                    log::trace!("timed-out during shutdown");
+                    trace!("timed-out during shutdown");
                     return Err(DispatchError::DisconnectTimeout);
                 }
             }
@@ -1138,12 +1074,12 @@ where

         match this.inner.project() {
             DispatcherStateProj::Upgrade { fut: upgrade } => upgrade.poll(cx).map_err(|err| {
-                log::error!("Upgrade handler error: {}", err);
+                error!("Upgrade handler error: {}", err);
                 DispatchError::Upgrade
             }),

             DispatcherStateProj::Normal { mut inner } => {
-                log::trace!("start flags: {:?}", &inner.flags);
+                trace!("start flags: {:?}", &inner.flags);

                 trace_timer_states(
                     "start",
@@ -1250,7 +1186,7 @@ where

                 // client is gone
                 if inner.flags.contains(Flags::WRITE_DISCONNECT) {
-                    log::trace!("client is gone; disconnecting");
+                    trace!("client is gone; disconnecting");
                     return Poll::Ready(Ok(()));
                 }

@@ -1259,14 +1195,14 @@ where

                 // read half is closed; we do not process any responses
                 if inner_p.flags.contains(Flags::READ_DISCONNECT) && state_is_none {
-                    log::trace!("read half closed; start shutdown");
+                    trace!("read half closed; start shutdown");
                     inner_p.flags.insert(Flags::SHUTDOWN);
                 }

                 // keep-alive and stream errors
                 if state_is_none && inner_p.write_buf.is_empty() {
                     if let Some(err) = inner_p.error.take() {
-                        log::error!("stream error: {}", &err);
+                        error!("stream error: {}", &err);
                         return Poll::Ready(Err(err));
                     }

@@ -1295,7 +1231,7 @@ where
                     Poll::Pending
                 };

-                log::trace!("end flags: {:?}", &inner.flags);
+                trace!("end flags: {:?}", &inner.flags);

                 poll
             }
@@ -1310,17 +1246,17 @@ fn trace_timer_states(
     ka_timer: &TimerState,
     shutdown_timer: &TimerState,
 ) {
-    log::trace!("{} timers:", label);
+    trace!("{} timers:", label);

     if head_timer.is_enabled() {
-        log::trace!(" head {}", &head_timer);
+        trace!(" head {}", &head_timer);
     }

     if ka_timer.is_enabled() {
-        log::trace!(" keep-alive {}", &ka_timer);
+        trace!(" keep-alive {}", &ka_timer);
     }

     if shutdown_timer.is_enabled() {
-        log::trace!(" shutdown {}", &shutdown_timer);
+        trace!(" shutdown {}", &shutdown_timer);
     }
 }
@@ -783,6 +783,9 @@ async fn upgrade_handling() {
     .await;
 }

+// fix in #2624 reverted temporarily
+// complete fix tracked in #2745
+#[ignore]
 #[actix_rt::test]
 async fn handler_drop_payload() {
     let _ = env_logger::try_init();
@@ -263,7 +263,8 @@ mod tests {
     assert_not_impl_any!(Payload: Send, Sync, UnwindSafe, RefUnwindSafe);

     assert_impl_all!(Inner: Unpin, Send, Sync);
-    assert_not_impl_any!(Inner: UnwindSafe, RefUnwindSafe);
+    // assertion not stable wrt rustc versions yet
+    // assert_impl_all!(Inner: UnwindSafe, RefUnwindSafe);

     #[actix_rt::test]
     async fn test_unread_data() {
@@ -13,6 +13,7 @@ use actix_service::{
 };
 use actix_utils::future::ready;
 use futures_core::future::LocalBoxFuture;
+use tracing::error;

 use crate::{
     body::{BoxBody, MessageBody},
@@ -305,13 +306,13 @@ where
         Box::pin(async move {
             let expect = expect
                 .await
-                .map_err(|e| log::error!("Init http expect service error: {:?}", e))?;
+                .map_err(|e| error!("Init http expect service error: {:?}", e))?;

             let upgrade = match upgrade {
                 Some(upgrade) => {
                     let upgrade = upgrade
                         .await
-                        .map_err(|e| log::error!("Init http upgrade service error: {:?}", e))?;
+                        .map_err(|e| error!("Init http upgrade service error: {:?}", e))?;
                     Some(upgrade)
                 }
                 None => None,
@@ -319,7 +320,7 @@ where

             let service = service
                 .await
-                .map_err(|e| log::error!("Init http service error: {:?}", e))?;
+                .map_err(|e| error!("Init http service error: {:?}", e))?;

             Ok(H1ServiceHandler::new(
                 cfg,
@@ -357,7 +358,7 @@ where

     fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
         self._poll_ready(cx).map_err(|err| {
-            log::error!("HTTP/1 service readiness error: {:?}", err);
+            error!("HTTP/1 service readiness error: {:?}", err);
             DispatchError::Service(err)
         })
     }
@@ -1,6 +1,7 @@
 use std::{fmt, future::Future, pin::Pin, task::Context};

 use actix_rt::time::{Instant, Sleep};
+use tracing::trace;

 #[derive(Debug)]
 pub(super) enum TimerState {
@@ -24,7 +25,7 @@ impl TimerState {

     pub(super) fn set(&mut self, timer: Sleep, line: u32) {
         if matches!(self, Self::Disabled) {
-            log::trace!("setting disabled timer from line {}", line);
+            trace!("setting disabled timer from line {}", line);
         }

         *self = Self::Active {
@@ -39,11 +40,11 @@ impl TimerState {

     pub(super) fn clear(&mut self, line: u32) {
         if matches!(self, Self::Disabled) {
-            log::trace!("trying to clear a disabled timer from line {}", line);
+            trace!("trying to clear a disabled timer from line {}", line);
         }

         if matches!(self, Self::Inactive) {
-            log::trace!("trying to clear an inactive timer from line {}", line);
+            trace!("trying to clear an inactive timer from line {}", line);
         }

         *self = Self::Inactive;
@@ -19,8 +19,8 @@ use h2::{
     server::{Connection, SendResponse},
     Ping, PingPong,
 };
-use log::{error, trace};
 use pin_project_lite::pin_project;
+use tracing::{error, trace, warn};

 use crate::{
     body::{BodySize, BoxBody, MessageBody},
@@ -143,7 +143,7 @@ where
                         DispatchError::SendResponse(err) => {
                             trace!("Error sending HTTP/2 response: {:?}", err)
                         }
-                        DispatchError::SendData(err) => log::warn!("{:?}", err),
+                        DispatchError::SendData(err) => warn!("{:?}", err),
                         DispatchError::ResponseBody(err) => {
                             error!("Response payload stream error: {:?}", err)
                         }
@@ -14,7 +14,7 @@ use actix_service::{
 };
 use actix_utils::future::ready;
 use futures_core::{future::LocalBoxFuture, ready};
-use log::error;
+use tracing::{error, trace};

 use crate::{
     body::{BoxBody, MessageBody},
@@ -355,7 +355,7 @@ where
                 }

                 Err(err) => {
-                    log::trace!("H2 handshake error: {}", err);
+                    trace!("H2 handshake error: {}", err);
                     Poll::Ready(Err(err))
                 }
             },
@@ -25,6 +25,7 @@
 )]
 #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
 #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
+#![cfg_attr(docsrs, feature(doc_cfg))]

 pub use ::http::{uri, uri::Uri};
 pub use ::http::{Method, StatusCode, Version};
@@ -69,6 +70,8 @@ pub use self::payload::{BoxedPayloadStream, Payload, PayloadStream};
 pub use self::requests::{Request, RequestHead, RequestHeadType};
 pub use self::responses::{Response, ResponseBuilder, ResponseHead};
 pub use self::service::HttpService;
+#[cfg(any(feature = "openssl", feature = "rustls"))]
+pub use self::service::TlsAcceptorConfig;

 /// A major HTTP protocol version.
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
@@ -13,7 +13,8 @@ use crate::error::PayloadError;
 /// A boxed payload stream.
 pub type BoxedPayloadStream = Pin<Box<dyn Stream<Item = Result<Bytes, PayloadError>>>>;

-#[deprecated(since = "4.0.0", note = "Renamed to `BoxedPayloadStream`.")]
+#[doc(hidden)]
+#[deprecated(since = "3.0.0", note = "Renamed to `BoxedPayloadStream`.")]
 pub type PayloadStream = BoxedPayloadStream;

 #[cfg(not(feature = "http2"))]
@@ -144,7 +144,7 @@ impl ResponseBuilder {
         self
     }

-    /// Set connection type to Upgrade
+    /// Set connection type to `Upgrade`.
     #[inline]
     pub fn upgrade<V>(&mut self, value: V) -> &mut Self
     where
@@ -161,7 +161,7 @@ impl ResponseBuilder {
         self
     }

-    /// Force close connection, even if it is marked as keep-alive
+    /// Force-close connection, even if it is marked as keep-alive.
     #[inline]
     pub fn force_close(&mut self) -> &mut Self {
         if let Some(parts) = self.inner() {
|
@ -237,7 +237,7 @@ mod tests {
|
|||||||
.await;
|
.await;
|
||||||
|
|
||||||
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
|
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
|
||||||
let _ = stream
|
stream
|
||||||
.write_all(b"GET /camel HTTP/1.1\r\nConnection: Close\r\n\r\n")
|
.write_all(b"GET /camel HTTP/1.1\r\nConnection: Close\r\n\r\n")
|
||||||
.unwrap();
|
.unwrap();
|
||||||
let mut data = vec![];
|
let mut data = vec![];
|
||||||
@ -251,7 +251,7 @@ mod tests {
|
|||||||
assert!(memmem::find(&data, b"content-length").is_none());
|
assert!(memmem::find(&data, b"content-length").is_none());
|
||||||
|
|
||||||
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
|
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
|
||||||
let _ = stream
|
stream
|
||||||
.write_all(b"GET /lower HTTP/1.1\r\nConnection: Close\r\n\r\n")
|
.write_all(b"GET /lower HTTP/1.1\r\nConnection: Close\r\n\r\n")
|
||||||
.unwrap();
|
.unwrap();
|
||||||
let mut data = vec![];
|
let mut data = vec![];
|
||||||
|
@@ -15,6 +15,7 @@ use actix_service::{
 };
 use futures_core::{future::LocalBoxFuture, ready};
 use pin_project_lite::pin_project;
+use tracing::error;

 use crate::{
     body::{BoxBody, MessageBody},
@@ -180,6 +181,25 @@ where
     }
 }

+/// Configuration options used when accepting TLS connection.
+#[cfg(any(feature = "openssl", feature = "rustls"))]
+#[cfg_attr(docsrs, doc(cfg(any(feature = "openssl", feature = "rustls"))))]
+#[derive(Debug, Default)]
+pub struct TlsAcceptorConfig {
+    pub(crate) handshake_timeout: Option<std::time::Duration>,
+}
+
+#[cfg(any(feature = "openssl", feature = "rustls"))]
+impl TlsAcceptorConfig {
+    /// Set TLS handshake timeout duration.
+    pub fn handshake_timeout(self, dur: std::time::Duration) -> Self {
+        Self {
+            handshake_timeout: Some(dur),
+            // ..self
+        }
+    }
+}
+
 #[cfg(feature = "openssl")]
 mod openssl {
     use actix_service::ServiceFactoryExt as _;
|
|||||||
Error = TlsError<SslError, DispatchError>,
|
Error = TlsError<SslError, DispatchError>,
|
||||||
InitError = (),
|
InitError = (),
|
||||||
> {
|
> {
|
||||||
Acceptor::new(acceptor)
|
self.openssl_with_config(acceptor, TlsAcceptorConfig::default())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create OpenSSL based service with custom TLS acceptor configuration.
|
||||||
|
pub fn openssl_with_config(
|
||||||
|
self,
|
||||||
|
acceptor: SslAcceptor,
|
||||||
|
tls_acceptor_config: TlsAcceptorConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<SslError, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
let mut acceptor = Acceptor::new(acceptor);
|
||||||
|
|
||||||
|
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
|
||||||
|
acceptor.set_handshake_timeout(handshake_timeout);
|
||||||
|
}
|
||||||
|
|
||||||
|
acceptor
|
||||||
.map_init_err(|_| {
|
.map_init_err(|_| {
|
||||||
unreachable!("TLS acceptor service factory does not error on init")
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
})
|
})
|
||||||
@ -292,8 +333,23 @@ mod rustls {
|
|||||||
{
|
{
|
||||||
/// Create Rustls based service.
|
/// Create Rustls based service.
|
||||||
pub fn rustls(
|
pub fn rustls(
|
||||||
|
self,
|
||||||
|
config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
self.rustls_with_config(config, TlsAcceptorConfig::default())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create Rustls based service with custom TLS acceptor configuration.
|
||||||
|
pub fn rustls_with_config(
|
||||||
self,
|
self,
|
||||||
mut config: ServerConfig,
|
mut config: ServerConfig,
|
||||||
|
tls_acceptor_config: TlsAcceptorConfig,
|
||||||
) -> impl ServiceFactory<
|
) -> impl ServiceFactory<
|
||||||
TcpStream,
|
TcpStream,
|
||||||
Config = (),
|
Config = (),
|
||||||
@ -305,7 +361,13 @@ mod rustls {
|
|||||||
protos.extend_from_slice(&config.alpn_protocols);
|
protos.extend_from_slice(&config.alpn_protocols);
|
||||||
config.alpn_protocols = protos;
|
config.alpn_protocols = protos;
|
||||||
|
|
||||||
Acceptor::new(config)
|
let mut acceptor = Acceptor::new(config);
|
||||||
|
|
||||||
|
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
|
||||||
|
acceptor.set_handshake_timeout(handshake_timeout);
|
||||||
|
}
|
||||||
|
|
||||||
|
acceptor
|
||||||
.map_init_err(|_| {
|
.map_init_err(|_| {
|
||||||
unreachable!("TLS acceptor service factory does not error on init")
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
})
|
})
|
||||||
@@ -369,13 +431,13 @@ where
         Box::pin(async move {
             let expect = expect
                 .await
-                .map_err(|e| log::error!("Init http expect service error: {:?}", e))?;
+                .map_err(|e| error!("Init http expect service error: {:?}", e))?;

             let upgrade = match upgrade {
                 Some(upgrade) => {
                     let upgrade = upgrade
                         .await
-                        .map_err(|e| log::error!("Init http upgrade service error: {:?}", e))?;
+                        .map_err(|e| error!("Init http upgrade service error: {:?}", e))?;
                     Some(upgrade)
                 }
                 None => None,
@@ -383,7 +445,7 @@ where

             let service = service
                 .await
-                .map_err(|e| log::error!("Init http service error: {:?}", e))?;
+                .map_err(|e| error!("Init http service error: {:?}", e))?;

             Ok(HttpServiceHandler::new(
                 cfg,
@@ -490,7 +552,7 @@ where

     fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
         self._poll_ready(cx).map_err(|err| {
-            log::error!("HTTP service readiness error: {:?}", err);
+            error!("HTTP service readiness error: {:?}", err);
             DispatchError::Service(err)
         })
     }
@@ -666,7 +728,7 @@ where
                     self.poll(cx)
                 }
                 Err(err) => {
-                    log::trace!("H2 handshake error: {}", err);
+                    tracing::trace!("H2 handshake error: {}", err);
                     Poll::Ready(Err(err))
                 }
             }
@@ -19,29 +7,7 @@ use crate::{
     Request,
 };

-/// Test `Request` builder
-///
-/// ```ignore
-/// # use http::{header, StatusCode};
-/// # use actix_web::*;
-/// use actix_web::test::TestRequest;
-///
-/// fn index(req: &HttpRequest) -> Response {
-///     if let Some(hdr) = req.headers().get(header::CONTENT_TYPE) {
-///         Response::Ok().into()
-///     } else {
-///         Response::BadRequest().into()
-///     }
-/// }
-///
-/// let resp = TestRequest::default().insert_header("content-type", "text/plain")
-///     .run(&index)
-///     .unwrap();
-/// assert_eq!(resp.status(), StatusCode::OK);
-///
-/// let resp = TestRequest::default().run(&index).unwrap();
-/// assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
-/// ```
+/// Test `Request` builder.
 pub struct TestRequest(Option<Inner>);

 struct Inner {
@@ -2,6 +2,7 @@ use actix_codec::{Decoder, Encoder};
 use bitflags::bitflags;
 use bytes::{Bytes, BytesMut};
 use bytestring::ByteString;
+use tracing::error;

 use super::{
     frame::Parser,
@@ -253,7 +254,7 @@ impl Decoder for Codec {
                     }
                 }
                 _ => {
-                    log::error!("Unfinished fragment {:?}", opcode);
+                    error!("Unfinished fragment {:?}", opcode);
                     Err(ProtocolError::ContinuationFragment(opcode))
                 }
             };
@@ -73,8 +73,8 @@ mod inner {
     use actix_service::{IntoService, Service};
     use futures_core::stream::Stream;
     use local_channel::mpsc;
-    use log::debug;
     use pin_project_lite::pin_project;
+    use tracing::debug;

     use actix_codec::{AsyncRead, AsyncWrite, Decoder, Encoder, Framed};

@@ -1,7 +1,7 @@
 use std::convert::TryFrom;

 use bytes::{Buf, BufMut, BytesMut};
-use log::debug;
+use tracing::debug;

 use super::{
     mask::apply_mask,
@@ -17,7 +17,6 @@ impl Parser {
     fn parse_metadata(
         src: &[u8],
         server: bool,
-        max_size: usize,
     ) -> Result<Option<(usize, bool, OpCode, usize, Option<[u8; 4]>)>, ProtocolError> {
         let chunk_len = src.len();

@@ -60,20 +59,12 @@ impl Parser {
                 return Ok(None);
             }
             let len = u64::from_be_bytes(TryFrom::try_from(&src[idx..idx + 8]).unwrap());
-            if len > max_size as u64 {
-                return Err(ProtocolError::Overflow);
-            }
             idx += 8;
             len as usize
         } else {
             len as usize
         };

-        // check for max allowed size
-        if length > max_size {
-            return Err(ProtocolError::Overflow);
-        }
-
         let mask = if server {
             if chunk_len < idx + 4 {
                 return Ok(None);
@@ -98,11 +89,10 @@ impl Parser {
         max_size: usize,
     ) -> Result<Option<(bool, OpCode, Option<BytesMut>)>, ProtocolError> {
         // try to parse ws frame metadata
-        let (idx, finished, opcode, length, mask) =
-            match Parser::parse_metadata(src, server, max_size)? {
-                None => return Ok(None),
-                Some(res) => res,
-            };
+        let (idx, finished, opcode, length, mask) = match Parser::parse_metadata(src, server)? {
+            None => return Ok(None),
+            Some(res) => res,
+        };

         // not enough data
         if src.len() < idx + length {
@@ -112,6 +102,13 @@ impl Parser {
         // remove prefix
         src.advance(idx);

+        // check for max allowed size
+        if length > max_size {
+            // drop the payload
+            src.advance(length);
+            return Err(ProtocolError::Overflow);
+        }
+
         // no need for body
         if length == 0 {
             return Ok(Some((finished, opcode, None)));
@@ -339,6 +336,30 @@ mod tests {
         }
     }

+    #[test]
+    fn test_parse_frame_max_size_recoverability() {
+        let mut buf = BytesMut::new();
+        // The first text frame with length == 2, payload doesn't matter.
+        buf.extend(&[0b0000_0001u8, 0b0000_0010u8, 0b0000_0000u8, 0b0000_0000u8]);
+        // Next binary frame with length == 2 and payload == `[0x1111_1111u8, 0x1111_1111u8]`.
+        buf.extend(&[0b0000_0010u8, 0b0000_0010u8, 0b1111_1111u8, 0b1111_1111u8]);
+
+        assert_eq!(buf.len(), 8);
+        assert!(matches!(
+            Parser::parse(&mut buf, false, 1),
+            Err(ProtocolError::Overflow)
+        ));
+        assert_eq!(buf.len(), 4);
+        let frame = extract(Parser::parse(&mut buf, false, 2));
+        assert!(!frame.finished);
+        assert_eq!(frame.opcode, OpCode::Binary);
+        assert_eq!(
+            frame.payload,
+            Bytes::from(vec![0b1111_1111u8, 0b1111_1111u8])
+        );
+        assert_eq!(buf.len(), 0);
+    }
+
     #[test]
     fn test_ping_frame() {
         let mut buf = BytesMut::new();
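The new `test_parse_frame_max_size_recoverability` test above pins down the behavioural change in this hunk: when a frame exceeds `max_size`, `Parser::parse` now consumes (and drops) the oversized payload before returning `ProtocolError::Overflow`, leaving the buffer at the next frame boundary. A rough sketch of what that allows a caller to do, written as if it sat next to the tests above so that `Parser`, `ProtocolError`, and `BytesMut` are already in scope (this helper is not part of the patch):

```rust
/// Drain every complete frame currently in `buf`, skipping frames larger than
/// `max_size` instead of treating the whole connection as poisoned.
fn drain_frames(buf: &mut BytesMut, server: bool, max_size: usize) -> Result<usize, ProtocolError> {
    let mut accepted = 0;

    loop {
        match Parser::parse(buf, server, max_size) {
            // a complete frame; a real caller would hand it to its protocol layer
            Ok(Some((_finished, _opcode, _payload))) => accepted += 1,
            // not enough buffered data for another frame
            Ok(None) => return Ok(accepted),
            // oversized frame: its payload has already been dropped from `buf`,
            // so parsing can continue with the next frame
            Err(ProtocolError::Overflow) => continue,
            Err(err) => return Err(err),
        }
    }
}
```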
||||||
|
@ -3,6 +3,8 @@ use std::{
|
|||||||
fmt,
|
fmt,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use tracing::error;
|
||||||
|
|
||||||
/// Operation codes defined in [RFC 6455 §11.8].
|
/// Operation codes defined in [RFC 6455 §11.8].
|
||||||
///
|
///
|
||||||
/// [RFC 6455]: https://datatracker.ietf.org/doc/html/rfc6455#section-11.8
|
/// [RFC 6455]: https://datatracker.ietf.org/doc/html/rfc6455#section-11.8
|
||||||
@ -58,7 +60,7 @@ impl From<OpCode> for u8 {
|
|||||||
Ping => 9,
|
Ping => 9,
|
||||||
Pong => 10,
|
Pong => 10,
|
||||||
Bad => {
|
Bad => {
|
||||||
log::error!("Attempted to convert invalid opcode to u8. This is a bug.");
|
error!("Attempted to convert invalid opcode to u8. This is a bug.");
|
||||||
8 // if this somehow happens, a close frame will help us tear down quickly
|
8 // if this somehow happens, a close frame will help us tear down quickly
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -2,13 +2,13 @@
|
|||||||
|
|
||||||
extern crate tls_openssl as openssl;
|
extern crate tls_openssl as openssl;
|
||||||
|
|
||||||
use std::{convert::Infallible, io};
|
use std::{convert::Infallible, io, time::Duration};
|
||||||
|
|
||||||
use actix_http::{
|
use actix_http::{
|
||||||
body::{BodyStream, BoxBody, SizedStream},
|
body::{BodyStream, BoxBody, SizedStream},
|
||||||
error::PayloadError,
|
error::PayloadError,
|
||||||
header::{self, HeaderValue},
|
header::{self, HeaderValue},
|
||||||
Error, HttpService, Method, Request, Response, StatusCode, Version,
|
Error, HttpService, Method, Request, Response, StatusCode, TlsAcceptorConfig, Version,
|
||||||
};
|
};
|
||||||
use actix_http_test::test_server;
|
use actix_http_test::test_server;
|
||||||
use actix_service::{fn_service, ServiceFactoryExt};
|
use actix_service::{fn_service, ServiceFactoryExt};
|
||||||
@ -89,7 +89,10 @@ async fn h2_1() -> io::Result<()> {
|
|||||||
assert_eq!(req.version(), Version::HTTP_2);
|
assert_eq!(req.version(), Version::HTTP_2);
|
||||||
ok::<_, Error>(Response::ok())
|
ok::<_, Error>(Response::ok())
|
||||||
})
|
})
|
||||||
.openssl(tls_config())
|
.openssl_with_config(
|
||||||
|
tls_config(),
|
||||||
|
TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)),
|
||||||
|
)
|
||||||
.map_err(|_| ())
|
.map_err(|_| ())
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
|
@ -8,13 +8,14 @@ use std::{
|
|||||||
net::{SocketAddr, TcpStream as StdTcpStream},
|
net::{SocketAddr, TcpStream as StdTcpStream},
|
||||||
sync::Arc,
|
sync::Arc,
|
||||||
task::Poll,
|
task::Poll,
|
||||||
|
time::Duration,
|
||||||
};
|
};
|
||||||
|
|
||||||
use actix_http::{
|
use actix_http::{
|
||||||
body::{BodyStream, BoxBody, SizedStream},
|
body::{BodyStream, BoxBody, SizedStream},
|
||||||
error::PayloadError,
|
error::PayloadError,
|
||||||
header::{self, HeaderName, HeaderValue},
|
header::{self, HeaderName, HeaderValue},
|
||||||
Error, HttpService, Method, Request, Response, StatusCode, Version,
|
Error, HttpService, Method, Request, Response, StatusCode, TlsAcceptorConfig, Version,
|
||||||
};
|
};
|
||||||
use actix_http_test::test_server;
|
use actix_http_test::test_server;
|
||||||
use actix_rt::pin;
|
use actix_rt::pin;
|
||||||
@ -160,7 +161,10 @@ async fn h2_1() -> io::Result<()> {
|
|||||||
assert_eq!(req.version(), Version::HTTP_2);
|
assert_eq!(req.version(), Version::HTTP_2);
|
||||||
ok::<_, Error>(Response::ok())
|
ok::<_, Error>(Response::ok())
|
||||||
})
|
})
|
||||||
.rustls(tls_config())
|
.rustls_with_config(
|
||||||
|
tls_config(),
|
||||||
|
TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)),
|
||||||
|
)
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
@ -212,6 +216,7 @@ async fn h2_content_length() {
|
|||||||
let value = HeaderValue::from_static("0");
|
let value = HeaderValue::from_static("0");
|
||||||
|
|
||||||
{
|
{
|
||||||
|
#[allow(clippy::single_element_loop)]
|
||||||
for &i in &[0] {
|
for &i in &[0] {
|
||||||
let req = srv
|
let req = srv
|
||||||
.request(Method::HEAD, srv.surl(&format!("/{}", i)))
|
.request(Method::HEAD, srv.surl(&format!("/{}", i)))
|
||||||
@ -226,6 +231,7 @@ async fn h2_content_length() {
|
|||||||
// assert_eq!(response.headers().get(&header), None);
|
// assert_eq!(response.headers().get(&header), None);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::single_element_loop)]
|
||||||
for &i in &[1] {
|
for &i in &[1] {
|
||||||
let req = srv
|
let req = srv
|
||||||
.request(Method::GET, srv.surl(&format!("/{}", i)))
|
.request(Method::GET, srv.surl(&format!("/{}", i)))
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
# Changes
|
# Changes
|
||||||
|
|
||||||
## Unreleased - 2021-xx-xx
|
## Unreleased - 2022-xx-xx
|
||||||
|
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
|
||||||
|
|
||||||
|
|
||||||
## 0.4.0 - 2022-02-25
|
## 0.4.0 - 2022-02-25
|
||||||
|
@ -14,8 +14,8 @@ name = "actix_multipart"
|
|||||||
path = "src/lib.rs"
|
path = "src/lib.rs"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
actix-utils = "3.0.0"
|
actix-utils = "3"
|
||||||
actix-web = { version = "4.0.0", default-features = false }
|
actix-web = { version = "4", default-features = false }
|
||||||
|
|
||||||
bytes = "1"
|
bytes = "1"
|
||||||
derive_more = "0.99.5"
|
derive_more = "0.99.5"
|
||||||
|
@ -4,7 +4,7 @@
|
|||||||
|
|
||||||
[](https://crates.io/crates/actix-multipart)
|
[](https://crates.io/crates/actix-multipart)
|
||||||
[](https://docs.rs/actix-multipart/0.4.0)
|
[](https://docs.rs/actix-multipart/0.4.0)
|
||||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
|

|
||||||

|

|
||||||
<br />
|
<br />
|
||||||
[](https://deps.rs/crate/actix-multipart/0.4.0)
|
[](https://deps.rs/crate/actix-multipart/0.4.0)
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
# Changes
|
# Changes
|
||||||
|
|
||||||
## Unreleased - 2021-xx-xx
|
## Unreleased - 2022-xx-xx
|
||||||
|
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
|
||||||
|
|
||||||
|
|
||||||
## 0.5.0 - 2022-02-22
|
## 0.5.0 - 2022-02-22
|
||||||
|
@ -21,18 +21,21 @@ default = ["http"]
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
bytestring = ">=0.1.5, <2"
|
bytestring = ">=0.1.5, <2"
|
||||||
firestorm = "0.5"
|
|
||||||
http = { version = "0.2.3", optional = true }
|
http = { version = "0.2.3", optional = true }
|
||||||
log = "0.4"
|
|
||||||
regex = "1.5"
|
regex = "1.5"
|
||||||
serde = "1"
|
serde = "1"
|
||||||
|
tracing = { version = "0.1.30", default-features = false, features = ["log"] }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
criterion = { version = "0.3", features = ["html_reports"] }
|
criterion = { version = "0.3", features = ["html_reports"] }
|
||||||
firestorm = { version = "0.5", features = ["enable_system_time"] }
|
|
||||||
http = "0.2.5"
|
http = "0.2.5"
|
||||||
serde = { version = "1", features = ["derive"] }
|
serde = { version = "1", features = ["derive"] }
|
||||||
|
percent-encoding = "2.1"
|
||||||
|
|
||||||
[[bench]]
|
[[bench]]
|
||||||
name = "router"
|
name = "router"
|
||||||
harness = false
|
harness = false
|
||||||
|
|
||||||
|
[[bench]]
|
||||||
|
name = "quoter"
|
||||||
|
harness = false
|
||||||
|
52
actix-router/benches/quoter.rs
Normal file
52
actix-router/benches/quoter.rs
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
use criterion::{black_box, criterion_group, criterion_main, Criterion};
|
||||||
|
|
||||||
|
use std::borrow::Cow;
|
||||||
|
|
||||||
|
fn compare_quoters(c: &mut Criterion) {
|
||||||
|
let mut group = c.benchmark_group("Compare Quoters");
|
||||||
|
|
||||||
|
let quoter = actix_router::Quoter::new(b"", b"");
|
||||||
|
let path_quoted = (0..=0x7f)
|
||||||
|
.map(|c| format!("%{:02X}", c))
|
||||||
|
.collect::<String>();
|
||||||
|
let path_unquoted = ('\u{00}'..='\u{7f}').collect::<String>();
|
||||||
|
|
||||||
|
group.bench_function("quoter_unquoted", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
for _ in 0..10 {
|
||||||
|
black_box(quoter.requote(path_unquoted.as_bytes()));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
group.bench_function("percent_encode_unquoted", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
for _ in 0..10 {
|
||||||
|
let decode = percent_encoding::percent_decode(path_unquoted.as_bytes());
|
||||||
|
black_box(Into::<Cow<'_, [u8]>>::into(decode));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
group.bench_function("quoter_quoted", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
for _ in 0..10 {
|
||||||
|
black_box(quoter.requote(path_quoted.as_bytes()));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
group.bench_function("percent_encode_quoted", |b| {
|
||||||
|
b.iter(|| {
|
||||||
|
for _ in 0..10 {
|
||||||
|
let decode = percent_encoding::percent_decode(path_quoted.as_bytes());
|
||||||
|
black_box(Into::<Cow<'_, [u8]>>::into(decode));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
group.finish();
|
||||||
|
}
|
||||||
|
|
||||||
|
criterion_group!(benches, compare_quoters);
|
||||||
|
criterion_main!(benches);
|
@ -1,169 +0,0 @@
|
|||||||
macro_rules! register {
|
|
||||||
(brackets) => {{
|
|
||||||
register!(finish => "{p1}", "{p2}", "{p3}", "{p4}")
|
|
||||||
}};
|
|
||||||
(finish => $p1:literal, $p2:literal, $p3:literal, $p4:literal) => {{
|
|
||||||
let arr = [
|
|
||||||
concat!("/authorizations"),
|
|
||||||
concat!("/authorizations/", $p1),
|
|
||||||
concat!("/applications/", $p1, "/tokens/", $p2),
|
|
||||||
concat!("/events"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/events"),
|
|
||||||
concat!("/networks/", $p1, "/", $p2, "/events"),
|
|
||||||
concat!("/orgs/", $p1, "/events"),
|
|
||||||
concat!("/users/", $p1, "/received_events"),
|
|
||||||
concat!("/users/", $p1, "/received_events/public"),
|
|
||||||
concat!("/users/", $p1, "/events"),
|
|
||||||
concat!("/users/", $p1, "/events/public"),
|
|
||||||
concat!("/users/", $p1, "/events/orgs/", $p2),
|
|
||||||
concat!("/feeds"),
|
|
||||||
concat!("/notifications"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/notifications"),
|
|
||||||
concat!("/notifications/threads/", $p1),
|
|
||||||
concat!("/notifications/threads/", $p1, "/subscription"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/stargazers"),
|
|
||||||
concat!("/users/", $p1, "/starred"),
|
|
||||||
concat!("/user/starred"),
|
|
||||||
concat!("/user/starred/", $p1, "/", $p2),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/subscribers"),
|
|
||||||
concat!("/users/", $p1, "/subscriptions"),
|
|
||||||
concat!("/user/subscriptions"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/subscription"),
|
|
||||||
concat!("/user/subscriptions/", $p1, "/", $p2),
|
|
||||||
concat!("/users/", $p1, "/gists"),
|
|
||||||
concat!("/gists"),
|
|
||||||
concat!("/gists/", $p1),
|
|
||||||
concat!("/gists/", $p1, "/star"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/git/blobs/", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/git/commits/", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/git/refs"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/git/tags/", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/git/trees/", $p3),
|
|
||||||
concat!("/issues"),
|
|
||||||
concat!("/user/issues"),
|
|
||||||
concat!("/orgs/", $p1, "/issues"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/issues"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/assignees"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/assignees/", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/comments"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/events"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/labels"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/labels/", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/labels"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3, "/labels"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/milestones/"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3),
|
|
||||||
concat!("/emojis"),
|
|
||||||
concat!("/gitignore/templates"),
|
|
||||||
concat!("/gitignore/templates/", $p1),
|
|
||||||
concat!("/meta"),
|
|
||||||
concat!("/rate_limit"),
|
|
||||||
concat!("/users/", $p1, "/orgs"),
|
|
||||||
concat!("/user/orgs"),
|
|
||||||
concat!("/orgs/", $p1),
|
|
||||||
concat!("/orgs/", $p1, "/members"),
|
|
||||||
concat!("/orgs/", $p1, "/members", $p2),
|
|
||||||
concat!("/orgs/", $p1, "/public_members"),
|
|
||||||
concat!("/orgs/", $p1, "/public_members/", $p2),
|
|
||||||
concat!("/orgs/", $p1, "/teams"),
|
|
||||||
concat!("/teams/", $p1),
|
|
||||||
concat!("/teams/", $p1, "/members"),
|
|
||||||
concat!("/teams/", $p1, "/members", $p2),
|
|
||||||
concat!("/teams/", $p1, "/repos"),
|
|
||||||
concat!("/teams/", $p1, "/repos/", $p2, "/", $p3),
|
|
||||||
concat!("/user/teams"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/pulls"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/commits"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/files"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/merge"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/comments"),
|
|
||||||
concat!("/user/repos"),
|
|
||||||
concat!("/users/", $p1, "/repos"),
|
|
||||||
concat!("/orgs/", $p1, "/repos"),
|
|
||||||
concat!("/repositories"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/contributors"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/languages"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/teams"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/tags"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/branches"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/branches/", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/collaborators"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/collaborators/", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/comments"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3, "/comments"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/commits"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/readme"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/keys"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/keys", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/downloads"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/downloads", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/forks"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/hooks"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/hooks", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/releases"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3, "/assets"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/stats/contributors"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/stats/commit_activity"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/stats/code_frequency"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/stats/participation"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/stats/punch_card"),
|
|
||||||
concat!("/repos/", $p1, "/", $p2, "/statuses/", $p3),
|
|
||||||
concat!("/search/repositories"),
|
|
||||||
concat!("/search/code"),
|
|
||||||
concat!("/search/issues"),
|
|
||||||
concat!("/search/users"),
|
|
||||||
concat!("/legacy/issues/search/", $p1, "/", $p2, "/", $p3, "/", $p4),
|
|
||||||
concat!("/legacy/repos/search/", $p1),
|
|
||||||
concat!("/legacy/user/search/", $p1),
|
|
||||||
concat!("/legacy/user/email/", $p1),
|
|
||||||
concat!("/users/", $p1),
|
|
||||||
concat!("/user"),
|
|
||||||
concat!("/users"),
|
|
||||||
concat!("/user/emails"),
|
|
||||||
concat!("/users/", $p1, "/followers"),
|
|
||||||
concat!("/user/followers"),
|
|
||||||
concat!("/users/", $p1, "/following"),
|
|
||||||
concat!("/user/following"),
|
|
||||||
concat!("/user/following/", $p1),
|
|
||||||
concat!("/users/", $p1, "/following", $p2),
|
|
||||||
concat!("/users/", $p1, "/keys"),
|
|
||||||
concat!("/user/keys"),
|
|
||||||
concat!("/user/keys/", $p1),
|
|
||||||
];
|
|
||||||
|
|
||||||
arr.to_vec()
|
|
||||||
}};
|
|
||||||
}
|
|
||||||
|
|
||||||
static PATHS: [&str; 5] = [
|
|
||||||
"/authorizations",
|
|
||||||
"/user/repos",
|
|
||||||
"/repos/rust-lang/rust/stargazers",
|
|
||||||
"/orgs/rust-lang/public_members/nikomatsakis",
|
|
||||||
"/repos/rust-lang/rust/releases/1.51.0",
|
|
||||||
];
|
|
||||||
|
|
||||||
fn main() {
|
|
||||||
let mut router = actix_router::Router::<bool>::build();
|
|
||||||
|
|
||||||
for route in register!(brackets) {
|
|
||||||
router.path(route, true);
|
|
||||||
}
|
|
||||||
|
|
||||||
let actix = router.finish();
|
|
||||||
|
|
||||||
if firestorm::enabled() {
|
|
||||||
firestorm::bench("target", || {
|
|
||||||
for &route in &PATHS {
|
|
||||||
let mut path = actix_router::Path::new(route);
|
|
||||||
actix.recognize(&mut path).unwrap();
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
}
|
|
@ -7,7 +7,7 @@ use crate::path::{Path, PathIter};
|
|||||||
use crate::{Quoter, ResourcePath};
|
use crate::{Quoter, ResourcePath};
|
||||||
|
|
||||||
thread_local! {
|
thread_local! {
|
||||||
static FULL_QUOTER: Quoter = Quoter::new(b"+/%", b"");
|
static FULL_QUOTER: Quoter = Quoter::new(b"", b"");
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! unsupported_type {
|
macro_rules! unsupported_type {
|
||||||
|
@ -1,7 +1,6 @@
|
|||||||
use std::borrow::Cow;
|
 use std::borrow::Cow;
 use std::ops::{DerefMut, Index};

-use firestorm::profile_method;
 use serde::de;

 use crate::{de::PathDeserializer, Resource, ResourcePath};

@@ -52,7 +51,6 @@ impl<T: ResourcePath> Path<T> {
     /// Returns full path as a string.
     #[inline]
     pub fn as_str(&self) -> &str {
-        profile_method!(as_str);
         self.path.path()
     }

@@ -61,7 +59,6 @@ impl<T: ResourcePath> Path<T> {
     /// Returns empty string if no more is to be processed.
     #[inline]
     pub fn unprocessed(&self) -> &str {
-        profile_method!(unprocessed);
         // clamp skip to path length
         let skip = (self.skip as usize).min(self.as_str().len());
         &self.path.path()[skip..]

@@ -72,8 +69,6 @@ impl<T: ResourcePath> Path<T> {
     #[deprecated(since = "0.6.0", note = "Use `.as_str()` or `.unprocessed()`.")]
     #[inline]
     pub fn path(&self) -> &str {
-        profile_method!(path);
-
         let skip = self.skip as usize;
         let path = self.path.path();
         if skip <= path.len() {

@@ -86,8 +81,6 @@ impl<T: ResourcePath> Path<T> {
     /// Set new path.
     #[inline]
     pub fn set(&mut self, path: T) {
-        profile_method!(set);
-
         self.skip = 0;
         self.path = path;
         self.segments.clear();

@@ -96,8 +89,6 @@ impl<T: ResourcePath> Path<T> {
     /// Reset state.
     #[inline]
     pub fn reset(&mut self) {
-        profile_method!(reset);
-
         self.skip = 0;
         self.segments.clear();
     }

@@ -105,13 +96,10 @@ impl<T: ResourcePath> Path<T> {
     /// Skip first `n` chars in path.
     #[inline]
     pub fn skip(&mut self, n: u16) {
-        profile_method!(skip);
         self.skip += n;
     }

     pub(crate) fn add(&mut self, name: impl Into<Cow<'static, str>>, value: PathItem) {
-        profile_method!(add);
-
         match value {
             PathItem::Static(s) => self.segments.push((name.into(), PathItem::Static(s))),
             PathItem::Segment(begin, end) => self.segments.push((

@@ -127,8 +115,6 @@ impl<T: ResourcePath> Path<T> {
         name: impl Into<Cow<'static, str>>,
         value: impl Into<Cow<'static, str>>,
     ) {
-        profile_method!(add_static);
-
         self.segments
             .push((name.into(), PathItem::Static(value.into())));
     }

@@ -147,8 +133,6 @@ impl<T: ResourcePath> Path<T> {

     /// Get matched parameter by name without type conversion
     pub fn get(&self, name: &str) -> Option<&str> {
-        profile_method!(get);
-
         for (seg_name, val) in self.segments.iter() {
             if name == seg_name {
                 return match val {

@@ -167,8 +151,6 @@ impl<T: ResourcePath> Path<T> {
     ///
     /// If keyed parameter is not available empty string is used as default value.
     pub fn query(&self, key: &str) -> &str {
-        profile_method!(query);
-
         if let Some(s) = self.get(key) {
             s
         } else {

@@ -186,7 +168,6 @@ impl<T: ResourcePath> Path<T> {

     /// Try to deserialize matching parameters to a specified type `U`
     pub fn load<'de, U: serde::Deserialize<'de>>(&'de self) -> Result<U, de::value::Error> {
-        profile_method!(load);
         de::Deserialize::deserialize(PathDeserializer::new(self))
     }
 }
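The hunks above only strip `firestorm` profiling calls; the public `Path` API is unchanged. As a quick orientation, a hedged sketch of how these accessors are typically used together with `ResourceDef` (the `Params` struct and the values are illustrative, not part of the diff):

```rust
use actix_router::{Path, ResourceDef};
use serde::Deserialize;

#[derive(Deserialize)]
struct Params {
    id: u32,
}

fn main() {
    let rdef = ResourceDef::new("/user/{id}");
    let mut path = Path::new("/user/123");

    // fills the dynamic segments and advances the internal `skip` offset
    assert!(rdef.capture_match_info(&mut path));

    // untyped access by segment name
    assert_eq!(path.get("id"), Some("123"));

    // typed access via serde, backed by `PathDeserializer`
    let params: Params = path.load().unwrap();
    assert_eq!(params.id, 123);
}
```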
@@ -1,132 +1,89 @@
-#[allow(dead_code)]
-const GEN_DELIMS: &[u8] = b":/?#[]@";
-
-#[allow(dead_code)]
-const SUB_DELIMS_WITHOUT_QS: &[u8] = b"!$'()*,";
-
-#[allow(dead_code)]
-const SUB_DELIMS: &[u8] = b"!$'()*,+?=;";
-
-#[allow(dead_code)]
-const RESERVED: &[u8] = b":/?#[]@!$'()*,+?=;";
-
-#[allow(dead_code)]
-const UNRESERVED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
-    ABCDEFGHIJKLMNOPQRSTUVWXYZ
-    1234567890
-    -._~";
-
-const ALLOWED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
-    ABCDEFGHIJKLMNOPQRSTUVWXYZ
-    1234567890
-    -._~
-    !$'()*,";
-
-const QS: &[u8] = b"+&=;b";
-
-/// A quoter
+/// Partial percent-decoding.
+///
+/// Performs percent-decoding on a slice but can selectively skip decoding certain sequences.
+///
+/// # Examples
+/// ```
+/// # use actix_router::Quoter;
+/// // + is set as a protected character and will not be decoded...
+/// let q = Quoter::new(&[], b"+");
+///
+/// // ...but the other encoded characters (like the hyphen below) will.
+/// assert_eq!(q.requote(b"/a%2Db%2Bc").unwrap(), b"/a-b%2Bc");
+/// ```
 pub struct Quoter {
-    /// Simple bit-map of safe values in the 0-127 ASCII range.
-    safe_table: [u8; 16],
-
     /// Simple bit-map of protected values in the 0-127 ASCII range.
-    protected_table: [u8; 16],
+    protected_table: AsciiBitmap,
 }

 impl Quoter {
-    pub fn new(safe: &[u8], protected: &[u8]) -> Quoter {
-        let mut quoter = Quoter {
-            safe_table: [0; 16],
-            protected_table: [0; 16],
-        };
-
-        // prepare safe table
-        for ch in 0..128 {
-            if ALLOWED.contains(&ch) {
-                set_bit(&mut quoter.safe_table, ch);
-            }
-
-            if QS.contains(&ch) {
-                set_bit(&mut quoter.safe_table, ch);
-            }
-        }
-
-        for &ch in safe {
-            set_bit(&mut quoter.safe_table, ch)
-        }
+    /// Constructs a new `Quoter` instance given a set of protected ASCII bytes.
+    ///
+    /// The first argument is ignored but is kept for backward compatibility.
+    ///
+    /// # Panics
+    /// Panics if any of the `protected` bytes are not in the 0-127 ASCII range.
+    pub fn new(_: &[u8], protected: &[u8]) -> Quoter {
+        let mut protected_table = AsciiBitmap::default();

         // prepare protected table
         for &ch in protected {
-            set_bit(&mut quoter.safe_table, ch);
-            set_bit(&mut quoter.protected_table, ch);
+            protected_table.set_bit(ch);
         }

-        quoter
+        Quoter { protected_table }
     }

-    /// Decodes safe percent-encoded sequences from `val`.
-    ///
-    /// Returns `None` when no modification to the original byte string was required.
-    ///
-    /// Non-ASCII bytes are accepted as valid input.
-    ///
-    /// Behavior for invalid/incomplete percent-encoding sequences is unspecified and may include
-    /// removing the invalid sequence from the output or passing it as-is.
-    pub fn requote(&self, val: &[u8]) -> Option<Vec<u8>> {
-        let mut has_pct = 0;
-        let mut pct = [b'%', 0, 0];
-        let mut idx = 0;
-        let mut cloned: Option<Vec<u8>> = None;
-
-        let len = val.len();
-
-        while idx < len {
-            let ch = val[idx];
-
-            if has_pct != 0 {
-                pct[has_pct] = val[idx];
-                has_pct += 1;
-
-                if has_pct == 3 {
-                    has_pct = 0;
-                    let buf = cloned.as_mut().unwrap();
-
-                    if let Some(ch) = hex_pair_to_char(pct[1], pct[2]) {
-                        if ch < 128 {
-                            if bit_at(&self.protected_table, ch) {
-                                buf.extend_from_slice(&pct);
-                                idx += 1;
-                                continue;
-                            }
-
-                            if bit_at(&self.safe_table, ch) {
-                                buf.push(ch);
-                                idx += 1;
-                                continue;
-                            }
-                        }
-
-                        buf.push(ch);
-                    } else {
-                        buf.extend_from_slice(&pct[..]);
-                    }
-                }
-            } else if ch == b'%' {
-                has_pct = 1;
-
-                if cloned.is_none() {
-                    let mut c = Vec::with_capacity(len);
-                    c.extend_from_slice(&val[..idx]);
-                    cloned = Some(c);
-                }
-            } else if let Some(ref mut cloned) = cloned {
-                cloned.push(ch)
-            }
-
-            idx += 1;
-        }
-
-        cloned
+    /// Decodes the next escape sequence, if any, and advances `val`.
+    #[inline(always)]
+    fn decode_next<'a>(&self, val: &mut &'a [u8]) -> Option<(&'a [u8], u8)> {
+        for i in 0..val.len() {
+            if let (prev, [b'%', p1, p2, rem @ ..]) = val.split_at(i) {
+                if let Some(ch) = hex_pair_to_char(*p1, *p2)
+                    // ignore protected ascii bytes
+                    .filter(|&ch| !(ch < 128 && self.protected_table.bit_at(ch)))
+                {
+                    *val = rem;
+                    return Some((prev, ch));
+                }
+            }
+        }
+
+        None
+    }
+
+    /// Partially percent-decodes the given bytes.
+    ///
+    /// Escape sequences of the protected set are *not* decoded.
+    ///
+    /// Returns `None` when no modification to the original bytes was required.
+    ///
+    /// Invalid/incomplete percent-encoding sequences are passed unmodified.
+    pub fn requote(&self, val: &[u8]) -> Option<Vec<u8>> {
+        let mut remaining = val;
+
+        // early return indicates that no percent-encoded sequences exist and we can skip allocation
+        let (pre, decoded_char) = self.decode_next(&mut remaining)?;
+
+        // decoded output will always be shorter than the input
+        let mut decoded = Vec::<u8>::with_capacity(val.len());
+
+        // push first segment and decoded char
+        decoded.extend_from_slice(pre);
+        decoded.push(decoded_char);
+
+        // decode and push rest of segments and decoded chars
+        while let Some((prev, ch)) = self.decode_next(&mut remaining) {
+            // this ugly conditional achieves +50% perf in cases where this is a tight loop.
+            if !prev.is_empty() {
+                decoded.extend_from_slice(prev);
+            }
+            decoded.push(ch);
+        }
+
+        decoded.extend_from_slice(remaining);
+
+        Some(decoded)
     }

     pub(crate) fn requote_str_lossy(&self, val: &str) -> Option<String> {
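The rewritten `requote` above returns `None` when nothing needed decoding and leaves protected or malformed sequences untouched. A small usage sketch, mirroring the crate's own tests from this diff:

```rust
use actix_router::Quoter;

fn main() {
    // '+' (%2B) is protected, so it stays encoded while other escapes decode
    let q = Quoter::new(&[], b"+");
    assert_eq!(q.requote(b"/a%2Db%2Bc").unwrap(), b"/a-b%2Bc");

    // no escape sequences at all: `None` signals "no allocation, use the input as-is"
    assert_eq!(q.requote(b"/plain/path"), None);

    // malformed sequences such as %2X are passed through unmodified
    let q = Quoter::new(b"%+", b"/");
    assert_eq!(q.requote(b"/a%20%2X%%").unwrap(), b"/a %2X%%");
}
```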
@@ -135,24 +92,6 @@ impl Quoter {
     }
 }

-/// Converts an ASCII character in the hex-encoded set (`0-9`, `A-F`, `a-f`) to its integer
-/// representation from `0x0`–`0xF`.
-///
-/// - `0x30 ('0') => 0x0`
-/// - `0x39 ('9') => 0x9`
-/// - `0x41 ('a') => 0xA`
-/// - `0x61 ('A') => 0xA`
-/// - `0x46 ('f') => 0xF`
-/// - `0x66 ('F') => 0xF`
-fn from_ascii_hex(v: u8) -> Option<u8> {
-    match v {
-        b'0'..=b'9' => Some(v - 0x30), // ord('0') == 0x30
-        b'A'..=b'F' => Some(v - 0x41 + 10), // ord('A') == 0x41
-        b'a'..=b'f' => Some(v - 0x61 + 10), // ord('a') == 0x61
-        _ => None,
-    }
-}
-
 /// Decode a ASCII hex-encoded pair to an integer.
 ///
 /// Returns `None` if either portion of the decoded pair does not evaluate to a valid hex value.

@@ -160,64 +99,52 @@ fn from_ascii_hex(v: u8) -> Option<u8> {
 /// - `0x33 ('3'), 0x30 ('0') => 0x30 ('0')`
 /// - `0x34 ('4'), 0x31 ('1') => 0x41 ('A')`
 /// - `0x36 ('6'), 0x31 ('1') => 0x61 ('a')`
+#[inline(always)]
 fn hex_pair_to_char(d1: u8, d2: u8) -> Option<u8> {
-    let (d_high, d_low) = (from_ascii_hex(d1)?, from_ascii_hex(d2)?);
+    let d_high = char::from(d1).to_digit(16)?;
+    let d_low = char::from(d2).to_digit(16)?;

     // left shift high nibble by 4 bits
-    Some(d_high << 4 | d_low)
+    Some((d_high as u8) << 4 | (d_low as u8))
 }

-/// Sets bit in given bit-map to 1=true.
-///
-/// # Panics
-/// Panics if `ch` index is out of bounds.
-fn set_bit(array: &mut [u8], ch: u8) {
-    array[(ch >> 3) as usize] |= 0b1 << (ch & 0b111)
+#[derive(Debug, Default, Clone)]
+struct AsciiBitmap {
+    array: [u8; 16],
 }

-/// Returns true if bit to true in given bit-map.
-///
-/// # Panics
-/// Panics if `ch` index is out of bounds.
-fn bit_at(array: &[u8], ch: u8) -> bool {
-    array[(ch >> 3) as usize] & (0b1 << (ch & 0b111)) != 0
+impl AsciiBitmap {
+    /// Sets bit in given bit-map to 1=true.
+    ///
+    /// # Panics
+    /// Panics if `ch` index is out of bounds.
+    fn set_bit(&mut self, ch: u8) {
+        self.array[(ch >> 3) as usize] |= 0b1 << (ch & 0b111)
+    }
+
+    /// Returns true if bit to true in given bit-map.
+    ///
+    /// # Panics
+    /// Panics if `ch` index is out of bounds.
+    fn bit_at(&self, ch: u8) -> bool {
+        self.array[(ch >> 3) as usize] & (0b1 << (ch & 0b111)) != 0
+    }
 }

 #[cfg(test)]
 mod tests {
     use super::*;

-    #[test]
-    fn hex_encoding() {
-        let hex = b"0123456789abcdefABCDEF";
-
-        for i in 0..256 {
-            let c = i as u8;
-            if hex.contains(&c) {
-                assert!(from_ascii_hex(c).is_some())
-            } else {
-                assert!(from_ascii_hex(c).is_none())
-            }
-        }
-
-        let expected = [
-            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 10, 11, 12, 13, 14, 15,
-        ];
-        for i in 0..hex.len() {
-            assert_eq!(from_ascii_hex(hex[i]).unwrap(), expected[i]);
-        }
-    }
-
     #[test]
     fn custom_quoter() {
         let q = Quoter::new(b"", b"+");
         assert_eq!(q.requote(b"/a%25c").unwrap(), b"/a%c");
-        assert_eq!(q.requote(b"/a%2Bc").unwrap(), b"/a%2Bc");
+        assert_eq!(q.requote(b"/a%2Bc"), None);

         let q = Quoter::new(b"%+", b"/");
         assert_eq!(q.requote(b"/a%25b%2Bc").unwrap(), b"/a%b+c");
-        assert_eq!(q.requote(b"/a%2fb").unwrap(), b"/a%2fb");
-        assert_eq!(q.requote(b"/a%2Fb").unwrap(), b"/a%2Fb");
+        assert_eq!(q.requote(b"/a%2fb"), None);
+        assert_eq!(q.requote(b"/a%2Fb"), None);
         assert_eq!(q.requote(b"/a%0Ab").unwrap(), b"/a\nb");
         assert_eq!(q.requote(b"/a%FE\xffb").unwrap(), b"/a\xfe\xffb");
         assert_eq!(q.requote(b"/a\xfe\xffb"), None);

@@ -233,7 +160,8 @@ mod tests {
     #[test]
     fn invalid_sequences() {
         let q = Quoter::new(b"%+", b"/");
-        assert_eq!(q.requote(b"/a%2x%2X%%").unwrap(), b"/a%2x%2X");
+        assert_eq!(q.requote(b"/a%2x%2X%%"), None);
+        assert_eq!(q.requote(b"/a%20%2X%%").unwrap(), b"/a %2X%%");
     }

     #[test]
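`AsciiBitmap` packs one flag per 7-bit ASCII value into 16 bytes: the upper bits of the byte select the array slot and the low three bits select the bit within it. A standalone sketch of the same indexing scheme (illustrative names, not the crate's private type):

```rust
/// 128 one-bit flags, one per 7-bit ASCII value.
#[derive(Default)]
struct Bitmap128 {
    array: [u8; 16],
}

impl Bitmap128 {
    fn set(&mut self, ch: u8) {
        // byte index = ch / 8, bit index = ch % 8
        self.array[(ch >> 3) as usize] |= 1 << (ch & 0b111);
    }

    fn get(&self, ch: u8) -> bool {
        self.array[(ch >> 3) as usize] & (1 << (ch & 0b111)) != 0
    }
}

fn main() {
    let mut map = Bitmap128::default();
    map.set(b'+'); // 0x2B: byte 5, bit 3
    assert!(map.get(b'+'));
    assert!(!map.get(b'-'));
}
```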
@@ -5,8 +5,8 @@ use std::{
     mem,
 };

-use firestorm::{profile_fn, profile_method, profile_section};
 use regex::{escape, Regex, RegexSet};
+use tracing::error;

 use crate::{path::PathItem, IntoPatterns, Patterns, Resource, ResourcePath};

@@ -271,7 +271,6 @@ impl ResourceDef {
     /// assert!(!resource.is_match("/foo"));
     /// ```
     pub fn new<T: IntoPatterns>(paths: T) -> Self {
-        profile_method!(new);
         Self::construct(paths, false)
     }

@@ -299,7 +298,6 @@ impl ResourceDef {
     /// assert!(!resource.is_match("/foo"));
     /// ```
     pub fn prefix<T: IntoPatterns>(paths: T) -> Self {
-        profile_method!(prefix);
         ResourceDef::construct(paths, true)
     }

@@ -324,7 +322,6 @@ impl ResourceDef {
     /// assert!(!resource.is_match("user/123"));
     /// ```
     pub fn root_prefix(path: &str) -> Self {
-        profile_method!(root_prefix);
         ResourceDef::prefix(insert_slash(path).into_owned())
     }

@@ -548,8 +545,6 @@ impl ResourceDef {
     /// ```
     #[inline]
     pub fn is_match(&self, path: &str) -> bool {
-        profile_method!(is_match);
-
         // this function could be expressed as:
         // `self.find_match(path).is_some()`
         // but this skips some checks and uses potentially faster regex methods

@@ -597,8 +592,6 @@ impl ResourceDef {
     /// assert_eq!(resource.find_match("/profile/1234"), Some(13));
     /// ```
     pub fn find_match(&self, path: &str) -> Option<usize> {
-        profile_method!(find_match);
-
         match &self.pat_type {
             PatternType::Static(pattern) => self.static_match(pattern, path),

@@ -633,7 +626,6 @@ impl ResourceDef {
     /// assert_eq!(path.unprocessed(), "");
     /// ```
     pub fn capture_match_info<R: Resource>(&self, resource: &mut R) -> bool {
-        profile_method!(capture_match_info);
         self.capture_match_info_fn(resource, |_| true)
     }

@@ -657,7 +649,7 @@ impl ResourceDef {
     ///     resource.capture_match_info_fn(
     ///         path,
     ///         // when env var is not set, reject when path contains "admin"
-    ///         |res| !(!admin_allowed && res.path().contains("admin")),
+    ///         |path| !(!admin_allowed && path.as_str().contains("admin")),
     ///     )
     /// }
     ///

@@ -679,56 +671,35 @@ impl ResourceDef {
         R: Resource,
         F: FnOnce(&R) -> bool,
     {
-        profile_method!(capture_match_info_fn);
-
         let mut segments = <[PathItem; MAX_DYNAMIC_SEGMENTS]>::default();
         let path = resource.resource_path();
         let path_str = path.unprocessed();

         let (matched_len, matched_vars) = match &self.pat_type {
-            PatternType::Static(pattern) => {
-                profile_section!(pattern_static_or_prefix);
-
-                match self.static_match(pattern, path_str) {
-                    Some(len) => (len, None),
-                    None => return false,
-                }
-            }
+            PatternType::Static(pattern) => match self.static_match(pattern, path_str) {
+                Some(len) => (len, None),
+                None => return false,
+            },

             PatternType::Dynamic(re, names) => {
-                profile_section!(pattern_dynamic);
-
-                let captures = {
-                    profile_section!(pattern_dynamic_regex_exec);
-
-                    match re.captures(path.unprocessed()) {
-                        Some(captures) => captures,
-                        _ => return false,
-                    }
-                };
+                let captures = match re.captures(path.unprocessed()) {
+                    Some(captures) => captures,
+                    _ => return false,
+                };

-                {
-                    profile_section!(pattern_dynamic_extract_captures);
-
-                    for (no, name) in names.iter().enumerate() {
-                        if let Some(m) = captures.name(name) {
-                            segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16);
-                        } else {
-                            log::error!(
-                                "Dynamic path match but not all segments found: {}",
-                                name
-                            );
-                            return false;
-                        }
-                    }
-                };
+                for (no, name) in names.iter().enumerate() {
+                    if let Some(m) = captures.name(name) {
+                        segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16);
+                    } else {
+                        error!("Dynamic path match but not all segments found: {}", name);
+                        return false;
+                    }
+                }

                 (captures[1].len(), Some(names))
             }

             PatternType::DynamicSet(re, params) => {
-                profile_section!(pattern_dynamic_set);
-
                 let path = path.unprocessed();
                 let (pattern, names) = match re.matches(path).into_iter().next() {
                     Some(idx) => &params[idx],

@@ -744,7 +715,7 @@ impl ResourceDef {
                     if let Some(m) = captures.name(name) {
                         segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16);
                     } else {
-                        log::error!("Dynamic path match but not all segments found: {}", name);
+                        error!("Dynamic path match but not all segments found: {}", name);
                         return false;
                     }
                 }

@@ -811,7 +782,6 @@ impl ResourceDef {
         I: IntoIterator,
         I::Item: AsRef<str>,
     {
-        profile_method!(resource_path_from_iter);
         let mut iter = values.into_iter();
         self.build_resource_path(path, |_| iter.next())
     }

@@ -847,7 +817,6 @@ impl ResourceDef {
         V: AsRef<str>,
         S: BuildHasher,
     {
-        profile_method!(resource_path_from_map);
         self.build_resource_path(path, |name| values.get(name))
     }

@@ -868,8 +837,6 @@ impl ResourceDef {
     }

     fn construct<T: IntoPatterns>(paths: T, is_prefix: bool) -> Self {
-        profile_method!(construct);
-
         let patterns = paths.patterns();
         let (pat_type, segments) = match &patterns {
             Patterns::Single(pattern) => ResourceDef::parse(pattern, is_prefix, false),

@@ -928,8 +895,6 @@ impl ResourceDef {
     /// # Panics
     /// Panics if given patterns does not contain a dynamic segment.
     fn parse_param(pattern: &str) -> (PatternSegment, String, &str, bool) {
-        profile_method!(parse_param);
-
         const DEFAULT_PATTERN: &str = "[^/]+";
         const DEFAULT_PATTERN_TAIL: &str = ".*";

@@ -999,8 +964,6 @@ impl ResourceDef {
         is_prefix: bool,
         force_dynamic: bool,
     ) -> (PatternType, Vec<PatternSegment>) {
-        profile_method!(parse);
-
         if !force_dynamic && pattern.find('{').is_none() && !pattern.ends_with('*') {
             // pattern is static
             return (

@@ -1038,7 +1001,7 @@ impl ResourceDef {
             // tail segments in prefixes have no defined semantics

             #[cfg(not(test))]
-            log::warn!(
+            tracing::warn!(
                 "Prefix resources should not have tail segments. \
                 Use `ResourceDef::new` constructor. \
                 This may become a panic in the future."

@@ -1053,7 +1016,7 @@ impl ResourceDef {
             // unnamed tail segment

             #[cfg(not(test))]
-            log::warn!(
+            tracing::warn!(
                 "Tail segments must have names. \
                 Consider `.../{{tail}}*`. \
                 This may become a panic in the future."

@@ -1133,8 +1096,6 @@ impl From<String> for ResourceDef {
 }

 pub(crate) fn insert_slash(path: &str) -> Cow<'_, str> {
-    profile_fn!(insert_slash);
-
     if !path.is_empty() && !path.starts_with('/') {
         let mut new_path = String::with_capacity(path.len() + 1);
         new_path.push('/');
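For orientation on the matching methods touched above (only profiling and logging plumbing changed), a hedged sketch of `ResourceDef` matching against a `Path`; the pattern and values are made up for illustration:

```rust
use actix_router::{Path, ResourceDef};

fn main() {
    let resource = ResourceDef::prefix("/user/{id}");

    // cheap boolean / match-length checks
    assert!(resource.is_match("/user/123/stars"));
    assert_eq!(resource.find_match("/user/123/stars"), Some(9));

    // full capture into a Path, which records segment positions
    let mut path = Path::new("/user/123/stars");
    assert!(resource.capture_match_info(&mut path));
    assert_eq!(path.get("id"), Some("123"));
    assert_eq!(path.unprocessed(), "/stars");
}
```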
@@ -1,5 +1,3 @@
-use firestorm::profile_method;
-
 use crate::{IntoPatterns, Resource, ResourceDef};

 #[derive(Debug, Copy, Clone, PartialEq)]

@@ -30,7 +28,6 @@ impl<T, U> Router<T, U> {
     where
         R: Resource,
     {
-        profile_method!(recognize);
         self.recognize_fn(resource, |_, _| true)
     }

@@ -39,7 +36,6 @@ impl<T, U> Router<T, U> {
     where
         R: Resource,
     {
-        profile_method!(recognize_mut);
         self.recognize_mut_fn(resource, |_, _| true)
     }

@@ -55,8 +51,6 @@ impl<T, U> Router<T, U> {
         R: Resource,
         F: FnMut(&R, &U) -> bool,
     {
-        profile_method!(recognize_checked);
-
         for (rdef, val, ctx) in self.routes.iter() {
             if rdef.capture_match_info_fn(resource, |res| check(res, ctx)) {
                 return Some((val, ResourceId(rdef.id())));

@@ -77,8 +71,6 @@ impl<T, U> Router<T, U> {
         R: Resource,
         F: FnMut(&R, &U) -> bool,
     {
-        profile_method!(recognize_mut_checked);
-
         for (rdef, val, ctx) in self.routes.iter_mut() {
             if rdef.capture_match_info_fn(resource, |res| check(res, ctx)) {
                 return Some((val, ResourceId(rdef.id())));

@@ -104,7 +96,6 @@ impl<T, U> RouterBuilder<T, U> {
         val: T,
         ctx: U,
     ) -> (&mut ResourceDef, &mut T, &mut U) {
-        profile_method!(push);
         self.routes.push((rdef, val, ctx));
         self.routes
             .last_mut()

@@ -131,7 +122,6 @@ where
         path: impl IntoPatterns,
         val: T,
     ) -> (&mut ResourceDef, &mut T, &mut U) {
-        profile_method!(path);
         self.push(ResourceDef::new(path), val, U::default())
     }

@@ -141,13 +131,11 @@ where
         prefix: impl IntoPatterns,
         val: T,
     ) -> (&mut ResourceDef, &mut T, &mut U) {
-        profile_method!(prefix);
         self.push(ResourceDef::prefix(prefix), val, U::default())
     }

     /// Registers resource for [`ResourceDef`].
     pub fn rdef(&mut self, rdef: ResourceDef, val: T) -> (&mut ResourceDef, &mut T, &mut U) {
-        profile_method!(rdef);
         self.push(rdef, val, U::default())
     }
 }
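The `Router` methods above keep their behavior; only the `profile_method!` calls disappear. A brief usage sketch with made-up route values:

```rust
use actix_router::{Path, Router};

fn main() {
    let mut builder = Router::<&'static str>::build();
    builder.path("/user/{id}", "user-detail");
    builder.prefix("/static", "assets");
    let router = builder.finish();

    let mut path = Path::new("/user/42");
    if let Some((value, _resource_id)) = router.recognize(&mut path) {
        assert_eq!(*value, "user-detail");
        assert_eq!(path.get("id"), Some("42"));
    }
}
```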
@@ -3,7 +3,7 @@ use crate::ResourcePath;
 use crate::Quoter;

 thread_local! {
-    static DEFAULT_QUOTER: Quoter = Quoter::new(b"@:", b"%/+");
+    static DEFAULT_QUOTER: Quoter = Quoter::new(b"", b"%/+");
 }

 #[derive(Debug, Clone, Default)]
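The new default quoter registers no extra "safe" bytes and keeps `%`, `/` and `+` protected, so an encoded slash can never be decoded into a path separator. A hedged sketch of the observable effect, expressed through the public `Quoter` type:

```rust
use actix_router::Quoter;

fn main() {
    // mirrors the new DEFAULT_QUOTER arguments: no safe set, protect `%`, `/`, `+`
    let q = Quoter::new(b"", b"%/+");

    // '@' (%40) now decodes like any ordinary byte...
    assert_eq!(q.requote(b"/user%40example").unwrap(), b"/user@example");

    // ...while an encoded '/' stays encoded, preserving path segmentation
    assert_eq!(q.requote(b"/a%2Fb"), None);
}
```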
@@ -1,6 +1,7 @@
 # Changes

-## Unreleased - 2021-xx-xx
+## Unreleased - 2022-xx-xx
+- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.


 ## 0.1.0-beta.13 - 2022-02-16

@@ -29,13 +29,13 @@ openssl = ["tls-openssl", "actix-http/openssl", "awc/openssl"]

 [dependencies]
 actix-codec = "0.5"
-actix-http = "3.0.0"
+actix-http = "3"
 actix-http-test = "3.0.0-beta.13"
 actix-rt = "2.1"
-actix-service = "2.0.0"
-actix-utils = "3.0.0"
-actix-web = { version = "4.0.0", default-features = false, features = ["cookies"] }
-awc = { version = "3.0.0-beta.21", default-features = false, features = ["cookies"] }
+actix-service = "2"
+actix-utils = "3"
+actix-web = { version = "4", default-features = false, features = ["cookies"] }
+awc = { version = "3", default-features = false, features = ["cookies"] }

 futures-core = { version = "0.3.7", default-features = false, features = ["std"] }
 futures-util = { version = "0.3.7", default-features = false, features = [] }

@@ -1,6 +1,7 @@
 # Changes

-## Unreleased - 2021-xx-xx
+## Unreleased - 2022-xx-xx
+- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.


 ## 4.1.0 - 2022-03-02

@@ -28,7 +28,10 @@ tokio = { version = "1.13.1", features = ["sync"] }
 [dev-dependencies]
 actix-rt = "2.2"
 actix-test = "0.1.0-beta.13"
-awc = { version = "3.0.0-beta.21", default-features = false }
+awc = { version = "3", default-features = false }
+actix-web = { version = "4", features = ["macros"] }
+
+mime = "0.3"

 env_logger = "0.9"
 futures-util = { version = "0.3.7", default-features = false }

@@ -4,7 +4,7 @@

 [](https://crates.io/crates/actix-web-actors)
 [](https://docs.rs/actix-web-actors/4.1.0)
-[](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
+
 
 <br />
 [](https://deps.rs/crate/actix-web-actors/4.1.0)
@@ -14,6 +14,58 @@ use futures_core::Stream;
 use tokio::sync::oneshot::Sender;

 /// Execution context for HTTP actors
+///
+/// # Example
+///
+/// A demonstration of [server-sent events](https://developer.mozilla.org/docs/Web/API/Server-sent_events) using actors:
+///
+/// ```no_run
+/// use std::time::Duration;
+///
+/// use actix::{Actor, AsyncContext};
+/// use actix_web::{get, http::header, App, HttpResponse, HttpServer};
+/// use actix_web_actors::HttpContext;
+/// use bytes::Bytes;
+///
+/// struct MyActor {
+///     count: usize,
+/// }
+///
+/// impl Actor for MyActor {
+///     type Context = HttpContext<Self>;
+///
+///     fn started(&mut self, ctx: &mut Self::Context) {
+///         ctx.run_later(Duration::from_millis(100), Self::write);
+///     }
+/// }
+///
+/// impl MyActor {
+///     fn write(&mut self, ctx: &mut HttpContext<Self>) {
+///         self.count += 1;
+///         if self.count > 3 {
+///             ctx.write_eof()
+///         } else {
+///             ctx.write(Bytes::from(format!("event: count\ndata: {}\n\n", self.count)));
+///             ctx.run_later(Duration::from_millis(100), Self::write);
+///         }
+///     }
+/// }
+///
+/// #[get("/")]
+/// async fn index() -> HttpResponse {
+///     HttpResponse::Ok()
+///         .insert_header(header::ContentType(mime::TEXT_EVENT_STREAM))
+///         .streaming(HttpContext::create(MyActor { count: 0 }))
+/// }
+///
+/// #[actix_web::main]
+/// async fn main() -> std::io::Result<()> {
+///     HttpServer::new(|| App::new().service(index))
+///         .bind(("127.0.0.1", 8080))?
+///         .run()
+///         .await
+/// }
+/// ```
 pub struct HttpContext<A>
 where
     A: Actor<Context = HttpContext<A>>,

@@ -210,7 +262,7 @@ mod tests {
         type Context = HttpContext<Self>;

         fn started(&mut self, ctx: &mut Self::Context) {
-            ctx.run_later(Duration::from_millis(100), |slf, ctx| slf.write(ctx));
+            ctx.run_later(Duration::from_millis(100), Self::write);
         }
     }

@@ -221,7 +273,7 @@ mod tests {
                 ctx.write_eof()
             } else {
                 ctx.write(Bytes::from(format!("LINE-{}", self.count)));
-                ctx.run_later(Duration::from_millis(100), |slf, ctx| slf.write(ctx));
+                ctx.run_later(Duration::from_millis(100), Self::write);
             }
         }
     }
@@ -1,4 +1,59 @@
 //! Actix actors support for Actix Web.
+//!
+//! # Examples
+//!
+//! ```no_run
+//! use actix::{Actor, StreamHandler};
+//! use actix_web::{get, web, App, Error, HttpRequest, HttpResponse, HttpServer};
+//! use actix_web_actors::ws;
+//!
+//! /// Define Websocket actor
+//! struct MyWs;
+//!
+//! impl Actor for MyWs {
+//!     type Context = ws::WebsocketContext<Self>;
+//! }
+//!
+//! /// Handler for ws::Message message
+//! impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for MyWs {
+//!     fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
+//!         match msg {
+//!             Ok(ws::Message::Ping(msg)) => ctx.pong(&msg),
+//!             Ok(ws::Message::Text(text)) => ctx.text(text),
+//!             Ok(ws::Message::Binary(bin)) => ctx.binary(bin),
+//!             _ => (),
+//!         }
+//!     }
+//! }
+//!
+//! #[get("/ws")]
+//! async fn index(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
+//!     ws::start(MyWs, &req, stream)
+//! }
+//!
+//! #[actix_web::main]
+//! async fn main() -> std::io::Result<()> {
+//!     HttpServer::new(|| App::new().service(index))
+//!         .bind(("127.0.0.1", 8080))?
+//!         .run()
+//!         .await
+//! }
+//! ```
+//!
+//! # Documentation & Community Resources
+//! In addition to this API documentation, several other resources are available:
+//!
+//! * [Website & User Guide](https://actix.rs/)
+//! * [Documentation for `actix_web`](actix_web)
+//! * [Examples Repository](https://github.com/actix/examples)
+//! * [Community Chat on Discord](https://discord.gg/NWpN5mmg3x)
+//!
+//! To get started navigating the API docs, you may consider looking at the following pages first:
+//!
+//! * [`ws`]: This module provides actor support for WebSockets.
+//!
+//! * [`HttpContext`]: This struct provides actor support for streaming HTTP responses.
+//!

 #![deny(rust_2018_idioms, nonstandard_style)]
 #![warn(future_incompatible)]
@@ -1,4 +1,60 @@
 //! Websocket integration.
+//!
+//! # Examples
+//!
+//! ```no_run
+//! use actix::{Actor, StreamHandler};
+//! use actix_web::{get, web, App, Error, HttpRequest, HttpResponse, HttpServer};
+//! use actix_web_actors::ws;
+//!
+//! /// Define Websocket actor
+//! struct MyWs;
+//!
+//! impl Actor for MyWs {
+//!     type Context = ws::WebsocketContext<Self>;
+//! }
+//!
+//! /// Handler for ws::Message message
+//! impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for MyWs {
+//!     fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
+//!         match msg {
+//!             Ok(ws::Message::Ping(msg)) => ctx.pong(&msg),
+//!             Ok(ws::Message::Text(text)) => ctx.text(text),
+//!             Ok(ws::Message::Binary(bin)) => ctx.binary(bin),
+//!             _ => (),
+//!         }
+//!     }
+//! }
+//!
+//! #[get("/ws")]
+//! async fn websocket(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
+//!     ws::start(MyWs, &req, stream)
+//! }
+//!
+//! const MAX_FRAME_SIZE: usize = 16_384; // 16KiB
+//!
+//! #[get("/custom-ws")]
+//! async fn custom_websocket(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
+//!     // Create a Websocket session with a specific max frame size, and protocols.
+//!     ws::WsResponseBuilder::new(MyWs, &req, stream)
+//!         .frame_size(MAX_FRAME_SIZE)
+//!         .protocols(&["A", "B"])
+//!         .start()
+//! }
+//!
+//! #[actix_web::main]
+//! async fn main() -> std::io::Result<()> {
+//!     HttpServer::new(|| {
+//!         App::new()
+//!             .service(websocket)
+//!             .service(custom_websocket)
+//!     })
+//!     .bind(("127.0.0.1", 8080))?
+//!     .run()
+//!     .await
+//! }
+//! ```
+//!

 use std::{
     collections::VecDeque,

@@ -41,20 +97,51 @@ use tokio::sync::oneshot;
 ///
 /// # Examples
 ///
-/// Create a Websocket session response with default configuration.
-/// ```ignore
-/// WsResponseBuilder::new(WsActor, &req, stream).start()
-/// ```
+/// ```no_run
+/// # use actix::{Actor, StreamHandler};
+/// # use actix_web::{get, web, App, Error, HttpRequest, HttpResponse, HttpServer};
+/// # use actix_web_actors::ws;
+/// #
+/// # struct MyWs;
+/// #
+/// # impl Actor for MyWs {
+/// #     type Context = ws::WebsocketContext<Self>;
+/// # }
+/// #
+/// # /// Handler for ws::Message message
+/// # impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for MyWs {
+/// #     fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {}
+/// # }
+/// #
+/// #[get("/ws")]
+/// async fn websocket(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
+///     ws::WsResponseBuilder::new(MyWs, &req, stream).start()
+/// }
 ///
-/// Create a Websocket session with a specific max frame size, [`Codec`], and protocols.
-/// ```ignore
 /// const MAX_FRAME_SIZE: usize = 16_384; // 16KiB
 ///
-/// ws::WsResponseBuilder::new(WsActor, &req, stream)
-///     .codec(Codec::new())
-///     .protocols(&["A", "B"])
-///     .frame_size(MAX_FRAME_SIZE)
-///     .start()
+/// #[get("/custom-ws")]
+/// async fn custom_websocket(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
+///     // Create a Websocket session with a specific max frame size, codec, and protocols.
+///     ws::WsResponseBuilder::new(MyWs, &req, stream)
+///         .codec(actix_http::ws::Codec::new())
+///         // This will overwrite the codec's max frame-size
+///         .frame_size(MAX_FRAME_SIZE)
+///         .protocols(&["A", "B"])
+///         .start()
+/// }
+/// #
+/// # #[actix_web::main]
+/// # async fn main() -> std::io::Result<()> {
+/// #     HttpServer::new(|| {
+/// #         App::new()
+/// #             .service(websocket)
+/// #             .service(custom_websocket)
+/// #     })
+/// #     .bind(("127.0.0.1", 8080))?
+/// #     .run()
+/// #     .await
+/// # }
 /// ```
 pub struct WsResponseBuilder<'a, A, T>
 where
@@ -1,6 +1,14 @@
 # Changes

-## Unreleased - 2021-xx-xx
+## Unreleased - 2022-xx-xx
+- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.


+## 4.0.1 - 2022-06-11
+- Fix support for guard paths in route handler macros. [#2771]
+- Minimum supported Rust version (MSRV) is now 1.56 due to transitive `hashbrown` dependency.
+
+[#2771]: https://github.com/actix/actix-web/pull/2771
+
+
 ## 4.0.0 - 2022-02-24

@@ -1,6 +1,6 @@
 [package]
 name = "actix-web-codegen"
-version = "4.0.0"
+version = "4.0.1"
 description = "Routing and runtime macros for Actix Web"
 homepage = "https://actix.rs"
 repository = "https://github.com/actix/actix-web.git"

@@ -3,11 +3,11 @@
 > Routing and runtime macros for Actix Web.

 [](https://crates.io/crates/actix-web-codegen)
-[](https://docs.rs/actix-web-codegen/4.0.0)
-[](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
+[](https://docs.rs/actix-web-codegen/4.0.1)
+
 
 <br />
-[](https://deps.rs/crate/actix-web-codegen/4.0.0)
+[](https://deps.rs/crate/actix-web-codegen/4.0.1)
 [](https://crates.io/crates/actix-web-codegen)
 [](https://discord.gg/NWpN5mmg3x)

@@ -4,7 +4,7 @@ use actix_router::ResourceDef;
 use proc_macro::TokenStream;
 use proc_macro2::{Span, TokenStream as TokenStream2};
 use quote::{format_ident, quote, ToTokens, TokenStreamExt};
-use syn::{parse_macro_input, AttributeArgs, Ident, LitStr, NestedMeta};
+use syn::{parse_macro_input, AttributeArgs, Ident, LitStr, NestedMeta, Path};

 enum ResourceType {
     Async,

@@ -77,7 +77,7 @@ impl TryFrom<&syn::LitStr> for MethodType {
 struct Args {
     path: syn::LitStr,
     resource_name: Option<syn::LitStr>,
-    guards: Vec<Ident>,
+    guards: Vec<Path>,
     wrappers: Vec<syn::Type>,
     methods: HashSet<MethodType>,
 }

@@ -121,7 +121,7 @@ impl Args {
                     }
                 } else if nv.path.is_ident("guard") {
                     if let syn::Lit::Str(lit) = nv.lit {
-                        guards.push(Ident::new(&lit.value(), Span::call_site()));
+                        guards.push(lit.parse::<Path>()?);
                     } else {
                         return Err(syn::Error::new_spanned(
                             nv.lit,
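Switching the `guard` argument from `Ident` to `syn::Path` lets the routing macros reference a guard function through a module path instead of requiring a bare identifier in scope. A hedged sketch of the resulting usage (the guard function and header name are hypothetical):

```rust
use actix_web::{get, HttpResponse, Responder};

mod guards {
    use actix_web::guard::GuardContext;

    // any `fn(&GuardContext) -> bool` can serve as a guard
    pub fn api_key_present(ctx: &GuardContext<'_>) -> bool {
        ctx.head().headers().contains_key("x-api-key")
    }
}

// a path such as `guards::api_key_present` is now accepted here
#[get("/secure", guard = "guards::api_key_present")]
async fn secure() -> impl Responder {
    HttpResponse::Ok()
}
```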
@@ -96,6 +96,21 @@ async fn custom_resource_name_test<'a>(req: actix_web::HttpRequest) -> impl Resp
     HttpResponse::Ok()
 }

+mod guard_module {
+    use actix_web::{guard::GuardContext, http::header};
+
+    pub fn guard(ctx: &GuardContext) -> bool {
+        ctx.header::<header::Accept>()
+            .map(|h| h.preference() == "image/*")
+            .unwrap_or(false)
+    }
+}
+
+#[get("/test/guard", guard = "guard_module::guard")]
+async fn guard_test() -> impl Responder {
+    HttpResponse::Ok()
+}
+
 pub struct ChangeStatusCode;

 impl<S, B> Transform<S, ServiceRequest> for ChangeStatusCode

@@ -187,6 +202,7 @@ async fn test_body() {
             .service(test_handler)
             .service(route_test)
             .service(custom_resource_name_test)
+            .service(guard_test)
     });
     let request = srv.request(http::Method::GET, srv.url("/test"));
     let response = request.send().await.unwrap();

@@ -245,6 +261,12 @@ async fn test_body() {
     let request = srv.request(http::Method::GET, srv.url("/custom_resource_name"));
     let response = request.send().await.unwrap();
     assert!(response.status().is_success());
+
+    let request = srv
+        .request(http::Method::GET, srv.url("/test/guard"))
+        .insert_header(("Accept", "image/*"));
+    let response = request.send().await.unwrap();
+    assert!(response.status().is_success());
 }

 #[actix_rt::test]
@@ -1,4 +1,4 @@
-#[rustversion::stable(1.54)] // MSRV
+#[rustversion::stable(1.57)] // MSRV
 #[test]
 fn compile_macros() {
     let t = trybuild::TestCases::new();
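Pinning the `rustversion` attribute to the MSRV keeps the `trybuild` snapshots stable: the compile-fail tests only run on the exact toolchain whose diagnostics the `.stderr` files were recorded against. A hedged sketch of the pattern (the `simple.rs` case name is illustrative):

```rust
// run UI tests only on the pinned MSRV toolchain so rustc's error output
// matches the committed .stderr snapshots
#[rustversion::stable(1.57)]
#[test]
fn compile_macros() {
    let t = trybuild::TestCases::new();
    t.pass("tests/trybuild/simple.rs");
    t.compile_fail("tests/trybuild/route-duplicate-method-fail.rs");
}
```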
@@ -1,11 +1,13 @@
 error: HTTP method defined more than once: `GET`
-  --> $DIR/route-duplicate-method-fail.rs:3:35
+  --> tests/trybuild/route-duplicate-method-fail.rs:3:35
   |
 3 | #[route("/", method="GET", method="GET")]
   |                                   ^^^^^

 error[E0277]: the trait bound `fn() -> impl std::future::Future {index}: HttpServiceFactory` is not satisfied
-  --> $DIR/route-duplicate-method-fail.rs:12:55
+  --> tests/trybuild/route-duplicate-method-fail.rs:12:55
    |
 12 |     let srv = actix_test::start(|| App::new().service(index));
-   |     ^^^^^ the trait `HttpServiceFactory` is not implemented for `fn() -> impl std::future::Future {index}`
+   |     ------- ^^^^^ the trait `HttpServiceFactory` is not implemented for `fn() -> impl std::future::Future {index}`
+   |     |
+   |     required by a bound introduced by this call

@@ -10,4 +10,6 @@ error[E0277]: the trait bound `fn() -> impl std::future::Future {index}: HttpSer
   --> tests/trybuild/route-missing-method-fail.rs:12:55
    |
 12 |     let srv = actix_test::start(|| App::new().service(index));
-   |     ^^^^^ the trait `HttpServiceFactory` is not implemented for `fn() -> impl std::future::Future {index}`
+   |     ------- ^^^^^ the trait `HttpServiceFactory` is not implemented for `fn() -> impl std::future::Future {index}`
+   |     |
+   |     required by a bound introduced by this call

@@ -1,11 +1,13 @@
 error: Unexpected HTTP method: `UNEXPECTED`
-  --> $DIR/route-unexpected-method-fail.rs:3:21
+  --> tests/trybuild/route-unexpected-method-fail.rs:3:21
   |
 3 | #[route("/", method="UNEXPECTED")]
   |                     ^^^^^^^^^^^^

 error[E0277]: the trait bound `fn() -> impl std::future::Future {index}: HttpServiceFactory` is not satisfied
-  --> $DIR/route-unexpected-method-fail.rs:12:55
+  --> tests/trybuild/route-unexpected-method-fail.rs:12:55
    |
 12 |     let srv = actix_test::start(|| App::new().service(index));
-   |     ^^^^^ the trait `HttpServiceFactory` is not implemented for `fn() -> impl std::future::Future {index}`
+   |     ------- ^^^^^ the trait `HttpServiceFactory` is not implemented for `fn() -> impl std::future::Future {index}`
+   |     |
+   |     required by a bound introduced by this call
@@ -1,6 +1,35 @@
 # Changelog

-## Unreleased - 2021-xx-xx
+## Unreleased - 2022-xx-xx
+### Added
+- Add `ServiceRequest::{parts, request}()` getter methods. [#2786]
+- Add configuration options for TLS handshake timeout via `HttpServer::{rustls, openssl}_with_config` methods. [#2752]
+
+### Changed
+- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
+
+[#2752]: https://github.com/actix/actix-web/pull/2752
+[#2786]: https://github.com/actix/actix-web/pull/2786
+
+
+## 4.1.0 - 2022-06-11
+### Added
+- Add `ServiceRequest::extract()` to make it easier to use extractors when writing middlewares. [#2647]
+- Add `Route::wrap()` to allow individual routes to use middleware. [#2725]
+- Add `ServiceConfig::default_service()`. [#2338] [#2743]
+- Implement `ResponseError` for `std::convert::Infallible`
+
+### Changed
+- Minimum supported Rust version (MSRV) is now 1.56 due to transitive `hashbrown` dependency.
+
+### Fixed
+- Clear connection-level data on `HttpRequest` drop. [#2742]
+
+[#2338]: https://github.com/actix/actix-web/pull/2338
+[#2647]: https://github.com/actix/actix-web/pull/2647
+[#2725]: https://github.com/actix/actix-web/pull/2725
+[#2742]: https://github.com/actix/actix-web/pull/2742
+[#2743]: https://github.com/actix/actix-web/pull/2743


 ## 4.0.1 - 2022-02-25

@@ -15,7 +44,7 @@
 - Updated `cookie` to `0.16`. [#2555]
 - Updated `language-tags` to `0.3`.
 - Updated `rand` to `0.8`.
-- Updated `rustls` to `0.20.0`. [#2414]
+- Updated `rustls` to `0.20`. [#2414]
 - Updated `tokio` to `1`.

 ### Added

@@ -716,7 +745,7 @@
 ### Removed
 - Public modules `middleware::{normalize, err_handlers}`. All necessary middleware types are now
   exposed directly by the `middleware` module.
 - Remove `actix-threadpool` as dependency. `actix_threadpool::BlockingError` error type can be imported
   from `actix_web::error` module. [#1878]

 [#1812]: https://github.com/actix/actix-web/pull/1812

@@ -818,7 +847,7 @@

 ## 3.0.0-beta.4 - 2020-09-09
 ### Added
 - `middleware::NormalizePath` now has configurable behavior for either always having a trailing
   slash, or as the new addition, always trimming trailing slashes. [#1639]

 ### Changed
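Of the 4.1.0 additions listed above, `Route::wrap()` is the one that changes how apps are composed: middleware can now be scoped to a single route. A hedged sketch:

```rust
use actix_web::{middleware::Logger, web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new().service(
            web::resource("/ping").route(
                web::get()
                    .to(|| async { HttpResponse::Ok().body("pong") })
                    // route-level middleware, added in 4.1.0 via `Route::wrap()`
                    .wrap(Logger::default()),
            ),
        )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```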
@@ -1,6 +1,6 @@
 [package]
 name = "actix-web"
-version = "4.0.1"
+version = "4.1.0"
 authors = [
     "Nikolay Kim <fafhrd91@gmail.com>",
     "Rob Ede <robjtede@icloud.com>",

@@ -59,7 +59,7 @@ rustls = ["actix-http/rustls", "actix-tls/accept", "actix-tls/rustls"]
 # Don't rely on these whatsoever. They may disappear at anytime.
 __compress = []

-# io-uring feature only avaiable for Linux OSes.
+# io-uring feature only available for Linux OSes.
 experimental-io-uring = ["actix-server/io-uring"]

 [dependencies]

@@ -71,9 +71,9 @@ actix-service = "2"
 actix-utils = "3"
 actix-tls = { version = "3", default-features = false, optional = true }

-actix-http = { version = "3.0.0", features = ["http2", "ws"] }
-actix-router = "0.5.0"
-actix-web-codegen = { version = "4.0.0", optional = true }
+actix-http = { version = "3", features = ["http2", "ws"] }
+actix-router = "0.5"
+actix-web-codegen = { version = "4", optional = true }

 ahash = "0.7"
 bytes = "1"

@@ -90,7 +90,7 @@ once_cell = "1.5"
 log = "0.4"
 mime = "0.3"
 pin-project-lite = "0.2.7"
-regex = "1.4"
+regex = "1.5.5"
 serde = "1.0"
 serde_json = "1.0"
 serde_urlencoded = "0.7"

@@ -100,25 +100,25 @@ time = { version = "0.3", default-features = false, features = ["formatting"] }
 url = "2.1"

 [dev-dependencies]
-actix-files = "0.6.0"
+actix-files = "0.6"
 actix-test = { version = "0.1.0-beta.13", features = ["openssl", "rustls"] }
-awc = { version = "3.0.0-beta.21", features = ["openssl"] }
+awc = { version = "3", features = ["openssl"] }

 brotli = "3.3.3"
-const-str = "0.3"
+const-str = "0.4"
 criterion = { version = "0.3", features = ["html_reports"] }
 env_logger = "0.9"
 flate2 = "1.0.13"
 futures-util = { version = "0.3.7", default-features = false, features = ["std"] }
 rand = "0.8"
-rcgen = "0.8"
-rustls-pemfile = "0.2"
+rcgen = "0.9"
+rustls-pemfile = "1"
 serde = { version = "1.0", features = ["derive"] }
 static_assertions = "1"
 tls-openssl = { package = "openssl", version = "0.10.9" }
 tls-rustls = { package = "rustls", version = "0.20.0" }
 tokio = { version = "1.13.1", features = ["rt-multi-thread", "macros"] }
-zstd = "0.10"
+zstd = "0.11"

 [[test]]
 name = "test_server"
@@ -31,6 +31,7 @@ Headings marked with :warning: are **breaking behavioral changes**. They will pr
 - [Returning `HttpResponse` synchronously](#returning-httpresponse-synchronously)
 - [`#[actix_web::main]` and `#[tokio::main]`](#actix_webmain-and-tokiomain)
 - [`web::block`](#webblock)
+- [`HttpResponse` as a `ResponseError`](#httpresponse-as-a-responseerror)

 ## MSRV

@@ -483,3 +484,24 @@ The `web::block` helper has changed return type from roughly `async fn(fn() -> R
 - let n: u32 = web::block(|| Ok(123)).await?;
 + let n: u32 = web::block(|| Ok(123)).await??;
 ```
+
+## `HttpResponse` as a `ResponseError`
+
+The implementation of `ResponseError` for `HttpResponse` has been removed.
+
+It was common in v3 to use `HttpResponse` as an error type in fallible handlers. The problem is that `HttpResponse` contains no knowledge or reference to the source error. Being able to guarantee that an "error" response actually contains an error reference makes middleware and other parts of Actix Web more effective.
+
+The error response builders in the `error` module were available in v3 but are now the best method for simple error responses without requiring you to implement the trait on your own custom error types. These builders can receive simple strings and third party errors that can not implement the `ResponseError` trait.
+
+A few common patterns are affected by this change:
+
+```diff
+- Err(HttpResponse::InternalServerError().finish())
++ Err(error::ErrorInternalServerError("reason"))
+
+- Err(HttpResponse::InternalServerError().body(third_party_error.to_string()))
++ Err(error::ErrorInternalServerError(err))
+
+- .map_err(|err| HttpResponse::InternalServerError().finish())?
++ .map_err(error::ErrorInternalServerError)?
+```
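A compilable version of the recommended shape, with `lookup_user` standing in for any fallible call whose error type is not (and need not be) a `ResponseError`:

```rust
use actix_web::{error, get, web, App, HttpResponse, HttpServer, Responder, Result};

// Stand-in for a fallible operation; its error only needs `Debug + Display`.
fn lookup_user(id: u32) -> std::io::Result<String> {
    if id == 42 {
        Ok("Marvin".to_owned())
    } else {
        Err(std::io::Error::new(std::io::ErrorKind::NotFound, "no such user"))
    }
}

#[get("/users/{id}")]
async fn user(id: web::Path<u32>) -> Result<impl Responder> {
    // v3 habit: `Err(HttpResponse::InternalServerError().finish())`
    // v4: wrap the source error so middleware can still see it.
    let name = lookup_user(*id).map_err(error::ErrorInternalServerError)?;
    Ok(HttpResponse::Ok().body(name))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(user))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```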
@@ -6,10 +6,10 @@
 <p>

 [](https://crates.io/crates/actix-web)
-[](https://docs.rs/actix-web/4.0.1)
+[](https://docs.rs/actix-web/4.1.0)


-[](https://deps.rs/crate/actix-web/4.0.1)
+[](https://deps.rs/crate/actix-web/4.1.0)
 <br />
 [](https://github.com/actix/actix-web/actions/workflows/ci.yml)
 [](https://codecov.io/gh/actix/actix-web)

@@ -33,7 +33,7 @@
 - SSL support using OpenSSL or Rustls
 - Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/))
 - Integrates with the [`awc` HTTP client](https://docs.rs/awc/)
-- Runs on stable Rust 1.54+
+- Runs on stable Rust 1.57+

 ## Documentation

@@ -56,18 +56,19 @@ Code:
 ```rust
 use actix_web::{get, web, App, HttpServer, Responder};

-#[get("/{id}/{name}/index.html")]
-async fn index(params: web::Path<(u32, String)>) -> impl Responder {
-    let (id, name) = params.into_inner();
-    format!("Hello {}! id:{}", name, id)
+#[get("/hello/{name}")]
+async fn greet(name: web::Path<String>) -> impl Responder {
+    format!("Hello {name}!")
 }

 #[actix_web::main] // or #[tokio::main]
 async fn main() -> std::io::Result<()> {
-    HttpServer::new(|| App::new().service(index))
-        .bind(("127.0.0.1", 8080))?
-        .run()
-        .await
+    HttpServer::new(|| {
+        App::new().service(greet)
+    })
+    .bind(("127.0.0.1", 8080))?
+    .run()
+    .await
 }
 ```

@@ -78,6 +79,7 @@ async fn main() -> std::io::Result<()> {
 - [Application State](https://github.com/actix/examples/tree/master/basics/state)
 - [JSON Handling](https://github.com/actix/examples/tree/master/json/json)
 - [Multipart Streams](https://github.com/actix/examples/tree/master/forms/multipart)
+- [MongoDB Integration](https://github.com/actix/examples/tree/master/databases/mongodb)
 - [Diesel Integration](https://github.com/actix/examples/tree/master/databases/diesel)
 - [SQLite Integration](https://github.com/actix/examples/tree/master/databases/sqlite)
 - [Postgres Integration](https://github.com/actix/examples/tree/master/databases/postgres)
@@ -60,7 +60,7 @@ where
     /// [`HttpRequest::app_data`](crate::HttpRequest::app_data) method at runtime.
     ///
     /// # [`Data<T>`]
-    /// Any [`Data<T>`] type added here can utilize it's extractor implementation in handlers.
+    /// Any [`Data<T>`] type added here can utilize its extractor implementation in handlers.
     /// Types not wrapped in `Data<T>` cannot use this extractor. See [its docs](Data<T>) for more
     /// about its usage and patterns.
     ///

@@ -185,10 +185,17 @@ where
         F: FnOnce(&mut ServiceConfig),
     {
         let mut cfg = ServiceConfig::new();

         f(&mut cfg);

         self.services.extend(cfg.services);
         self.external.extend(cfg.external);
         self.extensions.extend(cfg.app_data);
+
+        if let Some(default) = cfg.default {
+            self.default = Some(default);
+        }
+
         self
     }

@@ -267,7 +274,6 @@ where
     {
         let svc = svc
             .into_factory()
-            .map(|res| res.map_into_boxed_body())
             .map_init_err(|e| log::error!("Can not construct default service: {:?}", e));

         self.default = Some(Rc::new(boxed::factory(svc)));

@@ -308,7 +314,7 @@ where
     /// Registers an app-wide middleware.
     ///
-    /// Registers middleware, in the form of a middleware compo nen t (type), that runs during
+    /// Registers middleware, in the form of a middleware component (type), that runs during
     /// inbound and/or outbound processing in the request life-cycle (request -> response),
     /// modifying request/response as necessary, across all requests managed by the `App`.
     ///

@@ -257,7 +257,7 @@ impl ServiceFactory<ServiceRequest> for AppRoutingFactory {
     type Future = LocalBoxFuture<'static, Result<Self::Service, Self::InitError>>;

     fn new_service(&self, _: ()) -> Self::Future {
-        // construct all services factory future with it's resource def and guards.
+        // construct all services factory future with its resource def and guards.
         let factory_fut = join_all(self.services.iter().map(|(path, factory, guards)| {
             let path = path.clone();
             let guards = guards.borrow_mut().take().unwrap_or_default();

@@ -1,33 +1,32 @@
-use std::net::SocketAddr;
-use std::rc::Rc;
+use std::{net::SocketAddr, rc::Rc};

-use actix_http::Extensions;
-use actix_router::ResourceDef;
-use actix_service::{boxed, IntoServiceFactory, ServiceFactory};
+use actix_service::{boxed, IntoServiceFactory, ServiceFactory, ServiceFactoryExt as _};

-use crate::data::Data;
-use crate::error::Error;
-use crate::guard::Guard;
-use crate::resource::Resource;
-use crate::rmap::ResourceMap;
-use crate::route::Route;
-use crate::service::{
-    AppServiceFactory, HttpServiceFactory, ServiceFactoryWrapper, ServiceRequest,
-    ServiceResponse,
+use crate::{
+    data::Data,
+    dev::{Extensions, ResourceDef},
+    error::Error,
+    guard::Guard,
+    resource::Resource,
+    rmap::ResourceMap,
+    route::Route,
+    service::{
+        AppServiceFactory, BoxedHttpServiceFactory, HttpServiceFactory, ServiceFactoryWrapper,
+        ServiceRequest, ServiceResponse,
+    },
 };

 type Guards = Vec<Box<dyn Guard>>;
-type HttpNewService = boxed::BoxServiceFactory<(), ServiceRequest, ServiceResponse, Error, ()>;

 /// Application configuration
 pub struct AppService {
     config: AppConfig,
     root: bool,
-    default: Rc<HttpNewService>,
+    default: Rc<BoxedHttpServiceFactory>,
     #[allow(clippy::type_complexity)]
     services: Vec<(
         ResourceDef,
-        HttpNewService,
+        BoxedHttpServiceFactory,
         Option<Guards>,
         Option<Rc<ResourceMap>>,
     )>,

@@ -35,7 +34,7 @@ pub struct AppService {

 impl AppService {
     /// Crate server settings instance.
-    pub(crate) fn new(config: AppConfig, default: Rc<HttpNewService>) -> Self {
+    pub(crate) fn new(config: AppConfig, default: Rc<BoxedHttpServiceFactory>) -> Self {
         AppService {
             config,
             default,

@@ -56,7 +55,7 @@ impl AppService {
         AppConfig,
         Vec<(
             ResourceDef,
-            HttpNewService,
+            BoxedHttpServiceFactory,
             Option<Guards>,
             Option<Rc<ResourceMap>>,
         )>,

@@ -81,7 +80,7 @@ impl AppService {
     }

     /// Returns default handler factory.
-    pub fn default_service(&self) -> Rc<HttpNewService> {
+    pub fn default_service(&self) -> Rc<BoxedHttpServiceFactory> {
         self.default.clone()
     }

@@ -154,6 +153,16 @@ impl AppConfig {
 }

 impl Default for AppConfig {
+    /// Returns the default AppConfig.
+    /// Note: The included socket address is "127.0.0.1".
+    ///
+    /// 127.0.0.1: non-routable meta address that denotes an unknown, invalid or non-applicable target.
+    /// If you need a service only accessed by itself, use a loopback address.
+    /// A loopback address for IPv4 is any loopback address that begins with "127".
+    /// Loopback addresses should be only used to test your application locally.
+    /// The default configuration provides a loopback address.
+    ///
+    /// 0.0.0.0: if configured to use this special address, the application will listen to any IP address configured on the machine.
     fn default() -> Self {
         AppConfig::new(
             false,
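The loopback vs. `0.0.0.0` distinction that the new doc comment draws applies to `HttpServer::bind` in the same way; a small sketch (the ports are arbitrary):

```rust
use actix_web::{web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().route("/", web::get().to(HttpResponse::Ok)))
        // Local-only testing: loopback address, unreachable from other hosts.
        .bind(("127.0.0.1", 8080))?
        // Listen on every IP address configured on the machine.
        .bind(("0.0.0.0", 8081))?
        .run()
        .await
}
```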
@ -187,6 +196,7 @@ pub struct ServiceConfig {
|
|||||||
pub(crate) services: Vec<Box<dyn AppServiceFactory>>,
|
pub(crate) services: Vec<Box<dyn AppServiceFactory>>,
|
||||||
pub(crate) external: Vec<ResourceDef>,
|
pub(crate) external: Vec<ResourceDef>,
|
||||||
pub(crate) app_data: Extensions,
|
pub(crate) app_data: Extensions,
|
||||||
|
pub(crate) default: Option<Rc<BoxedHttpServiceFactory>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ServiceConfig {
|
impl ServiceConfig {
|
||||||
@ -195,6 +205,7 @@ impl ServiceConfig {
|
|||||||
services: Vec::new(),
|
services: Vec::new(),
|
||||||
external: Vec::new(),
|
external: Vec::new(),
|
||||||
app_data: Extensions::new(),
|
app_data: Extensions::new(),
|
||||||
|
default: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -215,6 +226,29 @@ impl ServiceConfig {
|
|||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Default service to be used if no matching resource could be found.
|
||||||
|
///
|
||||||
|
/// Counterpart to [`App::default_service()`](crate::App::default_service).
|
||||||
|
pub fn default_service<F, U>(&mut self, f: F) -> &mut Self
|
||||||
|
where
|
||||||
|
F: IntoServiceFactory<U, ServiceRequest>,
|
||||||
|
U: ServiceFactory<
|
||||||
|
ServiceRequest,
|
||||||
|
Config = (),
|
||||||
|
Response = ServiceResponse,
|
||||||
|
Error = Error,
|
||||||
|
> + 'static,
|
||||||
|
U::InitError: std::fmt::Debug,
|
||||||
|
{
|
||||||
|
let svc = f
|
||||||
|
.into_factory()
|
||||||
|
.map_init_err(|err| log::error!("Can not construct default service: {:?}", err));
|
||||||
|
|
||||||
|
self.default = Some(Rc::new(boxed::factory(svc)));
|
||||||
|
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
/// Run external configuration as part of the application building process
|
/// Run external configuration as part of the application building process
|
||||||
///
|
///
|
||||||
/// Counterpart to [`App::configure()`](crate::App::configure) that allows for easy nesting.
|
/// Counterpart to [`App::configure()`](crate::App::configure) that allows for easy nesting.
|
||||||
@ -322,6 +356,38 @@ mod tests {
|
|||||||
assert_eq!(body, Bytes::from_static(b"https://youtube.com/watch/12345"));
|
assert_eq!(body, Bytes::from_static(b"https://youtube.com/watch/12345"));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn registers_default_service() {
|
||||||
|
let srv = init_service(
|
||||||
|
App::new()
|
||||||
|
.configure(|cfg| {
|
||||||
|
cfg.default_service(
|
||||||
|
web::get().to(|| HttpResponse::NotFound().body("four oh four")),
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.service(web::scope("/scoped").configure(|cfg| {
|
||||||
|
cfg.default_service(
|
||||||
|
web::get().to(|| HttpResponse::NotFound().body("scoped four oh four")),
|
||||||
|
);
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
|
// app registers default service
|
||||||
|
let req = TestRequest::with_uri("/path/i/did/not-configure").to_request();
|
||||||
|
let resp = call_service(&srv, req).await;
|
||||||
|
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
|
||||||
|
let body = read_body(resp).await;
|
||||||
|
assert_eq!(body, Bytes::from_static(b"four oh four"));
|
||||||
|
|
||||||
|
// scope registers default service
|
||||||
|
let req = TestRequest::with_uri("/scoped/path/i/did/not-configure").to_request();
|
||||||
|
let resp = call_service(&srv, req).await;
|
||||||
|
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
|
||||||
|
let body = read_body(resp).await;
|
||||||
|
assert_eq!(body, Bytes::from_static(b"scoped four oh four"));
|
||||||
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn test_service() {
|
async fn test_service() {
|
||||||
let srv = init_service(App::new().configure(|cfg| {
|
let srv = init_service(App::new().configure(|cfg| {
|
||||||
|
@ -5,10 +5,7 @@ use actix_utils::future::{err, ok, Ready};
|
|||||||
use futures_core::future::LocalBoxFuture;
|
use futures_core::future::LocalBoxFuture;
|
||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
|
|
||||||
use crate::{
|
use crate::{dev::Payload, error, Error, FromRequest, HttpRequest};
|
||||||
dev::Payload, error::ErrorInternalServerError, extract::FromRequest, request::HttpRequest,
|
|
||||||
Error,
|
|
||||||
};
|
|
||||||
|
|
||||||
/// Data factory.
|
/// Data factory.
|
||||||
pub(crate) trait DataFactory {
|
pub(crate) trait DataFactory {
|
||||||
@ -160,7 +157,7 @@ impl<T: ?Sized + 'static> FromRequest for Data<T> {
|
|||||||
req.match_name().unwrap_or_else(|| req.path())
|
req.match_name().unwrap_or_else(|| req.path())
|
||||||
);
|
);
|
||||||
|
|
||||||
err(ErrorInternalServerError(
|
err(error::ErrorInternalServerError(
|
||||||
"Requested application data is not configured correctly. \
|
"Requested application data is not configured correctly. \
|
||||||
View/enable debug logs for more details.",
|
View/enable debug logs for more details.",
|
||||||
))
|
))
|
||||||
|
@ -51,12 +51,6 @@ impl StdError for Error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<std::convert::Infallible> for Error {
|
|
||||||
fn from(val: std::convert::Infallible) -> Self {
|
|
||||||
match val {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// `Error` for any error that implements `ResponseError`
|
/// `Error` for any error that implements `ResponseError`
|
||||||
impl<T: ResponseError + 'static> From<T> for Error {
|
impl<T: ResponseError + 'static> From<T> for Error {
|
||||||
fn from(err: T) -> Error {
|
fn from(err: T) -> Error {
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
//! `ResponseError` trait and foreign impls.
|
//! `ResponseError` trait and foreign impls.
|
||||||
|
|
||||||
use std::{
|
use std::{
|
||||||
|
convert::Infallible,
|
||||||
error::Error as StdError,
|
error::Error as StdError,
|
||||||
fmt,
|
fmt,
|
||||||
io::{self, Write as _},
|
io::{self, Write as _},
|
||||||
@ -54,6 +55,15 @@ downcast_dyn!(ResponseError);
|
|||||||
|
|
||||||
impl ResponseError for Box<dyn StdError + 'static> {}
|
impl ResponseError for Box<dyn StdError + 'static> {}
|
||||||
|
|
||||||
|
impl ResponseError for Infallible {
|
||||||
|
fn status_code(&self) -> StatusCode {
|
||||||
|
match *self {}
|
||||||
|
}
|
||||||
|
fn error_response(&self) -> HttpResponse<BoxBody> {
|
||||||
|
match *self {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(feature = "openssl")]
|
#[cfg(feature = "openssl")]
|
||||||
impl ResponseError for actix_tls::accept::openssl::reexports::Error {}
|
impl ResponseError for actix_tls::accept::openssl::reexports::Error {}
|
||||||
|
|
||||||
|
@ -18,9 +18,11 @@ use crate::{dev::Payload, Error, HttpRequest};
|
|||||||
/// A type that implements [`FromRequest`] is called an **extractor** and can extract data from
|
/// A type that implements [`FromRequest`] is called an **extractor** and can extract data from
|
||||||
/// the request. Some types that implement this trait are: [`Json`], [`Header`], and [`Path`].
|
/// the request. Some types that implement this trait are: [`Json`], [`Header`], and [`Path`].
|
||||||
///
|
///
|
||||||
|
/// Check out [`ServiceRequest::extract`](crate::dev::ServiceRequest::extract) if you want to
|
||||||
|
/// leverage extractors when implementing middlewares.
|
||||||
|
///
|
||||||
/// # Configuration
|
/// # Configuration
|
||||||
/// An extractor can be customized by injecting the corresponding configuration with one of:
|
/// An extractor can be customized by injecting the corresponding configuration with one of:
|
||||||
///
|
|
||||||
/// - [`App::app_data()`][crate::App::app_data]
|
/// - [`App::app_data()`][crate::App::app_data]
|
||||||
/// - [`Scope::app_data()`][crate::Scope::app_data]
|
/// - [`Scope::app_data()`][crate::Scope::app_data]
|
||||||
/// - [`Resource::app_data()`][crate::Resource::app_data]
|
/// - [`Resource::app_data()`][crate::Resource::app_data]
|
||||||
@ -64,13 +66,29 @@ pub trait FromRequest: Sized {
|
|||||||
/// The associated error which can be returned.
|
/// The associated error which can be returned.
|
||||||
type Error: Into<Error>;
|
type Error: Into<Error>;
|
||||||
|
|
||||||
/// Future that resolves to a Self.
|
/// Future that resolves to a `Self`.
|
||||||
|
///
|
||||||
|
/// To use an async function or block, the futures must be boxed. The following snippet will be
|
||||||
|
/// common when creating async/await extractors (that do not consume the body).
|
||||||
|
///
|
||||||
|
/// ```ignore
|
||||||
|
/// type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;
|
||||||
|
/// // or
|
||||||
|
/// type Future = futures_util::future::LocalBoxFuture<'static, Result<Self, Self::Error>>;
|
||||||
|
///
|
||||||
|
/// fn from_request(req: HttpRequest, ...) -> Self::Future {
|
||||||
|
/// let req = req.clone();
|
||||||
|
/// Box::pin(async move {
|
||||||
|
/// ...
|
||||||
|
/// })
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
type Future: Future<Output = Result<Self, Self::Error>>;
|
type Future: Future<Output = Result<Self, Self::Error>>;
|
||||||
|
|
||||||
/// Create a Self from request parts asynchronously.
|
/// Create a `Self` from request parts asynchronously.
|
||||||
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future;
|
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future;
|
||||||
|
|
||||||
/// Create a Self from request head asynchronously.
|
/// Create a `Self` from request head asynchronously.
|
||||||
///
|
///
|
||||||
/// This method is short for `T::from_request(req, &mut Payload::None)`.
|
/// This method is short for `T::from_request(req, &mut Payload::None)`.
|
||||||
fn extract(req: &HttpRequest) -> Self::Future {
|
fn extract(req: &HttpRequest) -> Self::Future {
|
||||||
@ -78,9 +96,9 @@ pub trait FromRequest: Sized {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Optionally extract a field from the request
|
/// Optionally extract from the request.
|
||||||
///
|
///
|
||||||
/// If the FromRequest for T fails, return None rather than returning an error response
|
/// If the inner `T::from_request` returns an error, handler will receive `None` instead.
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
/// ```
|
/// ```
|
||||||
@ -165,9 +183,10 @@ where
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Optionally extract a field from the request or extract the Error if unsuccessful
|
/// Extract from the request, passing error type through to handler.
|
||||||
///
|
///
|
||||||
/// If the `FromRequest` for T fails, inject Err into handler rather than returning an error response
|
/// If the inner `T::from_request` returns an error, allow handler to receive the error rather than
|
||||||
|
/// immediately returning an error response.
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
/// ```
|
/// ```
|
||||||
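A self-contained instance of the boxed-future pattern described in the new `FromRequest` docs above; `ClientIp` is an invented example type, not part of actix-web:

```rust
use std::future::Future;
use std::pin::Pin;

use actix_web::{dev::Payload, error, Error, FromRequest, HttpRequest};

/// Invented example extractor: the connecting peer's IP address as a string.
struct ClientIp(String);

impl FromRequest for ClientIp {
    type Error = Error;
    // Boxing the future allows an async block in `from_request`.
    type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;

    fn from_request(req: &HttpRequest, _payload: &mut Payload) -> Self::Future {
        let req = req.clone();
        Box::pin(async move {
            req.peer_addr()
                .map(|addr| ClientIp(addr.ip().to_string()))
                .ok_or_else(|| error::ErrorBadRequest("peer address unknown"))
        })
    }
}
```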
@@ -254,7 +254,7 @@ impl Guard for AllGuard {
     }
 }

-/// Wraps a guard and inverts the outcome of it's `Guard` implementation.
+/// Wraps a guard and inverts the outcome of its `Guard` implementation.
 ///
 /// # Examples
 /// The handler below will be called for any request method apart from `GET`.

@@ -459,7 +459,7 @@ impl Guard for HostGuard {
             return scheme == req_host_uri_scheme;
         }

-        // TODO: is the the correct behavior?
+        // TODO: is this the correct behavior?
         // falls through if scheme cannot be determined
     }

@@ -37,7 +37,7 @@ use crate::{
 /// Thanks to Rust's type system, Actix Web can infer the function parameter types. During the
 /// extraction step, the parameter types are described as a tuple type, [`from_request`] is run on
 /// that tuple, and the `Handler::call` implementation for that particular function arity
-/// destructures the tuple into it's component types and calls your handler function with them.
+/// destructures the tuple into its component types and calls your handler function with them.
 ///
 /// In pseudo-code the process looks something like this:
 /// ```ignore

@@ -4,18 +4,19 @@
 //! ```no_run
 //! use actix_web::{get, web, App, HttpServer, Responder};
 //!
-//! #[get("/{id}/{name}/index.html")]
-//! async fn index(path: web::Path<(u32, String)>) -> impl Responder {
-//!     let (id, name) = path.into_inner();
-//!     format!("Hello {}! id:{}", name, id)
+//! #[get("/hello/{name}")]
+//! async fn greet(name: web::Path<String>) -> impl Responder {
+//!     format!("Hello {}!", name)
 //! }
 //!
-//! #[actix_web::main]
+//! #[actix_web::main] // or #[tokio::main]
 //! async fn main() -> std::io::Result<()> {
-//!     HttpServer::new(|| App::new().service(index))
-//!         .bind("127.0.0.1:8080")?
-//!         .run()
-//!         .await
+//!     HttpServer::new(|| {
+//!         App::new().service(greet)
+//!     })
+//!     .bind(("127.0.0.1", 8080))?
+//!     .run()
+//!     .await
 //! }
 //! ```
 //!

@@ -11,3 +11,6 @@
 ## Error Propagation

 ## When To (Not) Use Middleware
+
+## Author's References
+- `EitherBody` + when is middleware appropriate: https://discord.com/channels/771444961383153695/952016890723729428
@@ -251,6 +251,8 @@ static SUPPORTED_ENCODINGS: Lazy<Vec<Encoding>> = Lazy::new(|| {
 #[cfg(feature = "compress-gzip")]
 #[cfg(test)]
 mod tests {
+    use std::collections::HashSet;
+
     use super::*;
     use crate::{middleware::DefaultHeaders, test, web, App};

@@ -305,4 +307,27 @@ mod tests {
         let bytes = test::read_body(res).await;
         assert_eq!(gzip_decode(bytes), DATA.as_bytes());
     }
+
+    #[actix_rt::test]
+    async fn retains_previously_set_vary_header() {
+        let app = test::init_service({
+            App::new()
+                .wrap(Compress::default())
+                .default_service(web::to(move || {
+                    HttpResponse::Ok()
+                        .insert_header((header::VARY, "x-test"))
+                        .finish()
+                }))
+        })
+        .await;
+
+        let req = test::TestRequest::default()
+            .insert_header((header::ACCEPT_ENCODING, "gzip"))
+            .to_request();
+        let res = test::call_service(&app, req).await;
+        assert_eq!(res.status(), StatusCode::OK);
+        let vary_headers = res.headers().get_all(header::VARY).collect::<HashSet<_>>();
+        assert!(vary_headers.contains(&HeaderValue::from_static("x-test")));
+        assert!(vary_headers.contains(&HeaderValue::from_static("accept-encoding")));
+    }
 }

@@ -381,12 +381,16 @@ impl Drop for HttpRequest {
         inner.app_data.truncate(1);

         // Inner is borrowed mut here and; get req data mutably to reduce borrow check. Also
-        // we know the req_data Rc will not have any cloned at this point to unwrap is okay.
+        // we know the req_data Rc will not have any clones at this point to unwrap is okay.
         Rc::get_mut(&mut inner.extensions)
             .unwrap()
             .get_mut()
             .clear();

+        // We can't use the same trick as req data because the conn_data is held by the
+        // dispatcher, too.
+        inner.conn_data = None;
+
         // a re-borrow of pool is necessary here.
         let req = Rc::clone(&self.inner);
         self.app_state().pool().push(req);

@@ -761,10 +765,8 @@ mod tests {
         assert_eq!(body, Bytes::from_static(b"1"));
     }

-    // allow deprecated App::data
-    #[allow(deprecated)]
     #[actix_rt::test]
-    async fn test_extensions_dropped() {
+    async fn test_app_data_dropped() {
         struct Tracker {
             pub dropped: bool,
         }

@@ -780,7 +782,7 @@ mod tests {
         let tracker = Rc::new(RefCell::new(Tracker { dropped: false }));
         {
             let tracker2 = Rc::clone(&tracker);
-            let srv = init_service(App::new().data(10u32).service(web::resource("/").to(
+            let srv = init_service(App::new().service(web::resource("/").to(
                 move |req: HttpRequest| {
                     req.extensions_mut().insert(Foo {
                         tracker: Rc::clone(&tracker2),

@@ -343,7 +343,7 @@ mod response_fut_impl {

 // Future is only implemented for BoxBody payload type because it's the most useful for making
 // simple handlers without async blocks. Making it generic over all MessageBody types requires a
-// future impl on Response which would cause it's body field to be, undesirably, Option<B>.
+// future impl on Response which would cause its body field to be, undesirably, Option<B>.
 //
 // This impl is not particularly efficient due to the Response construction and should probably
 // not be invoked if performance is important. Prefer an async fn/block in such cases.

@@ -1,15 +1,17 @@
 use std::{mem, rc::Rc};

-use actix_http::Method;
+use actix_http::{body::MessageBody, Method};
 use actix_service::{
+    apply,
     boxed::{self, BoxService},
-    fn_service, Service, ServiceFactory, ServiceFactoryExt,
+    fn_service, Service, ServiceFactory, ServiceFactoryExt, Transform,
 };
 use futures_core::future::LocalBoxFuture;

 use crate::{
     guard::{self, Guard},
     handler::{handler_service, Handler},
+    middleware::Compat,
     service::{BoxedHttpServiceFactory, ServiceRequest, ServiceResponse},
     Error, FromRequest, HttpResponse, Responder,
 };

@@ -35,6 +37,31 @@ impl Route {
         }
     }

+    /// Registers a route middleware.
+    ///
+    /// `mw` is a middleware component (type), that can modify the requests and responses handled by
+    /// this `Route`.
+    ///
+    /// See [`App::wrap`](crate::App::wrap) for more details.
+    #[doc(alias = "middleware")]
+    #[doc(alias = "use")] // nodejs terminology
+    pub fn wrap<M, B>(self, mw: M) -> Route
+    where
+        M: Transform<
+                BoxService<ServiceRequest, ServiceResponse, Error>,
+                ServiceRequest,
+                Response = ServiceResponse<B>,
+                Error = Error,
+                InitError = (),
+            > + 'static,
+        B: MessageBody + 'static,
+    {
+        Route {
+            service: boxed::factory(apply(Compat::new(mw), self.service)),
+            guards: self.guards,
+        }
+    }
+
     pub(crate) fn take_guards(&mut self) -> Vec<Box<dyn Guard>> {
         mem::take(Rc::get_mut(&mut self.guards).unwrap())
     }

@@ -246,11 +273,15 @@ mod tests {
     use futures_core::future::LocalBoxFuture;
     use serde::Serialize;

-    use crate::dev::{always_ready, fn_factory, fn_service, Service};
-    use crate::http::{header, Method, StatusCode};
-    use crate::service::{ServiceRequest, ServiceResponse};
-    use crate::test::{call_service, init_service, read_body, TestRequest};
-    use crate::{error, web, App, HttpResponse};
+    use crate::{
+        dev::{always_ready, fn_factory, fn_service, Service},
+        error,
+        http::{header, Method, StatusCode},
+        middleware::{DefaultHeaders, Logger},
+        service::{ServiceRequest, ServiceResponse},
+        test::{call_service, init_service, read_body, TestRequest},
+        web, App, HttpResponse,
+    };

     #[derive(Serialize, PartialEq, Debug)]
     struct MyObject {

@@ -323,6 +354,44 @@ mod tests {
         assert_eq!(body, Bytes::from_static(b"{\"name\":\"test\"}"));
     }

+    #[actix_rt::test]
+    async fn route_middleware() {
+        let srv = init_service(
+            App::new()
+                .route("/", web::get().to(HttpResponse::Ok).wrap(Logger::default()))
+                .service(
+                    web::resource("/test")
+                        .route(web::get().to(HttpResponse::Ok))
+                        .route(
+                            web::post()
+                                .to(HttpResponse::Created)
+                                .wrap(DefaultHeaders::new().add(("x-test", "x-posted"))),
+                        )
+                        .route(
+                            web::delete()
+                                .to(HttpResponse::Accepted)
+                                // logger changes body type, proving Compat is not needed
+                                .wrap(Logger::default()),
+                        ),
+                ),
+        )
+        .await;
+
+        let req = TestRequest::get().uri("/test").to_request();
+        let res = call_service(&srv, req).await;
+        assert_eq!(res.status(), StatusCode::OK);
+        assert!(!res.headers().contains_key("x-test"));
+
+        let req = TestRequest::post().uri("/test").to_request();
+        let res = call_service(&srv, req).await;
+        assert_eq!(res.status(), StatusCode::CREATED);
+        assert_eq!(res.headers().get("x-test").unwrap(), "x-posted");
+
+        let req = TestRequest::delete().uri("/test").to_request();
+        let res = call_service(&srv, req).await;
+        assert_eq!(res.status(), StatusCode::ACCEPTED);
+    }
+
     #[actix_rt::test]
     async fn test_service_handler() {
         struct HelloWorld;

@@ -198,6 +198,10 @@ where
             .get_or_insert_with(Extensions::new)
             .extend(cfg.app_data);

+        if let Some(default) = cfg.default {
+            self.default = Some(default);
+        }
+
         self
     }

@@ -18,6 +18,9 @@ use actix_tls::accept::openssl::reexports::{AlpnError, SslAcceptor, SslAcceptorB
 #[cfg(feature = "rustls")]
 use actix_tls::accept::rustls::reexports::ServerConfig as RustlsServerConfig;

+#[cfg(any(feature = "openssl", feature = "rustls"))]
+use actix_http::TlsAcceptorConfig;
+
 use crate::{config::AppConfig, Error};

 struct Socket {

@@ -30,6 +33,8 @@ struct Config {
     keep_alive: KeepAlive,
     client_request_timeout: Duration,
     client_disconnect_timeout: Duration,
+    #[cfg(any(feature = "openssl", feature = "rustls"))]
+    tls_handshake_timeout: Option<Duration>,
 }

 /// An HTTP Server.

@@ -92,6 +97,8 @@ where
                 keep_alive: KeepAlive::default(),
                 client_request_timeout: Duration::from_secs(5),
                 client_disconnect_timeout: Duration::from_secs(1),
+                #[cfg(any(feature = "rustls", feature = "openssl"))]
+                tls_handshake_timeout: None,
             })),
             backlog: 1024,
             sockets: Vec::new(),

@@ -225,6 +232,24 @@ where
         self
     }

+    /// Set TLS handshake timeout.
+    ///
+    /// Defines a timeout for TLS handshake. If the TLS handshake does not complete
+    /// within this time, the connection is closed.
+    ///
+    /// By default handshake timeout is set to 3000 milliseconds.
+    #[cfg(any(feature = "openssl", feature = "rustls"))]
+    #[cfg_attr(docsrs, doc(cfg(any(feature = "openssl", feature = "rustls"))))]
+    pub fn tls_handshake_timeout(self, dur: Duration) -> Self {
+        self.config
+            .lock()
+            .unwrap()
+            .tls_handshake_timeout
+            .replace(dur);
+
+        self
+    }
+
     #[doc(hidden)]
     #[deprecated(since = "4.0.0", note = "Renamed to `client_disconnect_timeout`.")]
     pub fn client_shutdown(self, dur: u64) -> Self {

@@ -367,9 +392,7 @@ where
                 .local_addr(addr);

             let svc = if let Some(handler) = on_connect_fn.clone() {
-                svc.on_connect_ext(move |io: &_, ext: _| {
-                    (&*handler)(io as &dyn Any, ext)
-                })
+                svc.on_connect_ext(move |io: &_, ext: _| (handler)(io as &dyn Any, ext))
             } else {
                 svc
             };

@@ -378,10 +401,15 @@ where
                 .into_factory()
                 .map_err(|err| err.into().error_response());

+            let acceptor_config = match c.tls_handshake_timeout {
+                Some(dur) => TlsAcceptorConfig::default().handshake_timeout(dur),
+                None => TlsAcceptorConfig::default(),
+            };
+
             svc.finish(map_config(fac, move |_| {
                 AppConfig::new(true, host.clone(), addr)
             }))
-            .openssl(acceptor.clone())
+            .openssl_with_config(acceptor.clone(), acceptor_config)
         })?;

         Ok(self)

@@ -436,10 +464,15 @@ where
                 .into_factory()
                 .map_err(|err| err.into().error_response());

+            let acceptor_config = match c.tls_handshake_timeout {
+                Some(dur) => TlsAcceptorConfig::default().handshake_timeout(dur),
+                None => TlsAcceptorConfig::default(),
+            };
+
             svc.finish(map_config(fac, move |_| {
                 AppConfig::new(true, host.clone(), addr)
             }))
-            .rustls(config.clone())
+            .rustls_with_config(config.clone(), acceptor_config)
         })?;

         Ok(self)

@@ -555,7 +588,7 @@ where

             if let Some(handler) = on_connect_fn.clone() {
                 svc = svc
-                    .on_connect_ext(move |io: &_, ext: _| (&*handler)(io as &dyn Any, ext));
+                    .on_connect_ext(move |io: &_, ext: _| (handler)(io as &dyn Any, ext));
             }

             let fac = factory()
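How the new `tls_handshake_timeout` builder method might be combined with the rustls binding shown above; `load_rustls_config` is a placeholder for whatever certificate setup a project already has (assumes the `rustls` feature and a `rustls` 0.20 dependency):

```rust
use std::time::Duration;

use actix_web::{web, App, HttpResponse, HttpServer};

// Placeholder: build a rustls `ServerConfig` from your certificate chain and key.
fn load_rustls_config() -> rustls::ServerConfig {
    unimplemented!("certificate loading elided")
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let tls_config = load_rustls_config();

    HttpServer::new(|| App::new().route("/", web::get().to(HttpResponse::Ok)))
        // Close connections whose TLS handshake takes longer than 5 seconds
        // (the default is 3000 milliseconds, per the doc comment above).
        .tls_handshake_timeout(Duration::from_secs(5))
        .bind_rustls("127.0.0.1:8443", tls_config)?
        .run()
        .await
}
```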
@@ -24,7 +24,7 @@ use crate::{
     guard::{Guard, GuardContext},
     info::ConnectionInfo,
     rmap::ResourceMap,
-    Error, HttpRequest, HttpResponse,
+    Error, FromRequest, HttpRequest, HttpResponse,
 };

 pub(crate) type BoxedHttpService = BoxService<ServiceRequest, ServiceResponse<BoxBody>, Error>;

@@ -78,23 +78,60 @@ pub struct ServiceRequest {
 }

 impl ServiceRequest {
-    /// Construct service request
+    /// Construct `ServiceRequest` from parts.
     pub(crate) fn new(req: HttpRequest, payload: Payload) -> Self {
         Self { req, payload }
     }

-    /// Deconstruct request into parts
+    /// Deconstruct `ServiceRequest` into inner parts.
     #[inline]
     pub fn into_parts(self) -> (HttpRequest, Payload) {
         (self.req, self.payload)
     }

-    /// Get mutable access to inner `HttpRequest` and `Payload`
+    /// Returns mutable accessors to inner parts.
     #[inline]
     pub fn parts_mut(&mut self) -> (&mut HttpRequest, &mut Payload) {
         (&mut self.req, &mut self.payload)
     }

+    /// Returns immutable accessors to inner parts.
+    #[inline]
+    pub fn parts(&self) -> (&HttpRequest, &Payload) {
+        (&self.req, &self.payload)
+    }
+
+    /// Returns immutable accessor to inner [`HttpRequest`].
+    #[inline]
+    pub fn request(&self) -> &HttpRequest {
+        &self.req
+    }
+
+    /// Derives a type from this request using an [extractor](crate::FromRequest).
+    ///
+    /// Returns the `T` extractor's `Future` type which can be `await`ed. This is particularly handy
+    /// when you want to use an extractor in a middleware implementation.
+    ///
+    /// # Examples
+    /// ```
+    /// use actix_web::{
+    ///     dev::{ServiceRequest, ServiceResponse},
+    ///     web::Path, Error
+    /// };
+    ///
+    /// async fn my_helper(mut srv_req: ServiceRequest) -> Result<ServiceResponse, Error> {
+    ///     let path = srv_req.extract::<Path<(String, u32)>>().await?;
+    ///     // [...]
+    /// #   todo!()
+    /// }
+    /// ```
+    pub fn extract<T>(&mut self) -> <T as FromRequest>::Future
+    where
+        T: FromRequest,
+    {
+        T::from_request(&self.req, &mut self.payload)
+    }
+
     /// Construct request from parts.
     pub fn from_parts(req: HttpRequest, payload: Payload) -> Self {
         #[cfg(debug_assertions)]
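Beyond the doc example above, `ServiceRequest::extract()` also fits `wrap_fn`-style middleware. A sketch that logs the query parameters before the wrapped service runs (route and parameter names are illustrative; `Query`'s extractor future does not borrow the request, so it can be created before the request is moved into `call`):

```rust
use std::collections::HashMap;

use actix_web::{dev::Service as _, web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            .wrap_fn(|mut req, srv| {
                // Run the `Query` extractor on the incoming request.
                let query = req.extract::<web::Query<HashMap<String, String>>>();
                // Hand the request on to the wrapped service.
                let fut = srv.call(req);

                async move {
                    if let Ok(q) = query.await {
                        println!("query params: {:?}", q.into_inner());
                    }
                    fut.await
                }
            })
            .route("/", web::get().to(HttpResponse::Ok))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```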
@ -105,9 +142,7 @@ impl ServiceRequest {
|
|||||||
Self { req, payload }
|
Self { req, payload }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Construct request from request.
|
/// Construct `ServiceRequest` with no payload from given `HttpRequest`.
|
||||||
///
|
|
||||||
/// The returned `ServiceRequest` would have no payload.
|
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn from_request(req: HttpRequest) -> Self {
|
pub fn from_request(req: HttpRequest) -> Self {
|
||||||
ServiceRequest {
|
ServiceRequest {
|
||||||
@ -116,63 +151,63 @@ impl ServiceRequest {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create service response
|
/// Create `ServiceResponse` from this request and given response.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn into_response<B, R: Into<Response<B>>>(self, res: R) -> ServiceResponse<B> {
|
pub fn into_response<B, R: Into<Response<B>>>(self, res: R) -> ServiceResponse<B> {
|
||||||
let res = HttpResponse::from(res.into());
|
let res = HttpResponse::from(res.into());
|
||||||
ServiceResponse::new(self.req, res)
|
ServiceResponse::new(self.req, res)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create service response for error
|
/// Create `ServiceResponse` from this request and given error.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn error_response<E: Into<Error>>(self, err: E) -> ServiceResponse {
|
pub fn error_response<E: Into<Error>>(self, err: E) -> ServiceResponse {
|
||||||
let res = HttpResponse::from_error(err.into());
|
let res = HttpResponse::from_error(err.into());
|
||||||
ServiceResponse::new(self.req, res)
|
ServiceResponse::new(self.req, res)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This method returns reference to the request head
|
/// Returns a reference to the request head.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn head(&self) -> &RequestHead {
|
pub fn head(&self) -> &RequestHead {
|
||||||
self.req.head()
|
self.req.head()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This method returns reference to the request head
|
/// Returns a mutable reference to the request head.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn head_mut(&mut self) -> &mut RequestHead {
|
pub fn head_mut(&mut self) -> &mut RequestHead {
|
||||||
self.req.head_mut()
|
self.req.head_mut()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Request's uri.
|
/// Returns the request URI.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn uri(&self) -> &Uri {
|
pub fn uri(&self) -> &Uri {
|
||||||
&self.head().uri
|
&self.head().uri
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Read the Request method.
|
/// Returns the request method.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn method(&self) -> &Method {
|
pub fn method(&self) -> &Method {
|
||||||
&self.head().method
|
&self.head().method
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Read the Request Version.
|
/// Returns the request version.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn version(&self) -> Version {
|
pub fn version(&self) -> Version {
|
||||||
self.head().version
|
self.head().version
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to request headers.
|
||||||
#[inline]
|
#[inline]
|
||||||
/// Returns request's headers.
|
|
||||||
pub fn headers(&self) -> &HeaderMap {
|
pub fn headers(&self) -> &HeaderMap {
|
||||||
&self.head().headers
|
&self.head().headers
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns a mutable reference to request headers.
|
||||||
#[inline]
|
#[inline]
|
||||||
/// Returns mutable request's headers.
|
|
||||||
pub fn headers_mut(&mut self) -> &mut HeaderMap {
|
pub fn headers_mut(&mut self) -> &mut HeaderMap {
|
||||||
&mut self.head_mut().headers
|
&mut self.head_mut().headers
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The target path of this Request.
|
/// Returns request path.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn path(&self) -> &str {
|
pub fn path(&self) -> &str {
|
||||||
self.head().uri.path()
|
self.head().uri.path()
|
||||||
@ -184,7 +219,7 @@ impl ServiceRequest {
|
|||||||
self.req.query_string()
|
self.req.query_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Peer socket address.
|
/// Returns peer's socket address.
|
||||||
///
|
///
|
||||||
/// Peer address is the directly connected peer's socket address. If a proxy is used in front of
|
/// Peer address is the directly connected peer's socket address. If a proxy is used in front of
|
||||||
/// the Actix Web server, then it would be address of this proxy.
|
/// the Actix Web server, then it would be address of this proxy.
|
||||||
@ -197,24 +232,23 @@ impl ServiceRequest {
|
|||||||
self.head().peer_addr
|
self.head().peer_addr
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get *ConnectionInfo* for the current request.
|
/// Returns a reference to connection info.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn connection_info(&self) -> Ref<'_, ConnectionInfo> {
|
pub fn connection_info(&self) -> Ref<'_, ConnectionInfo> {
|
||||||
self.req.connection_info()
|
self.req.connection_info()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns a reference to the Path parameters.
|
/// Returns reference to the Path parameters.
|
||||||
///
|
///
|
||||||
/// Params is a container for URL parameters.
|
/// Params is a container for URL parameters. A variable segment is specified in the form
|
||||||
/// A variable segment is specified in the form `{identifier}`,
|
/// `{identifier}`, where the identifier can be used later in a request handler to access the
|
||||||
/// where the identifier can be used later in a request handler to
|
/// matched value for that segment.
|
||||||
/// access the matched value for that segment.
|
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn match_info(&self) -> &Path<Url> {
|
pub fn match_info(&self) -> &Path<Url> {
|
||||||
self.req.match_info()
|
self.req.match_info()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns a mutable reference to the Path parameters.
|
/// Returns a mutable reference to the path match information.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn match_info_mut(&mut self) -> &mut Path<Url> {
|
pub fn match_info_mut(&mut self) -> &mut Path<Url> {
|
||||||
self.req.match_info_mut()
|
self.req.match_info_mut()
|
||||||
@ -232,13 +266,13 @@ impl ServiceRequest {
|
|||||||
self.req.match_pattern()
|
self.req.match_pattern()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get a reference to a `ResourceMap` of current application.
|
/// Returns a reference to the application's resource map.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn resource_map(&self) -> &ResourceMap {
|
pub fn resource_map(&self) -> &ResourceMap {
|
||||||
self.req.resource_map()
|
self.req.resource_map()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Service configuration
|
/// Returns a reference to the application's configuration.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn app_config(&self) -> &AppConfig {
|
pub fn app_config(&self) -> &AppConfig {
|
||||||
self.req.app_config()
|
self.req.app_config()
|
||||||
@@ -262,6 +296,7 @@ impl ServiceRequest {
         self.req.conn_data()
     }
 
+    /// Return request cookies.
     #[cfg(feature = "cookies")]
     #[inline]
     pub fn cookies(&self) -> Result<Ref<'_, Vec<Cookie<'static>>>, CookieParseError> {
@@ -33,7 +33,7 @@ use crate::cookie::{Cookie, CookieJar};
 /// use actix_web::{test, HttpRequest, HttpResponse, HttpMessage};
 /// use actix_web::http::{header, StatusCode};
 ///
-/// async fn index(req: HttpRequest) -> HttpResponse {
+/// async fn handler(req: HttpRequest) -> HttpResponse {
 ///     if let Some(hdr) = req.headers().get(header::CONTENT_TYPE) {
 ///         HttpResponse::Ok().into()
 ///     } else {
@@ -45,14 +45,15 @@ use crate::cookie::{Cookie, CookieJar};
 /// # // force rustdoc to display the correct thing and also compile check the test
 /// # async fn _test() {}
 /// async fn test_index() {
-///     let req = test::TestRequest::default().insert_header(header::ContentType::plaintext())
+///     let req = test::TestRequest::default()
+///         .insert_header(header::ContentType::plaintext())
 ///         .to_http_request();
 ///
-///     let resp = index(req).await;
+///     let resp = handler(req).await;
 ///     assert_eq!(resp.status(), StatusCode::OK);
 ///
 ///     let req = test::TestRequest::default().to_http_request();
-///     let resp = index(req).await;
+///     let resp = handler(req).await;
 ///     assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
 /// }
 /// ```
@@ -49,7 +49,7 @@ use crate::{
 /// ```
 ///
 /// # Responder
-/// It may be desireable to use a concrete type for a response with multiple branches. As long as
+/// It may be desirable to use a concrete type for a response with multiple branches. As long as
 /// both types implement `Responder`, so will the `Either` type, enabling it to be used as a
 /// handler's return type.
 ///
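To illustrate the Responder note above, a short hedged sketch. It assumes the `Left`/`Right` variant names of `actix_web::Either` from the v4 line; the header check used as the branching condition is arbitrary.

```rust
use actix_web::{http::header, Either, HttpRequest, HttpResponse};

// Sketch: both branches implement `Responder`, so the concrete `Either` type
// can serve as the handler's return type. The condition is only illustrative.
async fn handler(req: HttpRequest) -> Either<HttpResponse, &'static str> {
    if req.headers().contains_key(header::ACCEPT) {
        Either::Left(HttpResponse::Ok().body("negotiated response"))
    } else {
        Either::Right("plain-text fallback")
    }
}
```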
@@ -183,6 +183,7 @@ mod tests {
         assert!(Path::<MyStruct>::from_request(&req, &mut pl).await.is_err());
     }
 
+    #[allow(clippy::let_unit_value)]
     #[actix_rt::test]
     async fn test_tuple_extract() {
         let resource = ResourceDef::new("/{key}/{value}/");
@@ -113,7 +113,7 @@ pub struct BytesExtractFut {
     body_fut: HttpMessageBody,
 }
 
-impl<'a> Future for BytesExtractFut {
+impl Future for BytesExtractFut {
     type Output = Result<Bytes, Error>;
 
     fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
@@ -167,7 +167,7 @@ pub struct StringExtractFut {
     encoding: &'static Encoding,
 }
 
-impl<'a> Future for StringExtractFut {
+impl Future for StringExtractFut {
     type Output = Result<String, Error>;
 
     fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
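These two futures back the plain `Bytes` and `String` body extractors. A minimal sketch of how they surface in handler signatures; the routes and `#[post]` macro usage are illustrative, not part of this diff.

```rust
use actix_web::{post, web, HttpResponse};

// Sketch: `web::Bytes` and `String` arguments are resolved through the
// extract futures shown above. Routes are illustrative.
#[post("/echo-bytes")]
async fn echo_bytes(body: web::Bytes) -> HttpResponse {
    HttpResponse::Ok().body(body)
}

#[post("/echo-text")]
async fn echo_text(body: String) -> HttpResponse {
    HttpResponse::Ok().body(body)
}
```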
@@ -20,7 +20,7 @@ use crate::{
 /// Stream that reads request line by line.
 pub struct Readlines<T: HttpMessage> {
     stream: Payload<T::Stream>,
-    buff: BytesMut,
+    buf: BytesMut,
     limit: usize,
     checked_buff: bool,
     encoding: &'static Encoding,
@@ -41,7 +41,7 @@ where
 
         Readlines {
             stream: req.take_payload(),
-            buff: BytesMut::with_capacity(262_144),
+            buf: BytesMut::with_capacity(262_144),
             limit: 262_144,
             checked_buff: true,
             err: None,
@@ -58,7 +58,7 @@ where
     fn err(err: ReadlinesError) -> Self {
         Readlines {
             stream: Payload::None,
-            buff: BytesMut::new(),
+            buf: BytesMut::new(),
             limit: 262_144,
             checked_buff: true,
             encoding: UTF_8,
@@ -84,7 +84,7 @@ where
         // check if there is a newline in the buffer
         if !this.checked_buff {
             let mut found: Option<usize> = None;
-            for (ind, b) in this.buff.iter().enumerate() {
+            for (ind, b) in this.buf.iter().enumerate() {
                 if *b == b'\n' {
                     found = Some(ind);
                     break;
@@ -96,13 +96,13 @@ where
                     return Poll::Ready(Some(Err(ReadlinesError::LimitOverflow)));
                 }
                 let line = if this.encoding == UTF_8 {
-                    str::from_utf8(&this.buff.split_to(ind + 1))
+                    str::from_utf8(&this.buf.split_to(ind + 1))
                         .map_err(|_| ReadlinesError::EncodingError)?
                         .to_owned()
                 } else {
                     this.encoding
                         .decode_without_bom_handling_and_without_replacement(
-                            &this.buff.split_to(ind + 1),
+                            &this.buf.split_to(ind + 1),
                         )
                         .map(Cow::into_owned)
                         .ok_or(ReadlinesError::EncodingError)?
@@ -141,32 +141,32 @@ where
                         .ok_or(ReadlinesError::EncodingError)?
                 };
                 // extend buffer with rest of the bytes;
-                this.buff.extend_from_slice(&bytes);
+                this.buf.extend_from_slice(&bytes);
                 this.checked_buff = false;
                 return Poll::Ready(Some(Ok(line)));
             }
-            this.buff.extend_from_slice(&bytes);
+            this.buf.extend_from_slice(&bytes);
             Poll::Pending
         }
 
         None => {
-            if this.buff.is_empty() {
+            if this.buf.is_empty() {
                 return Poll::Ready(None);
             }
-            if this.buff.len() > this.limit {
+            if this.buf.len() > this.limit {
                 return Poll::Ready(Some(Err(ReadlinesError::LimitOverflow)));
             }
             let line = if this.encoding == UTF_8 {
-                str::from_utf8(&this.buff)
+                str::from_utf8(&this.buf)
                     .map_err(|_| ReadlinesError::EncodingError)?
                     .to_owned()
             } else {
                 this.encoding
-                    .decode_without_bom_handling_and_without_replacement(&this.buff)
+                    .decode_without_bom_handling_and_without_replacement(&this.buf)
                     .map(Cow::into_owned)
                     .ok_or(ReadlinesError::EncodingError)?
             };
-            this.buff.clear();
+            this.buf.clear();
             Poll::Ready(Some(Ok(line)))
         }
 
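The rename from `buff` to `buf` is mechanical, but the surrounding logic (scan for `\n`, split the buffer just past it, keep the remainder for the next poll) is easy to check in isolation. A standalone sketch using only the `bytes` crate, not the actix-web internals themselves:

```rust
use bytes::BytesMut;

// Standalone sketch of the line-splitting step: find a newline, split the
// buffer just past it (the returned line keeps its trailing `\n`), and leave
// the rest buffered until more data arrives.
fn take_line(buf: &mut BytesMut) -> Option<String> {
    let ind = buf.iter().position(|b| *b == b'\n')?;
    let line = buf.split_to(ind + 1);
    Some(String::from_utf8_lossy(&line).into_owned())
}

fn main() {
    let mut buf = BytesMut::from(&b"first line\nsecond li"[..]);
    assert_eq!(take_line(&mut buf).as_deref(), Some("first line\n"));

    // No complete line yet; the partial bytes stay buffered.
    assert_eq!(take_line(&mut buf), None);
    assert_eq!(&buf[..], b"second li");
}
```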
Some files were not shown because too many files have changed in this diff.