Compare commits (77 commits): files-v0.6...improve-ty
Commits (SHA1):

30da2e5618
43a0e8102f
183fbdfd74
57ee49a618
075d871e63
c4b20df56a
a75212695a
2f9c97461a
0df275c478
a0eb0d22be
303843dcda
75b026b740
697238fadc
e045418038
a978b417f3
fa82b698b7
fc4cdf81eb
654dc64a09
cf54388534
39243095b5
89c6d62656
52bbbd1d73
3e6e9779dc
9bdd334bb4
bcbbc115aa
ab5eb7c1aa
18b8ef0765
b806b4773c
0062d99b6f
99e6a9c26d
5f5bd2184e
88e074879d
e7987e7429
a172f5968d
a2a42ec152
dd347e0bd0
194a691537
56ee97f722
66620a1012
e33618ed6d
1fe309bcc6
168a7284d3
68a3acb9c2
84c6d25fd3
0a135c7dc9
668a33c793
d8cbb879dd
13cf5a9e44
4df1cd78b7
e8a0e16863
a2f59c02f7
2754608f3c
c020cedb63
5e554dca35
6ec2d7b909
ec6d284a8e
be9530eb72
855e260fdb
d13854505f
d40b6748bc
c79b9a0df3
4af414064b
9abe166d52
c09ec6af4c
37f2bf5625
4f6f0b0137
591abc37c3
ad22cc4e7f
efdf3ab1c3
6b3ea4fc61
99985fc4ec
a6707fb7ee
a3806cde19
efefa0d0ce
450ff5fa1d
8ae278cb68
46699e3429
@ -1,9 +1,12 @@
[alias]
chk = "check --workspace --all-features --tests --examples --bins"
lint = "clippy --workspace --all-features --tests --examples --bins"
ci-min = "hack check --workspace --no-default-features"
ci-min-test = "hack check --workspace --no-default-features --tests --examples"
ci-default = "check --workspace --bins --tests --examples"
ci-full = "check --workspace --all-features --bins --tests --examples"
ci-test = "test --workspace --all-features --lib --tests --no-fail-fast -- --nocapture"
lint = "clippy --workspace --tests --examples --bins -- -Dclippy::todo"
lint-all = "clippy --workspace --all-features --tests --examples --bins -- -Dclippy::todo"

# lib checking
ci-check-min = "hack --workspace check --no-default-features"
ci-check-default = "hack --workspace check"
ci-check-all-feature-powerset="hack --workspace --feature-powerset --skip=__compress,io-uring check"
ci-check-all-feature-powerset-linux="hack --workspace --feature-powerset --skip=__compress check"

# testing
ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture"
.github/workflows/ci.yml (vendored): 135 lines changed
@ -14,9 +14,9 @@ jobs:
|
||||
target:
|
||||
- { name: Linux, os: ubuntu-latest, triple: x86_64-unknown-linux-gnu }
|
||||
- { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
|
||||
- { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc }
|
||||
- { name: Windows, os: windows-2022, triple: x86_64-pc-windows-msvc }
|
||||
version:
|
||||
- 1.51.0 # MSRV
|
||||
- 1.52.0 # MSRV
|
||||
- stable
|
||||
- nightly
|
||||
|
||||
@ -32,6 +32,8 @@ jobs:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
# install OpenSSL on Windows
|
||||
# TODO: GitHub actions docs state that OpenSSL is
|
||||
# already installed on these Windows machines somewhere
|
||||
- name: Set vcpkg root
|
||||
if: matrix.target.triple == 'x86_64-pc-windows-msvc'
|
||||
run: echo "VCPKG_ROOT=$env:VCPKG_INSTALLATION_ROOT" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||
@ -48,8 +50,7 @@ jobs:
|
||||
|
||||
- name: Generate Cargo.lock
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: generate-lockfile
|
||||
with: { command: generate-lockfile }
|
||||
- name: Cache Dependencies
|
||||
uses: Swatinem/rust-cache@v1.2.0
|
||||
|
||||
@ -61,53 +62,105 @@ jobs:
|
||||
|
||||
- name: check minimal
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: ci-min }
|
||||
|
||||
- name: check minimal + tests
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: ci-min-test }
|
||||
with: { command: ci-check-min }
|
||||
|
||||
- name: check default
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: ci-default }
|
||||
|
||||
- name: check full
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: ci-full }
|
||||
with: { command: ci-check-default }
|
||||
|
||||
- name: tests
|
||||
uses: actions-rs/cargo@v1
|
||||
timeout-minutes: 40
|
||||
with:
|
||||
command: ci-test
|
||||
args: --skip=test_reading_deflate_encoding_large_random_rustls
|
||||
|
||||
- name: Generate coverage file
|
||||
if: >
|
||||
matrix.target.os == 'ubuntu-latest'
|
||||
&& matrix.version == 'stable'
|
||||
&& github.ref == 'refs/heads/master'
|
||||
timeout-minutes: 60
|
||||
run: |
|
||||
cargo install cargo-tarpaulin --vers "^0.13"
|
||||
cargo tarpaulin --out Xml --verbose
|
||||
- name: Upload to Codecov
|
||||
if: >
|
||||
matrix.target.os == 'ubuntu-latest'
|
||||
&& matrix.version == 'stable'
|
||||
&& github.ref == 'refs/heads/master'
|
||||
uses: codecov/codecov-action@v1
|
||||
with:
|
||||
file: cobertura.xml
|
||||
cargo test --lib --tests -p=actix-router --all-features
|
||||
cargo test --lib --tests -p=actix-http --all-features
|
||||
cargo test --lib --tests -p=actix-web --features=rustls,openssl -- --skip=test_reading_deflate_encoding_large_random_rustls
|
||||
cargo test --lib --tests -p=actix-web-codegen --all-features
|
||||
cargo test --lib --tests -p=awc --all-features
|
||||
cargo test --lib --tests -p=actix-http-test --all-features
|
||||
cargo test --lib --tests -p=actix-test --all-features
|
||||
cargo test --lib --tests -p=actix-files
|
||||
cargo test --lib --tests -p=actix-multipart --all-features
|
||||
cargo test --lib --tests -p=actix-web-actors --all-features
|
||||
|
||||
- name: tests (io-uring)
|
||||
if: matrix.target.os == 'ubuntu-latest'
|
||||
timeout-minutes: 60
|
||||
run: >
|
||||
sudo bash -c "ulimit -Sl 512
|
||||
&& ulimit -Hl 512
|
||||
&& PATH=$PATH:/usr/share/rust/.cargo/bin
|
||||
&& RUSTUP_TOOLCHAIN=${{ matrix.version }} cargo test --lib --tests -p=actix-files --all-features"
|
||||
|
||||
- name: Clear the cargo caches
|
||||
run: |
|
||||
cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
|
||||
cargo-cache
|
||||
|
||||
rustdoc:
|
||||
name: rustdoc
|
||||
ci_feature_powerset_check:
|
||||
name: Verify Feature Combinations
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install stable
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable-x86_64-unknown-linux-gnu
|
||||
profile: minimal
|
||||
override: true
|
||||
|
||||
- name: Generate Cargo.lock
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: generate-lockfile }
|
||||
- name: Cache Dependencies
|
||||
uses: Swatinem/rust-cache@v1.2.0
|
||||
|
||||
- name: Install cargo-hack
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: install
|
||||
args: cargo-hack
|
||||
|
||||
- name: check feature combinations
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: ci-check-all-feature-powerset }
|
||||
|
||||
- name: check feature combinations
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: ci-check-all-feature-powerset-linux }
|
||||
|
||||
coverage:
|
||||
name: coverage
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install stable
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable-x86_64-unknown-linux-gnu
|
||||
profile: minimal
|
||||
override: true
|
||||
|
||||
- name: Generate Cargo.lock
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: generate-lockfile }
|
||||
- name: Cache Dependencies
|
||||
uses: Swatinem/rust-cache@v1.2.0
|
||||
|
||||
- name: Generate coverage file
|
||||
if: github.ref == 'refs/heads/master'
|
||||
run: |
|
||||
cargo install cargo-tarpaulin --vers "^0.13"
|
||||
cargo tarpaulin --workspace --features=rustls,openssl --out Xml --verbose
|
||||
- name: Upload to Codecov
|
||||
if: github.ref == 'refs/heads/master'
|
||||
uses: codecov/codecov-action@v1
|
||||
with: { file: cobertura.xml }
|
||||
|
||||
rustdoc:
|
||||
name: doc tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
@ -124,13 +177,7 @@ jobs:
|
||||
- name: Cache Dependencies
|
||||
uses: Swatinem/rust-cache@v1.3.0
|
||||
|
||||
- name: Install cargo-hack
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: install
|
||||
args: cargo-hack
|
||||
|
||||
- name: doc tests
|
||||
uses: actions-rs/cargo@v1
|
||||
timeout-minutes: 40
|
||||
timeout-minutes: 60
|
||||
with: { command: ci-doctest }
|
||||
|
CHANGES.md: 68 lines changed
@ -1,6 +1,74 @@
# Changes

## Unreleased - 2021-xx-xx
### Added
* Methods on `AcceptLanguage`: `ranked` and `preference`. [#2480]

### Changed
* Rename `Accept::{mime_precedence => ranked}`. [#2480]
* Rename `Accept::{mime_preference => preference}`. [#2480]

### Fixed
* Accept wildcard `*` items in `AcceptLanguage`. [#2480]

[#2480]: https://github.com/actix/actix-web/pull/2480
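
With these renames, `Accept` and `AcceptLanguage` share the same `ranked`/`preference` naming for quality-sorted header items. A minimal sketch of how a handler might use them; the route, handler name, and the `web::Header` extractor wiring are illustrative assumptions, not taken from this diff:

```rust
use actix_web::{get, http::header::AcceptLanguage, web::Header, HttpResponse, Responder};

// Hypothetical handler: report the client's language preferences.
#[get("/langs")]
async fn langs(accept_language: Header<AcceptLanguage>) -> impl Responder {
    let accept_language = accept_language.into_inner();

    // `ranked()` returns the items sorted by quality (q-value), best first;
    // `preference()` is the single top choice. `Accept` gains the same pair
    // of methods via the renames above.
    let ranked = accept_language.ranked();
    let top = accept_language.preference();

    HttpResponse::Ok().body(format!("preferred: {:?}\nranked: {:?}", top, ranked))
}
```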


## 4.0.0-beta.13 - 2021-11-30
### Changed
* Update `actix-tls` to `3.0.0-rc.1`. [#2474]

[#2474]: https://github.com/actix/actix-web/pull/2474


## 4.0.0-beta.12 - 2021-11-22
### Changed
* Compress middleware's response type is now `AnyBody<Encoder<B>>`. [#2448]

### Fixed
* Relax `Unpin` bound on `S` (stream) parameter of `HttpResponseBuilder::streaming`. [#2448]

### Removed
* `dev::ResponseBody` re-export; its function is replaced by the new `dev::AnyBody` enum. [#2446]

[#2446]: https://github.com/actix/actix-web/pull/2446
[#2448]: https://github.com/actix/actix-web/pull/2448
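
The `Unpin` relaxation matters when the body stream is a non-`Unpin` type (for example one produced by an `async` block). A minimal sketch of `HttpResponseBuilder::streaming` with an in-memory stream; the function name and chunk contents are illustrative, and any `Stream` of `Result<Bytes, E>` works:

```rust
use actix_web::{web::Bytes, Error, HttpResponse};
use futures_util::stream;

// Sketch: build a streaming response body from two static chunks.
fn chunked_hello() -> HttpResponse {
    let body = stream::iter(vec![
        Ok::<_, Error>(Bytes::from_static(b"hello, ")),
        Ok(Bytes::from_static(b"world")),
    ]);

    HttpResponse::Ok().streaming(body)
}
```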


## 4.0.0-beta.11 - 2021-11-15
### Added
* Re-export `dev::ServerHandle` from `actix-server`. [#2442]

### Changed
* `ContentType::html` now produces `text/html; charset=utf-8` instead of `text/html`. [#2423]
* Update `actix-server` to `2.0.0-beta.9`. [#2442]

[#2423]: https://github.com/actix/actix-web/pull/2423
[#2442]: https://github.com/actix/actix-web/pull/2442
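
For the `ContentType::html` change, the visible difference is only in the emitted header value. A small sketch; the handler shape is illustrative:

```rust
use actix_web::{http::header::ContentType, HttpResponse};

// Sketch: this now sends `Content-Type: text/html; charset=utf-8`.
fn html_page() -> HttpResponse {
    HttpResponse::Ok()
        .insert_header(ContentType::html())
        .body("<h1>hello</h1>")
}
```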


## 4.0.0-beta.10 - 2021-10-20
### Added
* Option to allow `Json` extractor to work without a `Content-Type` header present. [#2362]
* `#[actix_web::test]` macro for setting up tests with a runtime. [#2409]

### Changed
* Associated type `FromRequest::Config` was removed. [#2233]
* Inner field made private on `web::Payload`. [#2384]
* `Data::into_inner` and `Data::get_ref` no longer requires `T: Sized`. [#2403]
* Updated rustls to v0.20. [#2414]
* Minimum supported Rust version (MSRV) is now 1.52.

### Removed
* Useless `ServiceResponse::checked_expr` method. [#2401]

[#2233]: https://github.com/actix/actix-web/pull/2233
[#2362]: https://github.com/actix/actix-web/pull/2362
[#2384]: https://github.com/actix/actix-web/pull/2384
[#2401]: https://github.com/actix/actix-web/pull/2401
[#2403]: https://github.com/actix/actix-web/pull/2403
[#2409]: https://github.com/actix/actix-web/pull/2409
[#2414]: https://github.com/actix/actix-web/pull/2414
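
A short sketch of the `#[actix_web::test]` macro from #2409, which replaces the `#[actix_rt::test]` attribute seen elsewhere in this diff. The handler, route, and payload are made up for illustration; the `test::*` helpers are the existing actix-web test utilities:

```rust
use actix_web::{test, web, App, HttpResponse, Responder};

// Hypothetical echo handler used only by the test below.
async fn echo(body: web::Json<serde_json::Value>) -> impl Responder {
    HttpResponse::Ok().json(body.into_inner())
}

// `#[actix_web::test]` sets up the async runtime for the test body.
#[actix_web::test]
async fn echo_roundtrip() {
    let app =
        test::init_service(App::new().route("/echo", web::post().to(echo))).await;

    let req = test::TestRequest::post()
        .uri("/echo")
        .set_json(&serde_json::json!({ "hello": "world" }))
        .to_request();

    let res = test::call_service(&app, req).await;
    assert!(res.status().is_success());
}
```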


## 4.0.0-beta.9 - 2021-09-09
Cargo.toml: 53 lines changed
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-web"
|
||||
version = "4.0.0-beta.9"
|
||||
version = "4.0.0-beta.13"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
|
||||
keywords = ["actix", "http", "web", "framework", "async"]
|
||||
@ -11,13 +11,14 @@ categories = [
|
||||
"web-programming::websocket"
|
||||
]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
# features that docs.rs will build with
|
||||
features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies", "secure-cookies"]
|
||||
rustdoc-args = ["--cfg", "docsrs"]
|
||||
|
||||
[lib]
|
||||
name = "actix_web"
|
||||
@ -37,8 +38,6 @@ members = [
|
||||
"actix-test",
|
||||
"actix-router",
|
||||
]
|
||||
# enable when MSRV is 1.51+
|
||||
# resolver = "2"
|
||||
|
||||
[features]
|
||||
default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
|
||||
@ -62,22 +61,25 @@ openssl = ["actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]
|
||||
# rustls
|
||||
rustls = ["actix-http/rustls", "actix-tls/accept", "actix-tls/rustls"]
|
||||
|
||||
# Internal (PRIVATE!) features used to aid testing and cheking feature status.
|
||||
# Internal (PRIVATE!) features used to aid testing and checking feature status.
|
||||
# Don't rely on these whatsoever. They may disappear at anytime.
|
||||
__compress = []
|
||||
|
||||
# io-uring feature only avaiable for Linux OSes.
|
||||
experimental-io-uring = ["actix-server/io-uring"]
|
||||
|
||||
[dependencies]
|
||||
actix-codec = "0.4.0"
|
||||
actix-macros = "0.2.1"
|
||||
actix-router = "0.5.0-beta.2"
|
||||
actix-rt = "2.2"
|
||||
actix-server = "2.0.0-beta.3"
|
||||
actix-codec = "0.4.1"
|
||||
actix-macros = "0.2.3"
|
||||
actix-rt = "2.3"
|
||||
actix-server = "2.0.0-beta.9"
|
||||
actix-service = "2.0.0"
|
||||
actix-utils = "3.0.0"
|
||||
actix-tls = { version = "3.0.0-beta.5", default-features = false, optional = true }
|
||||
actix-tls = { version = "3.0.0-rc.1", default-features = false, optional = true }
|
||||
|
||||
actix-web-codegen = "0.5.0-beta.4"
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-http = "3.0.0-beta.14"
|
||||
actix-router = "0.5.0-beta.2"
|
||||
actix-web-codegen = "0.5.0-beta.5"
|
||||
|
||||
ahash = "0.7"
|
||||
bytes = "1"
|
||||
@ -94,29 +96,31 @@ once_cell = "1.5"
|
||||
log = "0.4"
|
||||
mime = "0.3"
|
||||
paste = "1"
|
||||
pin-project = "1.0.0"
|
||||
pin-project-lite = "0.2.7"
|
||||
regex = "1.4"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
serde_urlencoded = "0.7"
|
||||
smallvec = "1.6.1"
|
||||
socket2 = "0.4.0"
|
||||
time = { version = "0.2.23", default-features = false, features = ["std"] }
|
||||
time = { version = "0.3", default-features = false, features = ["formatting"] }
|
||||
url = "2.1"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-test = { version = "0.1.0-beta.3", features = ["openssl", "rustls"] }
|
||||
awc = { version = "3.0.0-beta.8", features = ["openssl"] }
|
||||
actix-test = { version = "0.1.0-beta.7", features = ["openssl", "rustls"] }
|
||||
awc = { version = "3.0.0-beta.11", features = ["openssl"] }
|
||||
|
||||
brotli2 = "0.3.2"
|
||||
criterion = { version = "0.3", features = ["html_reports"] }
|
||||
env_logger = "0.8"
|
||||
env_logger = "0.9"
|
||||
flate2 = "1.0.13"
|
||||
zstd = "0.7"
|
||||
futures-util = { version = "0.3.7", default-features = false, features = ["std"] }
|
||||
rand = "0.8"
|
||||
rcgen = "0.8"
|
||||
rustls-pemfile = "0.2"
|
||||
tls-openssl = { package = "openssl", version = "0.10.9" }
|
||||
tls-rustls = { package = "rustls", version = "0.19.0" }
|
||||
tls-rustls = { package = "rustls", version = "0.20.0" }
|
||||
zstd = "0.9"
|
||||
|
||||
[profile.dev]
|
||||
# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.
|
||||
@ -139,6 +143,15 @@ actix-web-actors = { path = "actix-web-actors" }
|
||||
actix-web-codegen = { path = "actix-web-codegen" }
|
||||
awc = { path = "awc" }
|
||||
|
||||
# uncomment for quick testing against local actix-net repo
|
||||
# actix-service = { path = "../actix-net/actix-service" }
|
||||
# actix-macros = { path = "../actix-net/actix-macros" }
|
||||
# actix-rt = { path = "../actix-net/actix-rt" }
|
||||
# actix-codec = { path = "../actix-net/actix-codec" }
|
||||
# actix-utils = { path = "../actix-net/actix-utils" }
|
||||
# actix-tls = { path = "../actix-net/actix-tls" }
|
||||
# actix-server = { path = "../actix-net/actix-server" }
|
||||
|
||||
[[test]]
|
||||
name = "test_server"
|
||||
required-features = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
|
||||
|
@ -11,6 +11,8 @@
|
||||
|
||||
Alternatively, explicitly require trailing slashes: `NormalizePath::new(TrailingSlash::Always)`.
|
||||
|
||||
* The `type Config` of `FromRequest` was removed.
|
||||
|
||||
* Feature flag `compress` has been split into its supported algorithm (brotli, gzip, zstd).
|
||||
By default all compression algorithms are enabled.
|
||||
To select algorithm you want to include with `middleware::Compress` use following flags:
|
||||
|
@ -6,10 +6,10 @@
|
||||
<p>
|
||||
|
||||
[](https://crates.io/crates/actix-web)
|
||||
[](https://docs.rs/actix-web/4.0.0-beta.9)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||
[](https://docs.rs/actix-web/4.0.0-beta.13)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
|
||||

|
||||
[](https://deps.rs/crate/actix-web/4.0.0-beta.9)
|
||||
[](https://deps.rs/crate/actix-web/4.0.0-beta.13)
|
||||
<br />
|
||||
[](https://github.com/actix/actix-web/actions)
|
||||
[](https://codecov.io/gh/actix/actix-web)
|
||||
@ -32,7 +32,7 @@
|
||||
* SSL support using OpenSSL or Rustls
|
||||
* Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/))
|
||||
* Includes an async [HTTP client](https://docs.rs/awc/)
|
||||
* Runs on stable Rust 1.51+
|
||||
* Runs on stable Rust 1.52+
|
||||
|
||||
## Documentation
|
||||
|
||||
|
@ -3,6 +3,22 @@
## Unreleased - 2021-xx-xx


## 0.6.0-beta.9 - 2021-11-22
* Add crate feature `experimental-io-uring`, enabling async file I/O to be utilized. This feature is only available on Linux OSes with recent kernel versions. This feature is semver-exempt. [#2408]
* Add `NamedFile::open_async`. [#2408]
* Fix 304 Not Modified responses to omit the Content-Length header, as per the spec. [#2453]
* The `Responder` impl for `NamedFile` now has a boxed future associated type. [#2408]
* The `Service` impl for `NamedFileService` now has a boxed future associated type. [#2408]
* Add `impl Clone` for `FilesService`. [#2408]

[#2408]: https://github.com/actix/actix-web/pull/2408
[#2453]: https://github.com/actix/actix-web/pull/2453
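
`NamedFile::open_async` is what the rest of this diff migrates to. The usage shape, mirroring the doc example updated in `named.rs` further down:

```rust
use actix_files::NamedFile;
use actix_web::{get, Responder};

// With `experimental-io-uring` enabled the open is asynchronous via tokio-uring;
// otherwise it behaves like the blocking `NamedFile::open`.
#[get("/")]
async fn index() -> impl Responder {
    NamedFile::open_async("./static/index.html").await
}
```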


## 0.6.0-beta.8 - 2021-10-20
* Minimum supported Rust version (MSRV) is now 1.52.


## 0.6.0-beta.7 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.

@ -1,7 +1,11 @@
|
||||
[package]
|
||||
name = "actix-files"
|
||||
version = "0.6.0-beta.7"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
version = "0.6.0-beta.9"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"fakeshadow <24548779@qq.com>",
|
||||
"Rob Ede <robjtede@icloud.com>",
|
||||
]
|
||||
description = "Static file serving for Actix Web"
|
||||
keywords = ["actix", "http", "async", "futures"]
|
||||
homepage = "https://actix.rs"
|
||||
@ -14,11 +18,14 @@ edition = "2018"
|
||||
name = "actix_files"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[features]
|
||||
experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]
|
||||
|
||||
[dependencies]
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false }
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-service = "2.0.0"
|
||||
actix-utils = "3.0.0"
|
||||
actix-web = { version = "4.0.0-beta.11", default-features = false }
|
||||
actix-http = "3.0.0-beta.14"
|
||||
actix-service = "2"
|
||||
actix-utils = "3"
|
||||
|
||||
askama_escape = "0.10"
|
||||
bitflags = "1"
|
||||
@ -30,8 +37,11 @@ log = "0.4"
|
||||
mime = "0.3"
|
||||
mime_guess = "2.0.1"
|
||||
percent-encoding = "2.1"
|
||||
pin-project-lite = "0.2.7"
|
||||
|
||||
tokio-uring = { version = "0.1", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-web = "4.0.0-beta.9"
|
||||
actix-test = "0.1.0-beta.3"
|
||||
actix-web = "4.0.0-beta.11"
|
||||
actix-test = "0.1.0-beta.7"
|
||||
|
@ -3,11 +3,11 @@
|
||||
> Static file serving for Actix Web
|
||||
|
||||
[](https://crates.io/crates/actix-files)
|
||||
[](https://docs.rs/actix-files/0.6.0-beta.7)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||
[](https://docs.rs/actix-files/0.6.0-beta.9)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-files/0.6.0-beta.7)
|
||||
[](https://deps.rs/crate/actix-files/0.6.0-beta.9)
|
||||
[](https://crates.io/crates/actix-files)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
@ -15,4 +15,4 @@
|
||||
|
||||
- [API Documentation](https://docs.rs/actix-files/)
|
||||
- [Example Project](https://github.com/actix/examples/tree/master/basics/static_index)
|
||||
- Minimum supported Rust version: 1.51 or later
|
||||
- Minimum Supported Rust Version (MSRV): 1.52
|
||||
|
@ -1,98 +1,278 @@
|
||||
use std::{
|
||||
cmp, fmt,
|
||||
fs::File,
|
||||
future::Future,
|
||||
io::{self, Read, Seek},
|
||||
io,
|
||||
pin::Pin,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
|
||||
use actix_web::{
|
||||
error::{BlockingError, Error},
|
||||
rt::task::{spawn_blocking, JoinHandle},
|
||||
};
|
||||
use actix_web::error::Error;
|
||||
use bytes::Bytes;
|
||||
use futures_core::{ready, Stream};
|
||||
use pin_project_lite::pin_project;
|
||||
|
||||
#[doc(hidden)]
|
||||
/// A helper created from a `std::fs::File` which reads the file
|
||||
/// chunk-by-chunk on a `ThreadPool`.
|
||||
pub struct ChunkedReadFile {
|
||||
size: u64,
|
||||
offset: u64,
|
||||
state: ChunkedReadFileState,
|
||||
counter: u64,
|
||||
}
|
||||
use super::named::File;
|
||||
|
||||
enum ChunkedReadFileState {
|
||||
File(Option<File>),
|
||||
Future(JoinHandle<Result<(File, Bytes), io::Error>>),
|
||||
}
|
||||
|
||||
impl ChunkedReadFile {
|
||||
pub(crate) fn new(size: u64, offset: u64, file: File) -> Self {
|
||||
Self {
|
||||
size,
|
||||
offset,
|
||||
state: ChunkedReadFileState::File(Some(file)),
|
||||
counter: 0,
|
||||
}
|
||||
pin_project! {
|
||||
/// Adapter to read a `std::file::File` in chunks.
|
||||
#[doc(hidden)]
|
||||
pub struct ChunkedReadFile<F, Fut> {
|
||||
size: u64,
|
||||
offset: u64,
|
||||
#[pin]
|
||||
state: ChunkedReadFileState<Fut>,
|
||||
counter: u64,
|
||||
callback: F,
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for ChunkedReadFile {
|
||||
#[cfg(not(feature = "experimental-io-uring"))]
|
||||
pin_project! {
|
||||
#[project = ChunkedReadFileStateProj]
|
||||
#[project_replace = ChunkedReadFileStateProjReplace]
|
||||
enum ChunkedReadFileState<Fut> {
|
||||
File { file: Option<File>, },
|
||||
Future { #[pin] fut: Fut },
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "experimental-io-uring")]
|
||||
pin_project! {
|
||||
#[project = ChunkedReadFileStateProj]
|
||||
#[project_replace = ChunkedReadFileStateProjReplace]
|
||||
enum ChunkedReadFileState<Fut> {
|
||||
File { file: Option<(File, BytesMut)> },
|
||||
Future { #[pin] fut: Fut },
|
||||
}
|
||||
}
|
||||
|
||||
impl<F, Fut> fmt::Debug for ChunkedReadFile<F, Fut> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("ChunkedReadFile")
|
||||
}
|
||||
}
|
||||
|
||||
impl Stream for ChunkedReadFile {
|
||||
pub(crate) fn new_chunked_read(
|
||||
size: u64,
|
||||
offset: u64,
|
||||
file: File,
|
||||
) -> impl Stream<Item = Result<Bytes, Error>> {
|
||||
ChunkedReadFile {
|
||||
size,
|
||||
offset,
|
||||
#[cfg(not(feature = "experimental-io-uring"))]
|
||||
state: ChunkedReadFileState::File { file: Some(file) },
|
||||
#[cfg(feature = "experimental-io-uring")]
|
||||
state: ChunkedReadFileState::File {
|
||||
file: Some((file, BytesMut::new())),
|
||||
},
|
||||
counter: 0,
|
||||
callback: chunked_read_file_callback,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "experimental-io-uring"))]
|
||||
async fn chunked_read_file_callback(
|
||||
mut file: File,
|
||||
offset: u64,
|
||||
max_bytes: usize,
|
||||
) -> Result<(File, Bytes), Error> {
|
||||
use io::{Read as _, Seek as _};
|
||||
|
||||
let res = actix_web::rt::task::spawn_blocking(move || {
|
||||
let mut buf = Vec::with_capacity(max_bytes);
|
||||
|
||||
file.seek(io::SeekFrom::Start(offset))?;
|
||||
|
||||
let n_bytes = file.by_ref().take(max_bytes as u64).read_to_end(&mut buf)?;
|
||||
|
||||
if n_bytes == 0 {
|
||||
Err(io::Error::from(io::ErrorKind::UnexpectedEof))
|
||||
} else {
|
||||
Ok((file, Bytes::from(buf)))
|
||||
}
|
||||
})
|
||||
.await
|
||||
.map_err(|_| actix_web::error::BlockingError)??;
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
#[cfg(feature = "experimental-io-uring")]
|
||||
async fn chunked_read_file_callback(
|
||||
file: File,
|
||||
offset: u64,
|
||||
max_bytes: usize,
|
||||
mut bytes_mut: BytesMut,
|
||||
) -> io::Result<(File, Bytes, BytesMut)> {
|
||||
bytes_mut.reserve(max_bytes);
|
||||
|
||||
let (res, mut bytes_mut) = file.read_at(bytes_mut, offset).await;
|
||||
let n_bytes = res?;
|
||||
|
||||
if n_bytes == 0 {
|
||||
return Err(io::ErrorKind::UnexpectedEof.into());
|
||||
}
|
||||
|
||||
let bytes = bytes_mut.split_to(n_bytes).freeze();
|
||||
|
||||
Ok((file, bytes, bytes_mut))
|
||||
}
|
||||
|
||||
#[cfg(feature = "experimental-io-uring")]
|
||||
impl<F, Fut> Stream for ChunkedReadFile<F, Fut>
|
||||
where
|
||||
F: Fn(File, u64, usize, BytesMut) -> Fut,
|
||||
Fut: Future<Output = io::Result<(File, Bytes, BytesMut)>>,
|
||||
{
|
||||
type Item = Result<Bytes, Error>;
|
||||
|
||||
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||
let this = self.as_mut().get_mut();
|
||||
match this.state {
|
||||
ChunkedReadFileState::File(ref mut file) => {
|
||||
let size = this.size;
|
||||
let offset = this.offset;
|
||||
let counter = this.counter;
|
||||
let mut this = self.as_mut().project();
|
||||
match this.state.as_mut().project() {
|
||||
ChunkedReadFileStateProj::File { file } => {
|
||||
let size = *this.size;
|
||||
let offset = *this.offset;
|
||||
let counter = *this.counter;
|
||||
|
||||
if size == counter {
|
||||
Poll::Ready(None)
|
||||
} else {
|
||||
let mut file = file
|
||||
let max_bytes = cmp::min(size.saturating_sub(counter), 65_536) as usize;
|
||||
|
||||
let (file, bytes_mut) = file
|
||||
.take()
|
||||
.expect("ChunkedReadFile polled after completion");
|
||||
|
||||
let fut = spawn_blocking(move || {
|
||||
let max_bytes = cmp::min(size.saturating_sub(counter), 65_536) as usize;
|
||||
let fut = (this.callback)(file, offset, max_bytes, bytes_mut);
|
||||
|
||||
let mut buf = Vec::with_capacity(max_bytes);
|
||||
file.seek(io::SeekFrom::Start(offset))?;
|
||||
this.state
|
||||
.project_replace(ChunkedReadFileState::Future { fut });
|
||||
|
||||
let n_bytes =
|
||||
file.by_ref().take(max_bytes as u64).read_to_end(&mut buf)?;
|
||||
|
||||
if n_bytes == 0 {
|
||||
return Err(io::ErrorKind::UnexpectedEof.into());
|
||||
}
|
||||
|
||||
Ok((file, Bytes::from(buf)))
|
||||
});
|
||||
this.state = ChunkedReadFileState::Future(fut);
|
||||
self.poll_next(cx)
|
||||
}
|
||||
}
|
||||
ChunkedReadFileState::Future(ref mut fut) => {
|
||||
let (file, bytes) =
|
||||
ready!(Pin::new(fut).poll(cx)).map_err(|_| BlockingError)??;
|
||||
this.state = ChunkedReadFileState::File(Some(file));
|
||||
ChunkedReadFileStateProj::Future { fut } => {
|
||||
let (file, bytes, bytes_mut) = ready!(fut.poll(cx))?;
|
||||
|
||||
this.offset += bytes.len() as u64;
|
||||
this.counter += bytes.len() as u64;
|
||||
this.state.project_replace(ChunkedReadFileState::File {
|
||||
file: Some((file, bytes_mut)),
|
||||
});
|
||||
|
||||
*this.offset += bytes.len() as u64;
|
||||
*this.counter += bytes.len() as u64;
|
||||
|
||||
Poll::Ready(Some(Ok(bytes)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "experimental-io-uring"))]
|
||||
impl<F, Fut> Stream for ChunkedReadFile<F, Fut>
|
||||
where
|
||||
F: Fn(File, u64, usize) -> Fut,
|
||||
Fut: Future<Output = Result<(File, Bytes), Error>>,
|
||||
{
|
||||
type Item = Result<Bytes, Error>;
|
||||
|
||||
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||
let mut this = self.as_mut().project();
|
||||
match this.state.as_mut().project() {
|
||||
ChunkedReadFileStateProj::File { file } => {
|
||||
let size = *this.size;
|
||||
let offset = *this.offset;
|
||||
let counter = *this.counter;
|
||||
|
||||
if size == counter {
|
||||
Poll::Ready(None)
|
||||
} else {
|
||||
let max_bytes = cmp::min(size.saturating_sub(counter), 65_536) as usize;
|
||||
|
||||
let file = file
|
||||
.take()
|
||||
.expect("ChunkedReadFile polled after completion");
|
||||
|
||||
let fut = (this.callback)(file, offset, max_bytes);
|
||||
|
||||
this.state
|
||||
.project_replace(ChunkedReadFileState::Future { fut });
|
||||
|
||||
self.poll_next(cx)
|
||||
}
|
||||
}
|
||||
ChunkedReadFileStateProj::Future { fut } => {
|
||||
let (file, bytes) = ready!(fut.poll(cx))?;
|
||||
|
||||
this.state
|
||||
.project_replace(ChunkedReadFileState::File { file: Some(file) });
|
||||
|
||||
*this.offset += bytes.len() as u64;
|
||||
*this.counter += bytes.len() as u64;
|
||||
|
||||
Poll::Ready(Some(Ok(bytes)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "experimental-io-uring")]
|
||||
use bytes_mut::BytesMut;
|
||||
|
||||
// TODO: remove new type and use bytes::BytesMut directly
|
||||
#[doc(hidden)]
|
||||
#[cfg(feature = "experimental-io-uring")]
|
||||
mod bytes_mut {
|
||||
use std::ops::{Deref, DerefMut};
|
||||
|
||||
use tokio_uring::buf::{IoBuf, IoBufMut};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct BytesMut(bytes::BytesMut);
|
||||
|
||||
impl BytesMut {
|
||||
pub(super) fn new() -> Self {
|
||||
Self(bytes::BytesMut::new())
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for BytesMut {
|
||||
type Target = bytes::BytesMut;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for BytesMut {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl IoBuf for BytesMut {
|
||||
fn stable_ptr(&self) -> *const u8 {
|
||||
self.0.as_ptr()
|
||||
}
|
||||
|
||||
fn bytes_init(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
fn bytes_total(&self) -> usize {
|
||||
self.0.capacity()
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl IoBufMut for BytesMut {
|
||||
fn stable_mut_ptr(&mut self) -> *mut u8 {
|
||||
self.0.as_mut_ptr()
|
||||
}
|
||||
|
||||
unsafe fn set_init(&mut self, init_len: usize) {
|
||||
if self.len() < init_len {
|
||||
self.0.set_len(init_len);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
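
The refactor above replaces the hand-rolled `spawn_blocking` stream with a callback-driven `pin_project` state machine built by `new_chunked_read`. A crate-internal sketch of consuming that stream; `new_chunked_read` is `pub(crate)`, so this only applies inside `actix-files`, and the default (non io-uring) build is assumed, where `File` is `std::fs::File`:

```rust
use std::fs::File;

use futures_util::StreamExt as _;

// Crate-internal sketch: drain a file through the chunked stream and count bytes.
async fn drain_file(file: File) -> std::io::Result<u64> {
    let size = file.metadata()?.len();

    // The stream holds a pinned future internally, so pin it before polling.
    let mut chunks = Box::pin(crate::chunked::new_chunked_read(size, 0, file));

    let mut total = 0u64;
    // Each chunk is at most 64 KiB, produced by the spawn_blocking callback.
    while let Some(chunk) = chunks.next().await {
        let bytes = chunk
            .map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err.to_string()))?;
        total += bytes.len() as u64;
    }

    Ok(total)
}
```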
|
||||
|
@ -6,7 +6,6 @@ use std::{
|
||||
};
|
||||
|
||||
use actix_service::{boxed, IntoServiceFactory, ServiceFactory, ServiceFactoryExt};
|
||||
use actix_utils::future::ok;
|
||||
use actix_web::{
|
||||
dev::{
|
||||
AppService, HttpServiceFactory, RequestHead, ResourceDef, ServiceRequest,
|
||||
@ -20,8 +19,9 @@ use actix_web::{
|
||||
use futures_core::future::LocalBoxFuture;
|
||||
|
||||
use crate::{
|
||||
directory_listing, named, Directory, DirectoryRenderer, FilesService, HttpNewService,
|
||||
MimeOverride, PathFilter,
|
||||
directory_listing, named,
|
||||
service::{FilesService, FilesServiceInner},
|
||||
Directory, DirectoryRenderer, HttpNewService, MimeOverride, PathFilter,
|
||||
};
|
||||
|
||||
/// Static files handling service.
|
||||
@ -283,11 +283,17 @@ impl Files {
|
||||
/// Setting a fallback static file handler:
|
||||
/// ```
|
||||
/// use actix_files::{Files, NamedFile};
|
||||
/// use actix_web::dev::{ServiceRequest, ServiceResponse, fn_service};
|
||||
///
|
||||
/// # fn run() -> Result<(), actix_web::Error> {
|
||||
/// let files = Files::new("/", "./static")
|
||||
/// .index_file("index.html")
|
||||
/// .default_handler(NamedFile::open("./static/404.html")?);
|
||||
/// .default_handler(fn_service(|req: ServiceRequest| async {
|
||||
/// let (req, _) = req.into_parts();
|
||||
/// let file = NamedFile::open_async("./static/404.html").await?;
|
||||
/// let res = file.into_response(&req);
|
||||
/// Ok(ServiceResponse::new(req, res))
|
||||
/// }));
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
/// ```
|
||||
@ -353,7 +359,7 @@ impl ServiceFactory<ServiceRequest> for Files {
|
||||
type Future = LocalBoxFuture<'static, Result<Self::Service, Self::InitError>>;
|
||||
|
||||
fn new_service(&self, _: ()) -> Self::Future {
|
||||
let mut srv = FilesService {
|
||||
let mut inner = FilesServiceInner {
|
||||
directory: self.directory.clone(),
|
||||
index: self.index.clone(),
|
||||
show_index: self.show_index,
|
||||
@ -372,14 +378,14 @@ impl ServiceFactory<ServiceRequest> for Files {
|
||||
Box::pin(async {
|
||||
match fut.await {
|
||||
Ok(default) => {
|
||||
srv.default = Some(default);
|
||||
Ok(srv)
|
||||
inner.default = Some(default);
|
||||
Ok(FilesService(Rc::new(inner)))
|
||||
}
|
||||
Err(_) => Err(()),
|
||||
}
|
||||
})
|
||||
} else {
|
||||
Box::pin(ok(srv))
|
||||
Box::pin(async move { Ok(FilesService(Rc::new(inner))) })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -33,12 +33,12 @@ mod path_buf;
|
||||
mod range;
|
||||
mod service;
|
||||
|
||||
pub use crate::chunked::ChunkedReadFile;
|
||||
pub use crate::directory::Directory;
|
||||
pub use crate::files::Files;
|
||||
pub use crate::named::NamedFile;
|
||||
pub use crate::range::HttpRange;
|
||||
pub use crate::service::FilesService;
|
||||
pub use self::chunked::ChunkedReadFile;
|
||||
pub use self::directory::Directory;
|
||||
pub use self::files::Files;
|
||||
pub use self::named::NamedFile;
|
||||
pub use self::range::HttpRange;
|
||||
pub use self::service::FilesService;
|
||||
|
||||
use self::directory::{directory_listing, DirectoryRenderer};
|
||||
use self::error::FilesError;
|
||||
@ -62,13 +62,12 @@ type PathFilter = dyn Fn(&Path, &RequestHead) -> bool;
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::{
|
||||
fs::{self, File},
|
||||
fs::{self},
|
||||
ops::Add,
|
||||
time::{Duration, SystemTime},
|
||||
};
|
||||
|
||||
use actix_service::ServiceFactory;
|
||||
use actix_utils::future::ok;
|
||||
use actix_web::{
|
||||
guard,
|
||||
http::{
|
||||
@ -82,8 +81,9 @@ mod tests {
|
||||
};
|
||||
|
||||
use super::*;
|
||||
use crate::named::File;
|
||||
|
||||
#[actix_rt::test]
|
||||
#[actix_web::test]
|
||||
async fn test_file_extension_to_mime() {
|
||||
let m = file_extension_to_mime("");
|
||||
assert_eq!(m, mime::APPLICATION_OCTET_STREAM);
|
||||
@ -100,7 +100,7 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_if_modified_since_without_if_none_match() {
|
||||
let file = NamedFile::open("Cargo.toml").unwrap();
|
||||
let file = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
let since = header::HttpDate::from(SystemTime::now().add(Duration::from_secs(60)));
|
||||
|
||||
let req = TestRequest::default()
|
||||
@ -112,7 +112,7 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_if_modified_since_without_if_none_match_same() {
|
||||
let file = NamedFile::open("Cargo.toml").unwrap();
|
||||
let file = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
let since = file.last_modified().unwrap();
|
||||
|
||||
let req = TestRequest::default()
|
||||
@ -124,7 +124,7 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_if_modified_since_with_if_none_match() {
|
||||
let file = NamedFile::open("Cargo.toml").unwrap();
|
||||
let file = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
let since = header::HttpDate::from(SystemTime::now().add(Duration::from_secs(60)));
|
||||
|
||||
let req = TestRequest::default()
|
||||
@ -137,7 +137,7 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_if_unmodified_since() {
|
||||
let file = NamedFile::open("Cargo.toml").unwrap();
|
||||
let file = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
let since = file.last_modified().unwrap();
|
||||
|
||||
let req = TestRequest::default()
|
||||
@ -149,7 +149,7 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_if_unmodified_since_failed() {
|
||||
let file = NamedFile::open("Cargo.toml").unwrap();
|
||||
let file = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
let since = header::HttpDate::from(SystemTime::UNIX_EPOCH);
|
||||
|
||||
let req = TestRequest::default()
|
||||
@ -161,8 +161,8 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_text() {
|
||||
assert!(NamedFile::open("test--").is_err());
|
||||
let mut file = NamedFile::open("Cargo.toml").unwrap();
|
||||
assert!(NamedFile::open_async("test--").await.is_err());
|
||||
let mut file = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
{
|
||||
file.file();
|
||||
let _f: &File = &file;
|
||||
@ -185,8 +185,8 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_content_disposition() {
|
||||
assert!(NamedFile::open("test--").is_err());
|
||||
let mut file = NamedFile::open("Cargo.toml").unwrap();
|
||||
assert!(NamedFile::open_async("test--").await.is_err());
|
||||
let mut file = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
{
|
||||
file.file();
|
||||
let _f: &File = &file;
|
||||
@ -202,7 +202,8 @@ mod tests {
|
||||
"inline; filename=\"Cargo.toml\""
|
||||
);
|
||||
|
||||
let file = NamedFile::open("Cargo.toml")
|
||||
let file = NamedFile::open_async("Cargo.toml")
|
||||
.await
|
||||
.unwrap()
|
||||
.disable_content_disposition();
|
||||
let req = TestRequest::default().to_http_request();
|
||||
@ -212,8 +213,19 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_non_ascii_file_name() {
|
||||
let mut file =
|
||||
NamedFile::from_file(File::open("Cargo.toml").unwrap(), "貨物.toml").unwrap();
|
||||
let file = {
|
||||
#[cfg(feature = "experimental-io-uring")]
|
||||
{
|
||||
crate::named::File::open("Cargo.toml").await.unwrap()
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "experimental-io-uring"))]
|
||||
{
|
||||
crate::named::File::open("Cargo.toml").unwrap()
|
||||
}
|
||||
};
|
||||
|
||||
let mut file = NamedFile::from_file(file, "貨物.toml").unwrap();
|
||||
{
|
||||
file.file();
|
||||
let _f: &File = &file;
|
||||
@ -236,7 +248,8 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_set_content_type() {
|
||||
let mut file = NamedFile::open("Cargo.toml")
|
||||
let mut file = NamedFile::open_async("Cargo.toml")
|
||||
.await
|
||||
.unwrap()
|
||||
.set_content_type(mime::TEXT_XML);
|
||||
{
|
||||
@ -261,7 +274,7 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_image() {
|
||||
let mut file = NamedFile::open("tests/test.png").unwrap();
|
||||
let mut file = NamedFile::open_async("tests/test.png").await.unwrap();
|
||||
{
|
||||
file.file();
|
||||
let _f: &File = &file;
|
||||
@ -284,7 +297,7 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_javascript() {
|
||||
let file = NamedFile::open("tests/test.js").unwrap();
|
||||
let file = NamedFile::open_async("tests/test.js").await.unwrap();
|
||||
|
||||
let req = TestRequest::default().to_http_request();
|
||||
let resp = file.respond_to(&req).await.unwrap();
|
||||
@ -304,7 +317,8 @@ mod tests {
|
||||
disposition: DispositionType::Attachment,
|
||||
parameters: vec![DispositionParam::Filename(String::from("test.png"))],
|
||||
};
|
||||
let mut file = NamedFile::open("tests/test.png")
|
||||
let mut file = NamedFile::open_async("tests/test.png")
|
||||
.await
|
||||
.unwrap()
|
||||
.set_content_disposition(cd);
|
||||
{
|
||||
@ -329,7 +343,7 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_binary() {
|
||||
let mut file = NamedFile::open("tests/test.binary").unwrap();
|
||||
let mut file = NamedFile::open_async("tests/test.binary").await.unwrap();
|
||||
{
|
||||
file.file();
|
||||
let _f: &File = &file;
|
||||
@ -352,7 +366,8 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_status_code_text() {
|
||||
let mut file = NamedFile::open("Cargo.toml")
|
||||
let mut file = NamedFile::open_async("Cargo.toml")
|
||||
.await
|
||||
.unwrap()
|
||||
.set_status_code(StatusCode::NOT_FOUND);
|
||||
{
|
||||
@ -568,7 +583,8 @@ mod tests {
|
||||
async fn test_named_file_content_encoding() {
|
||||
let srv = test::init_service(App::new().wrap(Compress::default()).service(
|
||||
web::resource("/").to(|| async {
|
||||
NamedFile::open("Cargo.toml")
|
||||
NamedFile::open_async("Cargo.toml")
|
||||
.await
|
||||
.unwrap()
|
||||
.set_content_encoding(header::ContentEncoding::Identity)
|
||||
}),
|
||||
@ -588,7 +604,8 @@ mod tests {
|
||||
async fn test_named_file_content_encoding_gzip() {
|
||||
let srv = test::init_service(App::new().wrap(Compress::default()).service(
|
||||
web::resource("/").to(|| async {
|
||||
NamedFile::open("Cargo.toml")
|
||||
NamedFile::open_async("Cargo.toml")
|
||||
.await
|
||||
.unwrap()
|
||||
.set_content_encoding(header::ContentEncoding::Gzip)
|
||||
}),
|
||||
@ -614,7 +631,7 @@ mod tests {
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_allowed_method() {
|
||||
let req = TestRequest::default().method(Method::GET).to_http_request();
|
||||
let file = NamedFile::open("Cargo.toml").unwrap();
|
||||
let file = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
let resp = file.respond_to(&req).await.unwrap();
|
||||
assert_eq!(resp.status(), StatusCode::OK);
|
||||
}
|
||||
@ -705,8 +722,8 @@ mod tests {
|
||||
#[actix_rt::test]
|
||||
async fn test_default_handler_file_missing() {
|
||||
let st = Files::new("/", ".")
|
||||
.default_handler(|req: ServiceRequest| {
|
||||
ok(req.into_response(HttpResponse::Ok().body("default content")))
|
||||
.default_handler(|req: ServiceRequest| async {
|
||||
Ok(req.into_response(HttpResponse::Ok().body("default content")))
|
||||
})
|
||||
.new_service(())
|
||||
.await
|
||||
@ -789,9 +806,8 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_serve_named_file() {
|
||||
let srv =
|
||||
test::init_service(App::new().service(NamedFile::open("Cargo.toml").unwrap()))
|
||||
.await;
|
||||
let factory = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
let srv = test::init_service(App::new().service(factory)).await;
|
||||
|
||||
let req = TestRequest::get().uri("/Cargo.toml").to_request();
|
||||
let res = test::call_service(&srv, req).await;
|
||||
@ -808,11 +824,9 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_serve_named_file_prefix() {
|
||||
let srv = test::init_service(
|
||||
App::new()
|
||||
.service(web::scope("/test").service(NamedFile::open("Cargo.toml").unwrap())),
|
||||
)
|
||||
.await;
|
||||
let factory = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
let srv =
|
||||
test::init_service(App::new().service(web::scope("/test").service(factory))).await;
|
||||
|
||||
let req = TestRequest::get().uri("/test/Cargo.toml").to_request();
|
||||
let res = test::call_service(&srv, req).await;
|
||||
@ -829,10 +843,8 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_default_service() {
|
||||
let srv = test::init_service(
|
||||
App::new().default_service(NamedFile::open("Cargo.toml").unwrap()),
|
||||
)
|
||||
.await;
|
||||
let factory = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
let srv = test::init_service(App::new().default_service(factory)).await;
|
||||
|
||||
for route in ["/foobar", "/baz", "/"].iter() {
|
||||
let req = TestRequest::get().uri(route).to_request();
|
||||
@ -847,8 +859,9 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_default_handler_named_file() {
|
||||
let factory = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
let st = Files::new("/", ".")
|
||||
.default_handler(NamedFile::open("Cargo.toml").unwrap())
|
||||
.default_handler(factory)
|
||||
.new_service(())
|
||||
.await
|
||||
.unwrap();
|
||||
@ -926,8 +939,8 @@ mod tests {
|
||||
#[actix_rt::test]
|
||||
async fn test_default_handler_filter() {
|
||||
let st = Files::new("/", ".")
|
||||
.default_handler(|req: ServiceRequest| {
|
||||
ok(req.into_response(HttpResponse::Ok().body("default content")))
|
||||
.default_handler(|req: ServiceRequest| async {
|
||||
Ok(req.into_response(HttpResponse::Ok().body("default content")))
|
||||
})
|
||||
.path_filter(|path, _| path.extension() == Some("png".as_ref()))
|
||||
.new_service(())
|
||||
|
@ -1,17 +1,22 @@
|
||||
use actix_service::{Service, ServiceFactory};
|
||||
use actix_utils::future::{ok, ready, Ready};
|
||||
use actix_web::dev::{AppService, HttpServiceFactory, ResourceDef};
|
||||
use std::fs::{File, Metadata};
|
||||
use std::io;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
use std::{
|
||||
fmt,
|
||||
fs::Metadata,
|
||||
io,
|
||||
ops::{Deref, DerefMut},
|
||||
path::{Path, PathBuf},
|
||||
time::{SystemTime, UNIX_EPOCH},
|
||||
};
|
||||
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::fs::MetadataExt;
|
||||
|
||||
use actix_http::body::AnyBody;
|
||||
use actix_service::{Service, ServiceFactory};
|
||||
use actix_web::{
|
||||
dev::{BodyEncoding, ServiceRequest, ServiceResponse, SizedStream},
|
||||
dev::{
|
||||
AppService, BodyEncoding, HttpServiceFactory, ResourceDef, ServiceRequest,
|
||||
ServiceResponse, SizedStream,
|
||||
},
|
||||
http::{
|
||||
header::{
|
||||
self, Charset, ContentDisposition, DispositionParam, DispositionType, ExtendedValue,
|
||||
@ -21,9 +26,9 @@ use actix_web::{
|
||||
Error, HttpMessage, HttpRequest, HttpResponse, Responder,
|
||||
};
|
||||
use bitflags::bitflags;
|
||||
use futures_core::future::LocalBoxFuture;
|
||||
use mime_guess::from_path;
|
||||
|
||||
use crate::ChunkedReadFile;
|
||||
use crate::{encoding::equiv_utf8_text, range::HttpRange};
|
||||
|
||||
bitflags! {
|
||||
@ -48,9 +53,9 @@ impl Default for Flags {
|
||||
/// use actix_web::App;
|
||||
/// use actix_files::NamedFile;
|
||||
///
|
||||
/// # fn run() -> Result<(), Box<dyn std::error::Error>> {
|
||||
/// let app = App::new()
|
||||
/// .service(NamedFile::open("./static/index.html")?);
|
||||
/// # async fn run() -> Result<(), Box<dyn std::error::Error>> {
|
||||
/// let file = NamedFile::open_async("./static/index.html").await?;
|
||||
/// let app = App::new().service(file);
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
/// ```
|
||||
@ -62,10 +67,9 @@ impl Default for Flags {
|
||||
///
|
||||
/// #[get("/")]
|
||||
/// async fn index() -> impl Responder {
|
||||
/// NamedFile::open("./static/index.html")
|
||||
/// NamedFile::open_async("./static/index.html").await
|
||||
/// }
|
||||
/// ```
|
||||
#[derive(Debug)]
|
||||
pub struct NamedFile {
|
||||
path: PathBuf,
|
||||
file: File,
|
||||
@ -78,6 +82,37 @@ pub struct NamedFile {
|
||||
pub(crate) encoding: Option<ContentEncoding>,
|
||||
}
|
||||
|
||||
impl fmt::Debug for NamedFile {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("NamedFile")
|
||||
.field("path", &self.path)
|
||||
.field(
|
||||
"file",
|
||||
#[cfg(feature = "experimental-io-uring")]
|
||||
{
|
||||
&"tokio_uring::File"
|
||||
},
|
||||
#[cfg(not(feature = "experimental-io-uring"))]
|
||||
{
|
||||
&self.file
|
||||
},
|
||||
)
|
||||
.field("modified", &self.modified)
|
||||
.field("md", &self.md)
|
||||
.field("flags", &self.flags)
|
||||
.field("status_code", &self.status_code)
|
||||
.field("content_type", &self.content_type)
|
||||
.field("content_disposition", &self.content_disposition)
|
||||
.field("encoding", &self.encoding)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "experimental-io-uring"))]
|
||||
pub(crate) use std::fs::File;
|
||||
#[cfg(feature = "experimental-io-uring")]
|
||||
pub(crate) use tokio_uring::fs::File;
|
||||
|
||||
impl NamedFile {
|
||||
/// Creates an instance from a previously opened file.
|
||||
///
|
||||
@ -85,8 +120,7 @@ impl NamedFile {
|
||||
/// `ContentDisposition` headers.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// ```ignore
|
||||
/// use actix_files::NamedFile;
|
||||
/// use std::io::{self, Write};
|
||||
/// use std::env;
|
||||
@ -147,7 +181,30 @@ impl NamedFile {
|
||||
(ct, cd)
|
||||
};
|
||||
|
||||
let md = file.metadata()?;
|
||||
let md = {
|
||||
#[cfg(not(feature = "experimental-io-uring"))]
|
||||
{
|
||||
file.metadata()?
|
||||
}
|
||||
|
||||
#[cfg(feature = "experimental-io-uring")]
|
||||
{
|
||||
use std::os::unix::prelude::{AsRawFd, FromRawFd};
|
||||
|
||||
let fd = file.as_raw_fd();
|
||||
|
||||
// SAFETY: fd is borrowed and lives longer than the unsafe block
|
||||
unsafe {
|
||||
let file = std::fs::File::from_raw_fd(fd);
|
||||
let md = file.metadata();
|
||||
// SAFETY: forget the fd before exiting block in success or error case but don't
|
||||
// run destructor (that would close file handle)
|
||||
std::mem::forget(file);
|
||||
md?
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let modified = md.modified().ok();
|
||||
let encoding = None;
|
||||
|
||||
@ -164,17 +221,45 @@ impl NamedFile {
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "experimental-io-uring"))]
|
||||
/// Attempts to open a file in read-only mode.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use actix_files::NamedFile;
|
||||
///
|
||||
/// let file = NamedFile::open("foo.txt");
|
||||
/// ```
|
||||
pub fn open<P: AsRef<Path>>(path: P) -> io::Result<NamedFile> {
|
||||
Self::from_file(File::open(&path)?, path)
|
||||
let file = File::open(&path)?;
|
||||
Self::from_file(file, path)
|
||||
}
|
||||
|
||||
/// Attempts to open a file asynchronously in read-only mode.
|
||||
///
|
||||
/// When the `experimental-io-uring` crate feature is enabled, this will be async.
|
||||
/// Otherwise, it will be just like [`open`][Self::open].
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// use actix_files::NamedFile;
|
||||
/// # async fn open() {
|
||||
/// let file = NamedFile::open_async("foo.txt").await.unwrap();
|
||||
/// # }
|
||||
/// ```
|
||||
pub async fn open_async<P: AsRef<Path>>(path: P) -> io::Result<NamedFile> {
|
||||
let file = {
|
||||
#[cfg(not(feature = "experimental-io-uring"))]
|
||||
{
|
||||
File::open(&path)?
|
||||
}
|
||||
|
||||
#[cfg(feature = "experimental-io-uring")]
|
||||
{
|
||||
File::open(&path).await?
|
||||
}
|
||||
};
|
||||
|
||||
Self::from_file(file, path)
|
||||
}
|
||||
|
||||
/// Returns reference to the underlying `File` object.
|
||||
@ -186,13 +271,12 @@ impl NamedFile {
|
||||
/// Retrieve the path of this file.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// # use std::io;
|
||||
/// use actix_files::NamedFile;
|
||||
///
|
||||
/// # fn path() -> io::Result<()> {
|
||||
/// let file = NamedFile::open("test.txt")?;
|
||||
/// # async fn path() -> io::Result<()> {
|
||||
/// let file = NamedFile::open_async("test.txt").await?;
|
||||
/// assert_eq!(file.path().as_os_str(), "foo.txt");
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
@ -332,7 +416,7 @@ impl NamedFile {
|
||||
res.encoding(current_encoding);
|
||||
}
|
||||
|
||||
let reader = ChunkedReadFile::new(self.md.len(), 0, self.file);
|
||||
let reader = super::chunked::new_chunked_read(self.md.len(), 0, self.file);
|
||||
|
||||
return res.streaming(reader);
|
||||
}
|
||||
@ -443,10 +527,10 @@ impl NamedFile {
|
||||
if precondition_failed {
|
||||
return resp.status(StatusCode::PRECONDITION_FAILED).finish();
|
||||
} else if not_modified {
|
||||
return resp.status(StatusCode::NOT_MODIFIED).finish();
|
||||
return resp.status(StatusCode::NOT_MODIFIED).body(AnyBody::None);
|
||||
}
|
||||
|
||||
let reader = ChunkedReadFile::new(length, offset, self.file);
|
||||
let reader = super::chunked::new_chunked_read(length, offset, self.file);
|
||||
|
||||
if offset != 0 || length != self.md.len() {
|
||||
resp.status(StatusCode::PARTIAL_CONTENT);
|
||||
@ -456,20 +540,6 @@ impl NamedFile {
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for NamedFile {
|
||||
type Target = File;
|
||||
|
||||
fn deref(&self) -> &File {
|
||||
&self.file
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for NamedFile {
|
||||
fn deref_mut(&mut self) -> &mut File {
|
||||
&mut self.file
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if `req` has no `If-Match` header or one which matches `etag`.
|
||||
fn any_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {
|
||||
match req.get_header::<header::IfMatch>() {
|
||||
@ -510,6 +580,20 @@ fn none_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for NamedFile {
|
||||
type Target = File;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.file
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for NamedFile {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.file
|
||||
}
|
||||
}
|
||||
|
||||
impl Responder for NamedFile {
|
||||
fn respond_to(self, req: &HttpRequest) -> HttpResponse {
|
||||
self.into_response(req)
|
||||
@ -520,14 +604,16 @@ impl ServiceFactory<ServiceRequest> for NamedFile {
|
||||
type Response = ServiceResponse;
|
||||
type Error = Error;
|
||||
type Config = ();
|
||||
type InitError = ();
|
||||
type Service = NamedFileService;
|
||||
type Future = Ready<Result<Self::Service, ()>>;
|
||||
type InitError = ();
|
||||
type Future = LocalBoxFuture<'static, Result<Self::Service, Self::InitError>>;
|
||||
|
||||
fn new_service(&self, _: ()) -> Self::Future {
|
||||
ok(NamedFileService {
|
||||
let service = NamedFileService {
|
||||
path: self.path.clone(),
|
||||
})
|
||||
};
|
||||
|
||||
Box::pin(async move { Ok(service) })
|
||||
}
|
||||
}
|
||||
|
||||
@ -540,18 +626,19 @@ pub struct NamedFileService {
|
||||
impl Service<ServiceRequest> for NamedFileService {
|
||||
type Response = ServiceResponse;
|
||||
type Error = Error;
|
||||
type Future = Ready<Result<Self::Response, Self::Error>>;
|
||||
type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;
|
||||
|
||||
actix_service::always_ready!();
|
||||
|
||||
fn call(&self, req: ServiceRequest) -> Self::Future {
|
||||
let (req, _) = req.into_parts();
|
||||
ready(
|
||||
NamedFile::open(&self.path)
|
||||
.map_err(|e| e.into())
|
||||
.map(|f| f.into_response(&req))
|
||||
.map(|res| ServiceResponse::new(req, res)),
|
||||
)
|
||||
|
||||
let path = self.path.clone();
|
||||
Box::pin(async move {
|
||||
let file = NamedFile::open_async(path).await?;
|
||||
let res = file.into_response(&req);
|
||||
Ok(ServiceResponse::new(req, res))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -8,7 +8,7 @@ use actix_web::{dev::Payload, FromRequest, HttpRequest};

use crate::error::UriSegmentError;

#[derive(Debug)]
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct PathBufWrap(PathBuf);

impl FromStr for PathBufWrap {
@@ -21,6 +21,8 @@ impl FromStr for PathBufWrap {

impl PathBufWrap {
    /// Parse a path, giving the choice of allowing hidden files to be considered valid segments.
    ///
    /// Path traversal is guarded by this method.
    pub fn parse_path(path: &str, hidden_files: bool) -> Result<Self, UriSegmentError> {
        let mut buf = PathBuf::new();

@@ -59,7 +61,6 @@ impl AsRef<Path> for PathBufWrap {
impl FromRequest for PathBufWrap {
    type Error = UriSegmentError;
    type Future = Ready<Result<Self, Self::Error>>;
    type Config = ();

    fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
        ready(req.match_info().path().parse())
@@ -116,4 +117,24 @@ mod tests {
            PathBuf::from_iter(vec!["test", ".tt"])
        );
    }

    #[test]
    fn path_traversal() {
        assert_eq!(
            PathBufWrap::parse_path("/../README.md", false).unwrap().0,
            PathBuf::from_iter(vec!["README.md"])
        );

        assert_eq!(
            PathBufWrap::parse_path("/../README.md", true).unwrap().0,
            PathBuf::from_iter(vec!["README.md"])
        );

        assert_eq!(
            PathBufWrap::parse_path("/../../../../../../../../../../etc/passwd", false)
                .unwrap()
                .0,
            PathBuf::from_iter(vec!["etc/passwd"])
        );
    }
}
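The `parse_path` doc comment and the `path_traversal` tests above pin down the guarantee: `..` segments can never climb out of the served root. A minimal sketch of that segment-filtering idea, independent of the real actix-files implementation (`sanitize_segments` is an illustrative name, and the real code additionally rejects bad segments with `UriSegmentError`):

```rust
use std::path::PathBuf;

/// Illustration only: build a path from URI segments while refusing to
/// walk above the root. Not the actix-files implementation.
fn sanitize_segments(path: &str) -> PathBuf {
    let mut buf = PathBuf::new();

    for segment in path.split('/') {
        match segment {
            // empty and current-dir segments contribute nothing
            "" | "." => {}
            // a parent-dir segment may only pop what was pushed earlier,
            // so "/../../etc/passwd" can never leave the root
            ".." => {
                buf.pop();
            }
            other => buf.push(other),
        }
    }

    buf
}

fn main() {
    assert_eq!(sanitize_segments("/../README.md"), PathBuf::from("README.md"));
    assert_eq!(
        sanitize_segments("/../../../../etc/passwd"),
        PathBuf::from("etc/passwd")
    );
}
```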
@@ -1,7 +1,6 @@
use std::{fmt, io, path::PathBuf, rc::Rc};
use std::{fmt, io, ops::Deref, path::PathBuf, rc::Rc};

use actix_service::Service;
use actix_utils::future::ok;
use actix_web::{
    dev::{ServiceRequest, ServiceResponse},
    error::Error,
@@ -17,7 +16,18 @@ use crate::{
};

/// Assembled file serving service.
pub struct FilesService {
#[derive(Clone)]
pub struct FilesService(pub(crate) Rc<FilesServiceInner>);

impl Deref for FilesService {
    type Target = FilesServiceInner;

    fn deref(&self) -> &Self::Target {
        &*self.0
    }
}

pub struct FilesServiceInner {
    pub(crate) directory: PathBuf,
    pub(crate) index: Option<String>,
    pub(crate) show_index: bool,
@@ -31,20 +41,50 @@ pub struct FilesService {
    pub(crate) hidden_files: bool,
}

impl fmt::Debug for FilesServiceInner {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("FilesServiceInner")
    }
}

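The change above turns `FilesService` into a cheap clone handle: the state lives behind `Rc<FilesServiceInner>` and `Deref` forwards field access, which is what lets `call` move a clone of the whole service into a `'static` boxed future. A small sketch of the same pattern with made-up types:

```rust
use std::{ops::Deref, rc::Rc};

// Hypothetical stand-ins; only the Rc-handle-plus-Deref pattern mirrors the diff above.
struct Inner {
    root: String,
}

#[derive(Clone)]
struct Handle(Rc<Inner>);

impl Deref for Handle {
    type Target = Inner;

    fn deref(&self) -> &Self::Target {
        &*self.0
    }
}

fn main() {
    let handle = Handle(Rc::new(Inner { root: "./static".to_owned() }));

    // Cloning copies only the Rc pointer, so a clone can be moved into a
    // 'static async block while callers keep their own copy.
    let this = handle.clone();
    let fut = async move { format!("serving from {}", this.root) };

    // Deref forwards field access, so the handle reads like the inner struct.
    assert_eq!(handle.root, "./static");
    drop(fut);
}
```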
impl FilesService {
|
||||
fn handle_err(
|
||||
async fn handle_err(
|
||||
&self,
|
||||
err: io::Error,
|
||||
req: ServiceRequest,
|
||||
) -> LocalBoxFuture<'static, Result<ServiceResponse, Error>> {
|
||||
) -> Result<ServiceResponse, Error> {
|
||||
log::debug!("error handling {}: {}", req.path(), err);
|
||||
|
||||
if let Some(ref default) = self.default {
|
||||
Box::pin(default.call(req))
|
||||
default.call(req).await
|
||||
} else {
|
||||
Box::pin(ok(req.error_response(err)))
|
||||
Ok(req.error_response(err))
|
||||
}
|
||||
}
|
||||
|
||||
fn serve_named_file(
|
||||
&self,
|
||||
req: ServiceRequest,
|
||||
mut named_file: NamedFile,
|
||||
) -> ServiceResponse {
|
||||
if let Some(ref mime_override) = self.mime_override {
|
||||
let new_disposition = mime_override(&named_file.content_type.type_());
|
||||
named_file.content_disposition.disposition = new_disposition;
|
||||
}
|
||||
named_file.flags = self.file_flags;
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
let res = named_file.into_response(&req);
|
||||
ServiceResponse::new(req, res)
|
||||
}
|
||||
|
||||
fn show_index(&self, req: ServiceRequest, path: PathBuf) -> ServiceResponse {
|
||||
let dir = Directory::new(self.directory.clone(), path);
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
|
||||
(self.renderer)(&dir, &req).unwrap_or_else(|e| ServiceResponse::from_err(e, req))
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for FilesService {
|
||||
@ -56,7 +96,7 @@ impl fmt::Debug for FilesService {
|
||||
impl Service<ServiceRequest> for FilesService {
|
||||
type Response = ServiceResponse;
|
||||
type Error = Error;
|
||||
type Future = LocalBoxFuture<'static, Result<ServiceResponse, Error>>;
|
||||
type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;
|
||||
|
||||
actix_service::always_ready!();
|
||||
|
||||
@ -69,103 +109,87 @@ impl Service<ServiceRequest> for FilesService {
|
||||
matches!(*req.method(), Method::HEAD | Method::GET)
|
||||
};
|
||||
|
||||
if !is_method_valid {
|
||||
return Box::pin(ok(req.into_response(
|
||||
actix_web::HttpResponse::MethodNotAllowed()
|
||||
.insert_header(header::ContentType(mime::TEXT_PLAIN_UTF_8))
|
||||
.body("Request did not meet this resource's requirements."),
|
||||
)));
|
||||
}
|
||||
let this = self.clone();
|
||||
|
||||
let real_path =
|
||||
match PathBufWrap::parse_path(req.match_info().path(), self.hidden_files) {
|
||||
Ok(item) => item,
|
||||
Err(e) => return Box::pin(ok(req.error_response(e))),
|
||||
};
|
||||
Box::pin(async move {
|
||||
if !is_method_valid {
|
||||
return Ok(req.into_response(
|
||||
actix_web::HttpResponse::MethodNotAllowed()
|
||||
.insert_header(header::ContentType(mime::TEXT_PLAIN_UTF_8))
|
||||
.body("Request did not meet this resource's requirements."),
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(filter) = &self.path_filter {
|
||||
if !filter(real_path.as_ref(), req.head()) {
|
||||
if let Some(ref default) = self.default {
|
||||
return Box::pin(default.call(req));
|
||||
} else {
|
||||
return Box::pin(ok(
|
||||
req.into_response(actix_web::HttpResponse::NotFound().finish())
|
||||
let real_path =
|
||||
match PathBufWrap::parse_path(req.match_info().path(), this.hidden_files) {
|
||||
Ok(item) => item,
|
||||
Err(e) => return Ok(req.error_response(e)),
|
||||
};
|
||||
|
||||
if let Some(filter) = &this.path_filter {
|
||||
if !filter(real_path.as_ref(), req.head()) {
|
||||
if let Some(ref default) = this.default {
|
||||
return default.call(req).await;
|
||||
} else {
|
||||
return Ok(
|
||||
req.into_response(actix_web::HttpResponse::NotFound().finish())
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// full file path
|
||||
let path = this.directory.join(&real_path);
|
||||
if let Err(err) = path.canonicalize() {
|
||||
return this.handle_err(err, req).await;
|
||||
}
|
||||
|
||||
if path.is_dir() {
|
||||
if this.redirect_to_slash
|
||||
&& !req.path().ends_with('/')
|
||||
&& (this.index.is_some() || this.show_index)
|
||||
{
|
||||
let redirect_to = format!("{}/", req.path());
|
||||
|
||||
return Ok(req.into_response(
|
||||
HttpResponse::Found()
|
||||
.insert_header((header::LOCATION, redirect_to))
|
||||
.finish(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// full file path
|
||||
let path = self.directory.join(&real_path);
|
||||
if let Err(err) = path.canonicalize() {
|
||||
return Box::pin(self.handle_err(err, req));
|
||||
}
|
||||
|
||||
if path.is_dir() {
|
||||
if self.redirect_to_slash
|
||||
&& !req.path().ends_with('/')
|
||||
&& (self.index.is_some() || self.show_index)
|
||||
{
|
||||
let redirect_to = format!("{}/", req.path());
|
||||
|
||||
return Box::pin(ok(req.into_response(
|
||||
HttpResponse::Found()
|
||||
.insert_header((header::LOCATION, redirect_to))
|
||||
.finish(),
|
||||
)));
|
||||
}
|
||||
|
||||
let serve_named_file = |req: ServiceRequest, mut named_file: NamedFile| {
|
||||
if let Some(ref mime_override) = self.mime_override {
|
||||
let new_disposition = mime_override(&named_file.content_type.type_());
|
||||
named_file.content_disposition.disposition = new_disposition;
|
||||
}
|
||||
named_file.flags = self.file_flags;
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
let res = named_file.into_response(&req);
|
||||
Box::pin(ok(ServiceResponse::new(req, res)))
|
||||
};
|
||||
|
||||
let show_index = |req: ServiceRequest| {
|
||||
let dir = Directory::new(self.directory.clone(), path.clone());
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
let x = (self.renderer)(&dir, &req);
|
||||
|
||||
Box::pin(match x {
|
||||
Ok(resp) => ok(resp),
|
||||
Err(err) => ok(ServiceResponse::from_err(err, req)),
|
||||
})
|
||||
};
|
||||
|
||||
match self.index {
|
||||
Some(ref index) => match NamedFile::open(path.join(index)) {
|
||||
Ok(named_file) => serve_named_file(req, named_file),
|
||||
Err(_) if self.show_index => show_index(req),
|
||||
Err(err) => self.handle_err(err, req),
|
||||
},
|
||||
None if self.show_index => show_index(req),
|
||||
_ => Box::pin(ok(ServiceResponse::from_err(
|
||||
FilesError::IsDirectory,
|
||||
req.into_parts().0,
|
||||
))),
|
||||
}
|
||||
} else {
|
||||
match NamedFile::open(path) {
|
||||
Ok(mut named_file) => {
|
||||
if let Some(ref mime_override) = self.mime_override {
|
||||
let new_disposition = mime_override(&named_file.content_type.type_());
|
||||
named_file.content_disposition.disposition = new_disposition;
|
||||
match this.index {
|
||||
Some(ref index) => {
|
||||
let named_path = path.join(index);
|
||||
match NamedFile::open_async(named_path).await {
|
||||
Ok(named_file) => Ok(this.serve_named_file(req, named_file)),
|
||||
Err(_) if this.show_index => Ok(this.show_index(req, path)),
|
||||
Err(err) => this.handle_err(err, req).await,
|
||||
}
|
||||
}
|
||||
named_file.flags = self.file_flags;
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
let res = named_file.into_response(&req);
|
||||
Box::pin(ok(ServiceResponse::new(req, res)))
|
||||
None if this.show_index => Ok(this.show_index(req, path)),
|
||||
_ => Ok(ServiceResponse::from_err(
|
||||
FilesError::IsDirectory,
|
||||
req.into_parts().0,
|
||||
)),
|
||||
}
|
||||
} else {
|
||||
match NamedFile::open_async(&path).await {
|
||||
Ok(mut named_file) => {
|
||||
if let Some(ref mime_override) = this.mime_override {
|
||||
let new_disposition =
|
||||
mime_override(&named_file.content_type.type_());
|
||||
named_file.content_disposition.disposition = new_disposition;
|
||||
}
|
||||
named_file.flags = this.file_flags;
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
let res = named_file.into_response(&req);
|
||||
Ok(ServiceResponse::new(req, res))
|
||||
}
|
||||
Err(err) => this.handle_err(err, req).await,
|
||||
}
|
||||
Err(err) => self.handle_err(err, req),
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@@ -8,7 +8,7 @@ use actix_web::{
    App,
};

#[actix_rt::test]
#[actix_web::test]
async fn test_utf8_file_contents() {
    // use default ISO-8859-1 encoding
    let srv = test::init_service(App::new().service(Files::new("/", "./tests"))).await;

@@ -7,7 +7,7 @@ use actix_web::{
};
use bytes::Bytes;

#[actix_rt::test]
#[actix_web::test]
async fn test_guard_filter() {
    let srv = test::init_service(
        App::new()
actix-files/tests/traversal.rs (new file, 27 lines)
@@ -0,0 +1,27 @@
use actix_files::Files;
use actix_web::{
    http::StatusCode,
    test::{self, TestRequest},
    App,
};

#[actix_rt::test]
async fn test_directory_traversal_prevention() {
    let srv = test::init_service(App::new().service(Files::new("/", "./tests"))).await;

    let req =
        TestRequest::with_uri("/../../../../../../../../../../../etc/passwd").to_request();
    let res = test::call_service(&srv, req).await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);

    let req = TestRequest::with_uri(
        "/%2e%2e/%2e%2e/%2e%2e/%2e%2e/%2e%2e/%2e%2e/%2e%2e/%2e%2e/%2e%2e/%2e%2e/etc/passwd",
    )
    .to_request();
    let res = test::call_service(&srv, req).await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);

    let req = TestRequest::with_uri("/%00/etc/passwd%00").to_request();
    let res = test::call_service(&srv, req).await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
}
@@ -3,6 +3,26 @@
## Unreleased - 2021-xx-xx


## 3.0.0-beta.8 - 2021-11-30
* Update `actix-tls` to `3.0.0-rc.1`. [#2474]

[#2474]: https://github.com/actix/actix-web/pull/2474


## 3.0.0-beta.7 - 2021-11-22
* Fix compatibility with experimental `io-uring` feature of `actix-rt`. [#2408]

[#2408]: https://github.com/actix/actix-web/pull/2408


## 3.0.0-beta.6 - 2021-11-15
* `TestServer::stop` is now async and will wait for the server and system to shutdown. [#2442]
* Update `actix-server` to `2.0.0-beta.9`. [#2442]
* Minimum supported Rust version (MSRV) is now 1.52.

[#2442]: https://github.com/actix/actix-web/pull/2442


## 3.0.0-beta.5 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.
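The beta.6 entry above makes `TestServer::stop` async so a test can wait for the server, the `System`, and its worker thread to fully shut down. A sketch of how a test might use it, modelled on the crate's own tests; the `#[actix_rt::test]` attribute, stub handler, and route are assumptions rather than content of this diff:

```rust
use actix_http::{http::StatusCode, Error, HttpService, Request, Response};
use actix_http_test::test_server;

#[actix_rt::test]
async fn shuts_down_cleanly() {
    // spin up a server with a stub handler
    let mut srv = test_server(|| {
        HttpService::build()
            .h1(|_req: Request| async { Ok::<_, Error>(Response::new(StatusCode::OK)) })
            .tcp()
    })
    .await;

    let res = srv.get("/").send().await.unwrap();
    assert!(res.status().is_success());

    // since 3.0.0-beta.6 this is async: it resolves only after the server
    // and its System have shut down, so the test leaves nothing running
    srv.stop().await;
}
```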
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-http-test"
|
||||
version = "3.0.0-beta.5"
|
||||
version = "3.0.0-beta.8"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Various helpers for Actix applications to use during testing"
|
||||
keywords = ["http", "web", "framework", "async", "futures"]
|
||||
@ -30,26 +30,26 @@ openssl = ["tls-openssl", "awc/openssl"]
|
||||
|
||||
[dependencies]
|
||||
actix-service = "2.0.0"
|
||||
actix-codec = "0.4.0"
|
||||
actix-tls = "3.0.0-beta.5"
|
||||
actix-codec = "0.4.1"
|
||||
actix-tls = "3.0.0-rc.1"
|
||||
actix-utils = "3.0.0"
|
||||
actix-rt = "2.2"
|
||||
actix-server = "2.0.0-beta.3"
|
||||
awc = { version = "3.0.0-beta.8", default-features = false }
|
||||
actix-server = "2.0.0-beta.9"
|
||||
awc = { version = "3.0.0-beta.11", default-features = false }
|
||||
|
||||
base64 = "0.13"
|
||||
bytes = "1"
|
||||
futures-core = { version = "0.3.7", default-features = false }
|
||||
http = "0.2.2"
|
||||
http = "0.2.5"
|
||||
log = "0.4"
|
||||
socket2 = "0.4"
|
||||
serde = "1.0"
|
||||
serde_json = "1.0"
|
||||
slab = "0.4"
|
||||
serde_urlencoded = "0.7"
|
||||
time = { version = "0.2.23", default-features = false, features = ["std"] }
|
||||
tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
|
||||
tokio = { version = "1.2", features = ["sync"] }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false, features = ["cookies"] }
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-web = { version = "4.0.0-beta.11", default-features = false, features = ["cookies"] }
|
||||
actix-http = "3.0.0-beta.14"
|
||||
|
@ -3,15 +3,15 @@
|
||||
> Various helpers for Actix applications to use during testing.
|
||||
|
||||
[](https://crates.io/crates/actix-http-test)
|
||||
[](https://docs.rs/actix-http-test/3.0.0-beta.5)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||
[](https://docs.rs/actix-http-test/3.0.0-beta.8)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
|
||||

|
||||
<br>
|
||||
[](https://deps.rs/crate/actix-http-test/3.0.0-beta.5)
|
||||
[](https://deps.rs/crate/actix-http-test/3.0.0-beta.8)
|
||||
[](https://crates.io/crates/actix-http-test)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
## Documentation & Resources
|
||||
|
||||
- [API Documentation](https://docs.rs/actix-http-test)
|
||||
- Minimum Supported Rust Version (MSRV): 1.51.0
|
||||
- Minimum Supported Rust Version (MSRV): 1.52
|
||||
|
@ -7,8 +7,7 @@
|
||||
#[cfg(feature = "openssl")]
|
||||
extern crate tls_openssl as openssl;
|
||||
|
||||
use std::sync::mpsc;
|
||||
use std::{net, thread, time};
|
||||
use std::{net, thread, time::Duration};
|
||||
|
||||
use actix_codec::{AsyncRead, AsyncWrite, Framed};
|
||||
use actix_rt::{net::TcpStream, System};
|
||||
@ -20,29 +19,28 @@ use bytes::Bytes;
|
||||
use futures_core::stream::Stream;
|
||||
use http::Method;
|
||||
use socket2::{Domain, Protocol, Socket, Type};
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
/// Start test server
|
||||
/// Start test server.
|
||||
///
|
||||
/// `TestServer` is very simple test server that simplify process of writing
|
||||
/// integration tests cases for actix web applications.
|
||||
/// `TestServer` is very simple test server that simplify process of writing integration tests cases
|
||||
/// for HTTP applications.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// ```no_run
|
||||
/// use actix_http::HttpService;
|
||||
/// use actix_http_test::TestServer;
|
||||
/// use actix_http_test::test_server;
|
||||
/// use actix_web::{web, App, HttpResponse, Error};
|
||||
///
|
||||
/// async fn my_handler() -> Result<HttpResponse, Error> {
|
||||
/// Ok(HttpResponse::Ok().into())
|
||||
/// }
|
||||
///
|
||||
/// #[actix_rt::test]
|
||||
/// #[actix_web::test]
|
||||
/// async fn test_example() {
|
||||
/// let mut srv = TestServer::start(
|
||||
/// || HttpService::new(
|
||||
/// App::new().service(
|
||||
/// web::resource("/").to(my_handler))
|
||||
/// let mut srv = TestServer::start(||
|
||||
/// HttpService::new(
|
||||
/// App::new().service(web::resource("/").to(my_handler))
|
||||
/// )
|
||||
/// );
|
||||
///
|
||||
@ -56,72 +54,86 @@ pub async fn test_server<F: ServiceFactory<TcpStream>>(factory: F) -> TestServer
|
||||
test_server_with_addr(tcp, factory).await
|
||||
}
|
||||
|
||||
/// Start [`test server`](test_server()) on a concrete Address
|
||||
/// Start [`test server`](test_server()) on an existing address binding.
|
||||
pub async fn test_server_with_addr<F: ServiceFactory<TcpStream>>(
|
||||
tcp: net::TcpListener,
|
||||
factory: F,
|
||||
) -> TestServer {
|
||||
let (tx, rx) = mpsc::channel();
|
||||
let (started_tx, started_rx) = std::sync::mpsc::channel();
|
||||
let (thread_stop_tx, thread_stop_rx) = mpsc::channel(1);
|
||||
|
||||
// run server in separate thread
|
||||
thread::spawn(move || {
|
||||
let sys = System::new();
|
||||
let local_addr = tcp.local_addr().unwrap();
|
||||
System::new().block_on(async move {
|
||||
let local_addr = tcp.local_addr().unwrap();
|
||||
|
||||
let srv = Server::build()
|
||||
.listen("test", tcp, factory)?
|
||||
.workers(1)
|
||||
.disable_signals();
|
||||
let srv = Server::build()
|
||||
.workers(1)
|
||||
.disable_signals()
|
||||
.system_exit()
|
||||
.listen("test", tcp, factory)
|
||||
.expect("test server could not be created");
|
||||
|
||||
sys.block_on(async {
|
||||
srv.run();
|
||||
tx.send((System::current(), local_addr)).unwrap();
|
||||
let srv = srv.run();
|
||||
started_tx
|
||||
.send((System::current(), srv.handle(), local_addr))
|
||||
.unwrap();
|
||||
|
||||
// drive server loop
|
||||
srv.await.unwrap();
|
||||
});
|
||||
|
||||
sys.run()
|
||||
// notify TestServer that server and system have shut down
|
||||
// all thread managed resources should be dropped at this point
|
||||
let _ = thread_stop_tx.send(());
|
||||
});
|
||||
|
||||
let (system, addr) = rx.recv().unwrap();
|
||||
let (system, server, addr) = started_rx.recv().unwrap();
|
||||
|
||||
let client = {
|
||||
#[cfg(feature = "openssl")]
|
||||
let connector = {
|
||||
#[cfg(feature = "openssl")]
|
||||
{
|
||||
use openssl::ssl::{SslConnector, SslMethod, SslVerifyMode};
|
||||
use openssl::ssl::{SslConnector, SslMethod, SslVerifyMode};
|
||||
|
||||
let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
|
||||
builder.set_verify(SslVerifyMode::NONE);
|
||||
let _ = builder
|
||||
.set_alpn_protos(b"\x02h2\x08http/1.1")
|
||||
.map_err(|e| log::error!("Can not set alpn protocol: {:?}", e));
|
||||
Connector::new()
|
||||
.conn_lifetime(time::Duration::from_secs(0))
|
||||
.timeout(time::Duration::from_millis(30000))
|
||||
.ssl(builder.build())
|
||||
}
|
||||
#[cfg(not(feature = "openssl"))]
|
||||
{
|
||||
Connector::new()
|
||||
.conn_lifetime(time::Duration::from_secs(0))
|
||||
.timeout(time::Duration::from_millis(30000))
|
||||
}
|
||||
let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
|
||||
|
||||
builder.set_verify(SslVerifyMode::NONE);
|
||||
let _ = builder
|
||||
.set_alpn_protos(b"\x02h2\x08http/1.1")
|
||||
.map_err(|e| log::error!("Can not set alpn protocol: {:?}", e));
|
||||
|
||||
Connector::new()
|
||||
.conn_lifetime(Duration::from_secs(0))
|
||||
.timeout(Duration::from_millis(30000))
|
||||
.ssl(builder.build())
|
||||
};
|
||||
|
||||
#[cfg(not(feature = "openssl"))]
|
||||
let connector = {
|
||||
Connector::new()
|
||||
.conn_lifetime(Duration::from_secs(0))
|
||||
.timeout(Duration::from_millis(30000))
|
||||
};
|
||||
|
||||
Client::builder().connector(connector).finish()
|
||||
};
|
||||
|
||||
TestServer {
|
||||
addr,
|
||||
server,
|
||||
client,
|
||||
system,
|
||||
addr,
|
||||
thread_stop_rx,
|
||||
}
|
||||
}
|
||||
|
||||
/// Test server controller
|
||||
pub struct TestServer {
|
||||
server: actix_server::ServerHandle,
|
||||
client: awc::Client,
|
||||
system: actix_rt::System,
|
||||
addr: net::SocketAddr,
|
||||
client: Client,
|
||||
system: System,
|
||||
thread_stop_rx: mpsc::Receiver<()>,
|
||||
}
|
||||
|
||||
impl TestServer {
|
||||
@ -258,15 +270,32 @@ impl TestServer {
|
||||
self.client.headers()
|
||||
}
|
||||
|
||||
/// Stop HTTP server
|
||||
fn stop(&mut self) {
|
||||
/// Stop HTTP server.
|
||||
///
|
||||
/// Waits for spawned `Server` and `System` to (force) shutdown.
|
||||
pub async fn stop(&mut self) {
|
||||
// signal server to stop
|
||||
self.server.stop(false).await;
|
||||
|
||||
// also signal system to stop
|
||||
// though this is handled by `ServerBuilder::exit_system` too
|
||||
self.system.stop();
|
||||
|
||||
// wait for thread to be stopped but don't care about result
|
||||
let _ = self.thread_stop_rx.recv().await;
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for TestServer {
|
||||
fn drop(&mut self) {
|
||||
self.stop()
|
||||
// calls in this Drop impl should be enough to shut down the server, system, and thread
|
||||
// without needing to await anything
|
||||
|
||||
// signal server to stop
|
||||
let _ = self.server.stop(true);
|
||||
|
||||
// signal system to stop
|
||||
self.system.stop();
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -3,6 +3,64 @@
## Unreleased - 2021-xx-xx


## 3.0.0-beta.14 - 2021-11-30
### Changed
* Guarantee ordering of `header::GetAll` iterator to be same as insertion order. [#2467]
* Expose `header::map` module. [#2467]
* Implement `ExactSizeIterator` and `FusedIterator` for all `HeaderMap` iterators. [#2470]
* Update `actix-tls` to `3.0.0-rc.1`. [#2474]

[#2467]: https://github.com/actix/actix-web/pull/2467
[#2470]: https://github.com/actix/actix-web/pull/2470
[#2474]: https://github.com/actix/actix-web/pull/2474


## 3.0.0-beta.13 - 2021-11-22
### Added
* `body::AnyBody::empty` for quickly creating an empty body. [#2446]
* `body::AnyBody::none` for quickly creating a "none" body. [#2456]
* `impl Clone` for `body::AnyBody<S> where S: Clone`. [#2448]
* `body::AnyBody::into_boxed` for quickly converting to a type-erased, boxed body type. [#2448]

### Changed
* Rename `body::AnyBody::{Message => Body}`. [#2446]
* Rename `body::AnyBody::{from_message => new_boxed}`. [#2448]
* Rename `body::AnyBody::{from_slice => copy_from_slice}`. [#2448]
* Rename `body::{BoxAnyBody => BoxBody}`. [#2448]
* Change representation of `AnyBody` to include a type parameter in `Body` variant. Defaults to `BoxBody`. [#2448]
* `Encoder::response` now returns `AnyBody<Encoder<B>>`. [#2448]

### Removed
* `body::AnyBody::Empty`; an empty body can now only be represented as a zero-length `Bytes` variant. [#2446]
* `body::BodySize::Empty`; an empty body can now only be represented as a `Sized(0)` variant. [#2446]
* `EncoderError::Boxed`; it is no longer required. [#2446]
* `body::ResponseBody`; its function is replaced by the new `body::AnyBody` enum. [#2446]

[#2446]: https://github.com/actix/actix-web/pull/2446
[#2448]: https://github.com/actix/actix-web/pull/2448
[#2456]: https://github.com/actix/actix-web/pull/2456


## 3.0.0-beta.12 - 2021-11-15
### Changed
* Update `actix-server` to `2.0.0-beta.9`. [#2442]

### Removed
* `client` module. [#2425]
* `trust-dns` feature. [#2425]

[#2425]: https://github.com/actix/actix-web/pull/2425
[#2442]: https://github.com/actix/actix-web/pull/2442


## 3.0.0-beta.11 - 2021-10-20
### Changed
* Updated rustls to v0.20. [#2414]
* Minimum supported Rust version (MSRV) is now 1.52.

[#2414]: https://github.com/actix/actix-web/pull/2414


## 3.0.0-beta.10 - 2021-09-09
### Changed
* `ContentEncoding` is now marked `#[non_exhaustive]`. [#2377]
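The beta.13 entries above rename most of the `AnyBody` constructors. A short sketch of how the renamed API reads from the caller's side, using only the constructors and size behaviour visible in this diff (the `fn main` wrapper and crate setup are assumed):

```rust
use actix_http::body::{AnyBody, BodySize, MessageBody as _};
use bytes::Bytes;

fn main() {
    // "none": no body at all, so Content-Length is skipped entirely
    let none = AnyBody::none();
    assert_eq!(none.size(), BodySize::None);

    // "empty": a zero-length Bytes body, replacing the removed AnyBody::Empty variant
    let empty = AnyBody::empty();
    assert_eq!(empty.size(), BodySize::Sized(0));

    // copy_from_slice replaces from_slice; owned buffers can use the From impls instead
    let copied = AnyBody::copy_from_slice(b"hello");
    let owned: AnyBody = Bytes::from_static(b"hello").into();
    assert_eq!(copied.size(), owned.size());

    // new_boxed (formerly from_message) erases the body type behind BoxBody
    let boxed = AnyBody::new_boxed(Bytes::from_static(b"hello"));
    assert_eq!(boxed.size(), BodySize::Sized(5));
}
```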
@ -1,14 +1,17 @@
|
||||
[package]
|
||||
name = "actix-http"
|
||||
version = "3.0.0-beta.10"
|
||||
version = "3.0.0-beta.14"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "HTTP primitives for the Actix ecosystem"
|
||||
keywords = ["actix", "http", "framework", "async", "futures"]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web"
|
||||
categories = ["network-programming", "asynchronous",
|
||||
"web-programming::http-server",
|
||||
"web-programming::websocket"]
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
categories = [
|
||||
"network-programming",
|
||||
"asynchronous",
|
||||
"web-programming::http-server",
|
||||
"web-programming::websocket",
|
||||
]
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
@ -24,29 +27,25 @@ path = "src/lib.rs"
|
||||
default = []
|
||||
|
||||
# openssl
|
||||
openssl = ["actix-tls/openssl"]
|
||||
openssl = ["actix-tls/accept", "actix-tls/openssl"]
|
||||
|
||||
# rustls support
|
||||
rustls = ["actix-tls/rustls"]
|
||||
rustls = ["actix-tls/accept", "actix-tls/rustls"]
|
||||
|
||||
# enable compression support
|
||||
compress-brotli = ["brotli2", "__compress"]
|
||||
compress-gzip = ["flate2", "__compress"]
|
||||
compress-zstd = ["zstd", "__compress"]
|
||||
|
||||
# trust-dns as client dns resolver
|
||||
trust-dns = ["trust-dns-resolver"]
|
||||
|
||||
# Internal (PRIVATE!) features used to aid testing and cheking feature status.
|
||||
# Don't rely on these whatsoever. They may disappear at anytime.
|
||||
__compress = []
|
||||
|
||||
[dependencies]
|
||||
actix-service = "2.0.0"
|
||||
actix-codec = "0.4.0"
|
||||
actix-codec = "0.4.1"
|
||||
actix-utils = "3.0.0"
|
||||
actix-rt = "2.2"
|
||||
actix-tls = { version = "3.0.0-beta.5", features = ["accept", "connect"] }
|
||||
|
||||
ahash = "0.7"
|
||||
base64 = "0.13"
|
||||
@ -58,45 +57,45 @@ encoding_rs = "0.8"
|
||||
futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] }
|
||||
futures-util = { version = "0.3.7", default-features = false, features = ["alloc", "sink"] }
|
||||
h2 = "0.3.1"
|
||||
http = "0.2.2"
|
||||
http = "0.2.5"
|
||||
httparse = "1.5.1"
|
||||
httpdate = "1.0.1"
|
||||
itoa = "0.4"
|
||||
language-tags = "0.3"
|
||||
local-channel = "0.1"
|
||||
once_cell = "1.5"
|
||||
log = "0.4"
|
||||
mime = "0.3"
|
||||
percent-encoding = "2.1"
|
||||
pin-project = "1.0.0"
|
||||
pin-project-lite = "0.2"
|
||||
rand = "0.8"
|
||||
regex = "1.3"
|
||||
serde = "1.0"
|
||||
sha-1 = "0.9"
|
||||
smallvec = "1.6.1"
|
||||
time = { version = "0.2.23", default-features = false, features = ["std"] }
|
||||
tokio = { version = "1.2", features = ["sync"] }
|
||||
|
||||
# tls
|
||||
actix-tls = { version = "3.0.0-rc.1", default-features = false, optional = true }
|
||||
|
||||
# compression
|
||||
brotli2 = { version="0.3.2", optional = true }
|
||||
flate2 = { version = "1.0.13", optional = true }
|
||||
zstd = { version = "0.7", optional = true }
|
||||
|
||||
trust-dns-resolver = { version = "0.20.0", optional = true }
|
||||
zstd = { version = "0.9", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-server = "2.0.0-beta.3"
|
||||
actix-http-test = { version = "3.0.0-beta.5", features = ["openssl"] }
|
||||
actix-tls = { version = "3.0.0-beta.5", features = ["openssl"] }
|
||||
actix-server = "2.0.0-beta.9"
|
||||
actix-http-test = { version = "3.0.0-beta.7", features = ["openssl"] }
|
||||
actix-tls = { version = "3.0.0-rc.1", features = ["openssl"] }
|
||||
async-stream = "0.3"
|
||||
criterion = { version = "0.3", features = ["html_reports"] }
|
||||
env_logger = "0.8"
|
||||
env_logger = "0.9"
|
||||
rcgen = "0.8"
|
||||
regex = "1.3"
|
||||
rustls-pemfile = "0.2"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
tls-openssl = { version = "0.10", package = "openssl" }
|
||||
tls-rustls = { version = "0.19", package = "rustls" }
|
||||
webpki = { version = "0.21.0" }
|
||||
static_assertions = "1"
|
||||
tls-openssl = { package = "openssl", version = "0.10.9" }
|
||||
tls-rustls = { package = "rustls", version = "0.20.0" }
|
||||
tokio = { version = "1.2", features = ["net", "rt"] }
|
||||
|
||||
[[example]]
|
||||
name = "ws"
|
||||
|
@ -3,18 +3,18 @@
|
||||
> HTTP primitives for the Actix ecosystem.
|
||||
|
||||
[](https://crates.io/crates/actix-http)
|
||||
[](https://docs.rs/actix-http/3.0.0-beta.10)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||
[](https://docs.rs/actix-http/3.0.0-beta.14)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-http/3.0.0-beta.10)
|
||||
[](https://deps.rs/crate/actix-http/3.0.0-beta.14)
|
||||
[](https://crates.io/crates/actix-http)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
## Documentation & Resources
|
||||
|
||||
- [API Documentation](https://docs.rs/actix-http)
|
||||
- Minimum Supported Rust Version (MSRV): 1.51.0
|
||||
- Minimum Supported Rust Version (MSRV): 1.52
|
||||
|
||||
## Example
|
||||
|
||||
|
@@ -1,12 +1,12 @@
use std::io;

use actix_http::{body::Body, http::HeaderValue, http::StatusCode};
use actix_http::{body::AnyBody, http::HeaderValue, http::StatusCode};
use actix_http::{Error, HttpService, Request, Response};
use actix_server::Server;
use bytes::BytesMut;
use futures_util::StreamExt as _;

async fn handle_request(mut req: Request) -> Result<Response<Body>, Error> {
async fn handle_request(mut req: Request) -> Result<Response<AnyBody>, Error> {
    let mut body = BytesMut::new();
    while let Some(item) = req.payload().next().await {
        body.extend_from_slice(&item?)
@ -85,22 +85,31 @@ impl Stream for Heartbeat {
|
||||
fn tls_config() -> rustls::ServerConfig {
|
||||
use std::io::BufReader;
|
||||
|
||||
use rustls::{
|
||||
internal::pemfile::{certs, pkcs8_private_keys},
|
||||
NoClientAuth, ServerConfig,
|
||||
};
|
||||
use rustls::{Certificate, PrivateKey};
|
||||
use rustls_pemfile::{certs, pkcs8_private_keys};
|
||||
|
||||
let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap();
|
||||
let cert_file = cert.serialize_pem().unwrap();
|
||||
let key_file = cert.serialize_private_key_pem();
|
||||
|
||||
let mut config = ServerConfig::new(NoClientAuth::new());
|
||||
let cert_file = &mut BufReader::new(cert_file.as_bytes());
|
||||
let key_file = &mut BufReader::new(key_file.as_bytes());
|
||||
|
||||
let cert_chain = certs(cert_file).unwrap();
|
||||
let cert_chain = certs(cert_file)
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.map(Certificate)
|
||||
.collect();
|
||||
let mut keys = pkcs8_private_keys(key_file).unwrap();
|
||||
config.set_single_cert(cert_chain, keys.remove(0)).unwrap();
|
||||
|
||||
let mut config = rustls::ServerConfig::builder()
|
||||
.with_safe_defaults()
|
||||
.with_no_client_auth()
|
||||
.with_single_cert(cert_chain, PrivateKey(keys.remove(0)))
|
||||
.unwrap();
|
||||
|
||||
config.alpn_protocols.push(b"http/1.1".to_vec());
|
||||
config.alpn_protocols.push(b"h2".to_vec());
|
||||
|
||||
config
|
||||
}
|
||||
|
@ -8,53 +8,94 @@ use std::{
|
||||
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use futures_core::Stream;
|
||||
use pin_project::pin_project;
|
||||
|
||||
use crate::error::Error;
|
||||
|
||||
use super::{BodySize, BodyStream, MessageBody, MessageBodyMapErr, SizedStream};
|
||||
|
||||
#[deprecated(since = "4.0.0", note = "Renamed to `AnyBody`.")]
|
||||
pub type Body = AnyBody;
|
||||
|
||||
/// Represents various types of HTTP message body.
|
||||
pub enum AnyBody {
|
||||
#[pin_project(project = AnyBodyProj)]
|
||||
#[derive(Clone)]
|
||||
pub enum AnyBody<B = BoxBody> {
|
||||
/// Empty response. `Content-Length` header is not set.
|
||||
None,
|
||||
|
||||
/// Zero sized response body. `Content-Length` header is set to `0`.
|
||||
Empty,
|
||||
|
||||
/// Specific response body.
|
||||
/// Complete, in-memory response body.
|
||||
Bytes(Bytes),
|
||||
|
||||
/// Generic message body.
|
||||
Message(BoxAnyBody),
|
||||
/// Generic / Other message body.
|
||||
Body(#[pin] B),
|
||||
}
|
||||
|
||||
impl AnyBody {
|
||||
/// Create body from slice (copy)
|
||||
pub fn from_slice(s: &[u8]) -> Self {
|
||||
Self::Bytes(Bytes::copy_from_slice(s))
|
||||
/// Constructs a "body" representing an empty response.
|
||||
pub fn none() -> Self {
|
||||
Self::None
|
||||
}
|
||||
|
||||
/// Create body from generic message body.
|
||||
pub fn from_message<B>(body: B) -> Self
|
||||
/// Constructs a new, 0-length body.
|
||||
pub fn empty() -> Self {
|
||||
Self::Bytes(Bytes::new())
|
||||
}
|
||||
|
||||
/// Create boxed body from generic message body.
|
||||
pub fn new_boxed<B>(body: B) -> Self
|
||||
where
|
||||
B: MessageBody + 'static,
|
||||
B::Error: Into<Box<dyn StdError + 'static>>,
|
||||
{
|
||||
Self::Message(BoxAnyBody::from_body(body))
|
||||
Self::Body(BoxBody::from_body(body))
|
||||
}
|
||||
|
||||
/// Constructs new `AnyBody` instance from a slice of bytes by copying it.
|
||||
///
|
||||
/// If your bytes container is owned, it may be cheaper to use a `From` impl.
|
||||
pub fn copy_from_slice(s: &[u8]) -> Self {
|
||||
Self::Bytes(Bytes::copy_from_slice(s))
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[deprecated(since = "4.0.0", note = "Renamed to `copy_from_slice`.")]
|
||||
pub fn from_slice(s: &[u8]) -> Self {
|
||||
Self::Bytes(Bytes::copy_from_slice(s))
|
||||
}
|
||||
}
|
||||
|
||||
impl MessageBody for AnyBody {
|
||||
impl<B> AnyBody<B>
|
||||
where
|
||||
B: MessageBody + 'static,
|
||||
B::Error: Into<Box<dyn StdError + 'static>>,
|
||||
{
|
||||
/// Create body from generic message body.
|
||||
pub fn new(body: B) -> Self {
|
||||
Self::Body(body)
|
||||
}
|
||||
|
||||
pub fn into_boxed(self) -> AnyBody {
|
||||
match self {
|
||||
Self::None => AnyBody::None,
|
||||
Self::Bytes(bytes) => AnyBody::Bytes(bytes),
|
||||
Self::Body(body) => AnyBody::new_boxed(body),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> MessageBody for AnyBody<B>
|
||||
where
|
||||
B: MessageBody,
|
||||
B::Error: Into<Box<dyn StdError>> + 'static,
|
||||
{
|
||||
type Error = Error;
|
||||
|
||||
fn size(&self) -> BodySize {
|
||||
match self {
|
||||
AnyBody::None => BodySize::None,
|
||||
AnyBody::Empty => BodySize::Empty,
|
||||
AnyBody::Bytes(ref bin) => BodySize::Sized(bin.len() as u64),
|
||||
AnyBody::Message(ref body) => body.size(),
|
||||
AnyBody::Body(ref body) => body.size(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -62,10 +103,9 @@ impl MessageBody for AnyBody {
|
||||
self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<Option<Result<Bytes, Self::Error>>> {
|
||||
match self.get_mut() {
|
||||
AnyBody::None => Poll::Ready(None),
|
||||
AnyBody::Empty => Poll::Ready(None),
|
||||
AnyBody::Bytes(ref mut bin) => {
|
||||
match self.project() {
|
||||
AnyBodyProj::None => Poll::Ready(None),
|
||||
AnyBodyProj::Bytes(bin) => {
|
||||
let len = bin.len();
|
||||
if len == 0 {
|
||||
Poll::Ready(None)
|
||||
@ -74,8 +114,7 @@ impl MessageBody for AnyBody {
|
||||
}
|
||||
}
|
||||
|
||||
AnyBody::Message(body) => body
|
||||
.as_pin_mut()
|
||||
AnyBodyProj::Body(body) => body
|
||||
.poll_next(cx)
|
||||
.map_err(|err| Error::new_body().with_cause(err)),
|
||||
}
|
||||
@ -83,80 +122,88 @@ impl MessageBody for AnyBody {
|
||||
}
|
||||
|
||||
impl PartialEq for AnyBody {
|
||||
fn eq(&self, other: &Body) -> bool {
|
||||
fn eq(&self, other: &AnyBody) -> bool {
|
||||
match *self {
|
||||
AnyBody::None => matches!(*other, AnyBody::None),
|
||||
AnyBody::Empty => matches!(*other, AnyBody::Empty),
|
||||
AnyBody::Bytes(ref b) => match *other {
|
||||
AnyBody::Bytes(ref b2) => b == b2,
|
||||
_ => false,
|
||||
},
|
||||
AnyBody::Message(_) => false,
|
||||
AnyBody::Body(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for AnyBody {
|
||||
impl<S: fmt::Debug> fmt::Debug for AnyBody<S> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match *self {
|
||||
AnyBody::None => write!(f, "AnyBody::None"),
|
||||
AnyBody::Empty => write!(f, "AnyBody::Empty"),
|
||||
AnyBody::Bytes(ref b) => write!(f, "AnyBody::Bytes({:?})", b),
|
||||
AnyBody::Message(_) => write!(f, "AnyBody::Message(_)"),
|
||||
AnyBody::Bytes(ref bytes) => write!(f, "AnyBody::Bytes({:?})", bytes),
|
||||
AnyBody::Body(ref stream) => write!(f, "AnyBody::Message({:?})", stream),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&'static str> for AnyBody {
|
||||
fn from(s: &'static str) -> Body {
|
||||
AnyBody::Bytes(Bytes::from_static(s.as_ref()))
|
||||
impl<B> From<&'static str> for AnyBody<B> {
|
||||
fn from(string: &'static str) -> Self {
|
||||
Self::Bytes(Bytes::from_static(string.as_ref()))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&'static [u8]> for AnyBody {
|
||||
fn from(s: &'static [u8]) -> Body {
|
||||
AnyBody::Bytes(Bytes::from_static(s))
|
||||
impl<B> From<&'static [u8]> for AnyBody<B> {
|
||||
fn from(bytes: &'static [u8]) -> Self {
|
||||
Self::Bytes(Bytes::from_static(bytes))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Vec<u8>> for AnyBody {
|
||||
fn from(vec: Vec<u8>) -> Body {
|
||||
AnyBody::Bytes(Bytes::from(vec))
|
||||
impl<B> From<Vec<u8>> for AnyBody<B> {
|
||||
fn from(vec: Vec<u8>) -> Self {
|
||||
Self::Bytes(Bytes::from(vec))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for AnyBody {
|
||||
fn from(s: String) -> Body {
|
||||
s.into_bytes().into()
|
||||
impl<B> From<String> for AnyBody<B> {
|
||||
fn from(string: String) -> Self {
|
||||
Self::Bytes(Bytes::from(string))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&'_ String> for AnyBody {
|
||||
fn from(s: &String) -> Body {
|
||||
AnyBody::Bytes(Bytes::copy_from_slice(AsRef::<[u8]>::as_ref(&s)))
|
||||
impl<B> From<&'_ String> for AnyBody<B> {
|
||||
fn from(string: &String) -> Self {
|
||||
Self::Bytes(Bytes::copy_from_slice(AsRef::<[u8]>::as_ref(&string)))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Cow<'_, str>> for AnyBody {
|
||||
fn from(s: Cow<'_, str>) -> Body {
|
||||
match s {
|
||||
Cow::Owned(s) => AnyBody::from(s),
|
||||
impl<B> From<Cow<'_, str>> for AnyBody<B> {
|
||||
fn from(string: Cow<'_, str>) -> Self {
|
||||
match string {
|
||||
Cow::Owned(s) => Self::from(s),
|
||||
Cow::Borrowed(s) => {
|
||||
AnyBody::Bytes(Bytes::copy_from_slice(AsRef::<[u8]>::as_ref(s)))
|
||||
Self::Bytes(Bytes::copy_from_slice(AsRef::<[u8]>::as_ref(s)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Bytes> for AnyBody {
|
||||
fn from(s: Bytes) -> Body {
|
||||
AnyBody::Bytes(s)
|
||||
impl<B> From<Bytes> for AnyBody<B> {
|
||||
fn from(bytes: Bytes) -> Self {
|
||||
Self::Bytes(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BytesMut> for AnyBody {
|
||||
fn from(s: BytesMut) -> Body {
|
||||
AnyBody::Bytes(s.freeze())
|
||||
impl<B> From<BytesMut> for AnyBody<B> {
|
||||
fn from(bytes: BytesMut) -> Self {
|
||||
Self::Bytes(bytes.freeze())
|
||||
}
|
||||
}
|
||||
|
||||
impl<S, E> From<SizedStream<S>> for AnyBody<SizedStream<S>>
|
||||
where
|
||||
S: Stream<Item = Result<Bytes, E>> + 'static,
|
||||
E: Into<Box<dyn StdError>> + 'static,
|
||||
{
|
||||
fn from(stream: SizedStream<S>) -> Self {
|
||||
AnyBody::new(stream)
|
||||
}
|
||||
}
|
||||
|
||||
@ -165,8 +212,18 @@ where
|
||||
S: Stream<Item = Result<Bytes, E>> + 'static,
|
||||
E: Into<Box<dyn StdError>> + 'static,
|
||||
{
|
||||
fn from(s: SizedStream<S>) -> Body {
|
||||
AnyBody::from_message(s)
|
||||
fn from(stream: SizedStream<S>) -> Self {
|
||||
AnyBody::new_boxed(stream)
|
||||
}
|
||||
}
|
||||
|
||||
impl<S, E> From<BodyStream<S>> for AnyBody<BodyStream<S>>
|
||||
where
|
||||
S: Stream<Item = Result<Bytes, E>> + 'static,
|
||||
E: Into<Box<dyn StdError>> + 'static,
|
||||
{
|
||||
fn from(stream: BodyStream<S>) -> Self {
|
||||
AnyBody::new(stream)
|
||||
}
|
||||
}
|
||||
|
||||
@ -175,15 +232,15 @@ where
|
||||
S: Stream<Item = Result<Bytes, E>> + 'static,
|
||||
E: Into<Box<dyn StdError>> + 'static,
|
||||
{
|
||||
fn from(s: BodyStream<S>) -> Body {
|
||||
AnyBody::from_message(s)
|
||||
fn from(stream: BodyStream<S>) -> Self {
|
||||
AnyBody::new_boxed(stream)
|
||||
}
|
||||
}
|
||||
|
||||
/// A boxed message body with boxed errors.
|
||||
pub struct BoxAnyBody(Pin<Box<dyn MessageBody<Error = Box<dyn StdError + 'static>>>>);
|
||||
pub struct BoxBody(Pin<Box<dyn MessageBody<Error = Box<dyn StdError>>>>);
|
||||
|
||||
impl BoxAnyBody {
|
||||
impl BoxBody {
|
||||
/// Boxes a `MessageBody` and any errors it generates.
|
||||
pub fn from_body<B>(body: B) -> Self
|
||||
where
|
||||
@ -197,18 +254,18 @@ impl BoxAnyBody {
|
||||
/// Returns a mutable pinned reference to the inner message body type.
|
||||
pub fn as_pin_mut(
|
||||
&mut self,
|
||||
) -> Pin<&mut (dyn MessageBody<Error = Box<dyn StdError + 'static>>)> {
|
||||
) -> Pin<&mut (dyn MessageBody<Error = Box<dyn StdError>>)> {
|
||||
self.0.as_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for BoxAnyBody {
|
||||
impl fmt::Debug for BoxBody {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("BoxAnyBody(dyn MessageBody)")
|
||||
}
|
||||
}
|
||||
|
||||
impl MessageBody for BoxAnyBody {
|
||||
impl MessageBody for BoxBody {
|
||||
type Error = Error;
|
||||
|
||||
fn size(&self) -> BodySize {
|
||||
@ -225,3 +282,52 @@ impl MessageBody for BoxAnyBody {
|
||||
.map_err(|err| Error::new_body().with_cause(err))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::marker::PhantomPinned;
|
||||
|
||||
use static_assertions::{assert_impl_all, assert_not_impl_all};
|
||||
|
||||
use super::*;
|
||||
use crate::body::to_bytes;
|
||||
|
||||
struct PinType(PhantomPinned);
|
||||
|
||||
impl MessageBody for PinType {
|
||||
type Error = crate::Error;
|
||||
|
||||
fn size(&self) -> BodySize {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn poll_next(
|
||||
self: Pin<&mut Self>,
|
||||
_cx: &mut Context<'_>,
|
||||
) -> Poll<Option<Result<Bytes, Self::Error>>> {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
||||
assert_impl_all!(AnyBody<()>: MessageBody, fmt::Debug, Send, Sync, Unpin);
|
||||
assert_impl_all!(AnyBody<AnyBody<()>>: MessageBody, fmt::Debug, Send, Sync, Unpin);
|
||||
assert_impl_all!(AnyBody<Bytes>: MessageBody, fmt::Debug, Send, Sync, Unpin);
|
||||
assert_impl_all!(AnyBody: MessageBody, fmt::Debug, Unpin);
|
||||
assert_impl_all!(BoxBody: MessageBody, fmt::Debug, Unpin);
|
||||
assert_impl_all!(AnyBody<PinType>: MessageBody);
|
||||
|
||||
assert_not_impl_all!(AnyBody: Send, Sync, Unpin);
|
||||
assert_not_impl_all!(BoxBody: Send, Sync, Unpin);
|
||||
assert_not_impl_all!(AnyBody<PinType>: Send, Sync, Unpin);
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn nested_boxed_body() {
|
||||
let body = AnyBody::copy_from_slice(&[1, 2, 3]);
|
||||
let boxed_body = BoxBody::from_body(BoxBody::from_body(body));
|
||||
|
||||
assert_eq!(
|
||||
to_bytes(boxed_body).await.unwrap(),
|
||||
Bytes::from(vec![1, 2, 3]),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -75,10 +75,22 @@ mod tests {
|
||||
use derive_more::{Display, Error};
|
||||
use futures_core::ready;
|
||||
use futures_util::{stream, FutureExt as _};
|
||||
use static_assertions::{assert_impl_all, assert_not_impl_all};
|
||||
|
||||
use super::*;
|
||||
use crate::body::to_bytes;
|
||||
|
||||
assert_impl_all!(BodyStream<stream::Empty<Result<Bytes, crate::Error>>>: MessageBody);
|
||||
assert_impl_all!(BodyStream<stream::Empty<Result<Bytes, &'static str>>>: MessageBody);
|
||||
assert_impl_all!(BodyStream<stream::Repeat<Result<Bytes, &'static str>>>: MessageBody);
|
||||
assert_impl_all!(BodyStream<stream::Empty<Result<Bytes, Infallible>>>: MessageBody);
|
||||
assert_impl_all!(BodyStream<stream::Repeat<Result<Bytes, Infallible>>>: MessageBody);
|
||||
|
||||
assert_not_impl_all!(BodyStream<stream::Empty<Bytes>>: MessageBody);
|
||||
assert_not_impl_all!(BodyStream<stream::Repeat<Bytes>>: MessageBody);
|
||||
// crate::Error is not Clone
|
||||
assert_not_impl_all!(BodyStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn skips_empty_chunks() {
|
||||
let body = BodyStream::new(stream::iter(
|
||||
@ -124,6 +136,30 @@ mod tests {
|
||||
assert!(matches!(to_bytes(body).await, Err(StreamErr)));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn stream_string_error() {
|
||||
// `&'static str` does not impl `Error`
|
||||
// but it does impl `Into<Box<dyn Error>>`
|
||||
|
||||
let body = BodyStream::new(stream::once(async { Err("stringy error") }));
|
||||
assert!(matches!(to_bytes(body).await, Err("stringy error")));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn stream_boxed_error() {
|
||||
// `Box<dyn Error>` does not impl `Error`
|
||||
// but it does impl `Into<Box<dyn Error>>`
|
||||
|
||||
let body = BodyStream::new(stream::once(async {
|
||||
Err(Box::<dyn StdError>::from("stringy error"))
|
||||
}));
|
||||
|
||||
assert_eq!(
|
||||
to_bytes(body).await.unwrap_err().to_string(),
|
||||
"stringy error"
|
||||
);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn stream_delayed_error() {
|
||||
let body =
|
||||
|
@@ -31,7 +31,7 @@ impl MessageBody for () {
    type Error = Infallible;

    fn size(&self) -> BodySize {
        BodySize::Empty
        BodySize::Sized(0)
    }

    fn poll_next(
@ -11,15 +11,14 @@ use futures_core::ready;
|
||||
mod body;
|
||||
mod body_stream;
|
||||
mod message_body;
|
||||
mod response_body;
|
||||
mod size;
|
||||
mod sized_stream;
|
||||
|
||||
pub use self::body::{AnyBody, Body, BoxAnyBody};
|
||||
#[allow(deprecated)]
|
||||
pub use self::body::{AnyBody, Body, BoxBody};
|
||||
pub use self::body_stream::BodyStream;
|
||||
pub use self::message_body::MessageBody;
|
||||
pub(crate) use self::message_body::MessageBodyMapErr;
|
||||
pub use self::response_body::ResponseBody;
|
||||
pub use self::size::BodySize;
|
||||
pub use self::sized_stream::SizedStream;
|
||||
|
||||
@ -29,23 +28,24 @@ pub use self::sized_stream::SizedStream;
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// use actix_http::body::{Body, to_bytes};
|
||||
/// use actix_http::body::{AnyBody, to_bytes};
|
||||
/// use bytes::Bytes;
|
||||
///
|
||||
/// # async fn test_to_bytes() {
|
||||
/// let body = Body::Empty;
|
||||
/// let body = AnyBody::none();
|
||||
/// let bytes = to_bytes(body).await.unwrap();
|
||||
/// assert!(bytes.is_empty());
|
||||
///
|
||||
/// let body = Body::Bytes(Bytes::from_static(b"123"));
|
||||
/// let body = AnyBody::copy_from_slice(b"123");
|
||||
/// let bytes = to_bytes(body).await.unwrap();
|
||||
/// assert_eq!(bytes, b"123"[..]);
|
||||
/// # }
|
||||
/// ```
|
||||
pub async fn to_bytes<B: MessageBody>(body: B) -> Result<Bytes, B::Error> {
|
||||
let cap = match body.size() {
|
||||
BodySize::None | BodySize::Empty | BodySize::Sized(0) => return Ok(Bytes::new()),
|
||||
BodySize::None | BodySize::Sized(0) => return Ok(Bytes::new()),
|
||||
BodySize::Sized(size) => size as usize,
|
||||
// good enough first guess for chunk size
|
||||
BodySize::Stream => 32_768,
|
||||
};
|
||||
|
||||
@ -75,22 +75,25 @@ mod tests {
|
||||
use actix_utils::future::poll_fn;
|
||||
use bytes::{Bytes, BytesMut};
|
||||
|
||||
use super::*;
|
||||
use super::{to_bytes, AnyBody as TestAnyBody, BodySize, MessageBody as _};
|
||||
|
||||
impl Body {
|
||||
impl AnyBody {
|
||||
pub(crate) fn get_ref(&self) -> &[u8] {
|
||||
match *self {
|
||||
Body::Bytes(ref bin) => bin,
|
||||
AnyBody::Bytes(ref bin) => bin,
|
||||
_ => panic!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// AnyBody alias because rustc does not (can not?) infer the default type parameter.
|
||||
type AnyBody = TestAnyBody;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_static_str() {
|
||||
assert_eq!(Body::from("").size(), BodySize::Sized(0));
|
||||
assert_eq!(Body::from("test").size(), BodySize::Sized(4));
|
||||
assert_eq!(Body::from("test").get_ref(), b"test");
|
||||
assert_eq!(AnyBody::from("").size(), BodySize::Sized(0));
|
||||
assert_eq!(AnyBody::from("test").size(), BodySize::Sized(4));
|
||||
assert_eq!(AnyBody::from("test").get_ref(), b"test");
|
||||
|
||||
assert_eq!("test".size(), BodySize::Sized(4));
|
||||
assert_eq!(
|
||||
@ -104,13 +107,16 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_static_bytes() {
|
||||
assert_eq!(Body::from(b"test".as_ref()).size(), BodySize::Sized(4));
|
||||
assert_eq!(Body::from(b"test".as_ref()).get_ref(), b"test");
|
||||
assert_eq!(AnyBody::from(b"test".as_ref()).size(), BodySize::Sized(4));
|
||||
assert_eq!(AnyBody::from(b"test".as_ref()).get_ref(), b"test");
|
||||
assert_eq!(
|
||||
Body::from_slice(b"test".as_ref()).size(),
|
||||
AnyBody::copy_from_slice(b"test".as_ref()).size(),
|
||||
BodySize::Sized(4)
|
||||
);
|
||||
assert_eq!(Body::from_slice(b"test".as_ref()).get_ref(), b"test");
|
||||
assert_eq!(
|
||||
AnyBody::copy_from_slice(b"test".as_ref()).get_ref(),
|
||||
b"test"
|
||||
);
|
||||
let sb = Bytes::from(&b"test"[..]);
|
||||
pin!(sb);
|
||||
|
||||
@ -123,8 +129,8 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_vec() {
|
||||
assert_eq!(Body::from(Vec::from("test")).size(), BodySize::Sized(4));
|
||||
assert_eq!(Body::from(Vec::from("test")).get_ref(), b"test");
|
||||
assert_eq!(AnyBody::from(Vec::from("test")).size(), BodySize::Sized(4));
|
||||
assert_eq!(AnyBody::from(Vec::from("test")).get_ref(), b"test");
|
||||
let test_vec = Vec::from("test");
|
||||
pin!(test_vec);
|
||||
|
||||
@ -141,8 +147,8 @@ mod tests {
|
||||
#[actix_rt::test]
|
||||
async fn test_bytes() {
|
||||
let b = Bytes::from("test");
|
||||
assert_eq!(Body::from(b.clone()).size(), BodySize::Sized(4));
|
||||
assert_eq!(Body::from(b.clone()).get_ref(), b"test");
|
||||
assert_eq!(AnyBody::from(b.clone()).size(), BodySize::Sized(4));
|
||||
assert_eq!(AnyBody::from(b.clone()).get_ref(), b"test");
|
||||
pin!(b);
|
||||
|
||||
assert_eq!(b.size(), BodySize::Sized(4));
|
||||
@ -155,8 +161,8 @@ mod tests {
|
||||
#[actix_rt::test]
|
||||
async fn test_bytes_mut() {
|
||||
let b = BytesMut::from("test");
|
||||
assert_eq!(Body::from(b.clone()).size(), BodySize::Sized(4));
|
||||
assert_eq!(Body::from(b.clone()).get_ref(), b"test");
|
||||
assert_eq!(AnyBody::from(b.clone()).size(), BodySize::Sized(4));
|
||||
assert_eq!(AnyBody::from(b.clone()).get_ref(), b"test");
|
||||
pin!(b);
|
||||
|
||||
assert_eq!(b.size(), BodySize::Sized(4));
|
||||
@ -169,10 +175,10 @@ mod tests {
|
||||
#[actix_rt::test]
|
||||
async fn test_string() {
|
||||
let b = "test".to_owned();
|
||||
assert_eq!(Body::from(b.clone()).size(), BodySize::Sized(4));
|
||||
assert_eq!(Body::from(b.clone()).get_ref(), b"test");
|
||||
assert_eq!(Body::from(&b).size(), BodySize::Sized(4));
|
||||
assert_eq!(Body::from(&b).get_ref(), b"test");
|
||||
assert_eq!(AnyBody::from(b.clone()).size(), BodySize::Sized(4));
|
||||
assert_eq!(AnyBody::from(b.clone()).get_ref(), b"test");
|
||||
assert_eq!(AnyBody::from(&b).size(), BodySize::Sized(4));
|
||||
assert_eq!(AnyBody::from(&b).get_ref(), b"test");
|
||||
pin!(b);
|
||||
|
||||
assert_eq!(b.size(), BodySize::Sized(4));
|
||||
@ -184,7 +190,7 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_unit() {
|
||||
assert_eq!(().size(), BodySize::Empty);
|
||||
assert_eq!(().size(), BodySize::Sized(0));
|
||||
assert!(poll_fn(|cx| Pin::new(&mut ()).poll_next(cx))
|
||||
.await
|
||||
.is_none());
|
||||
@ -194,41 +200,44 @@ mod tests {
|
||||
async fn test_box_and_pin() {
|
||||
let val = Box::new(());
|
||||
pin!(val);
|
||||
assert_eq!(val.size(), BodySize::Empty);
|
||||
assert_eq!(val.size(), BodySize::Sized(0));
|
||||
assert!(poll_fn(|cx| val.as_mut().poll_next(cx)).await.is_none());
|
||||
|
||||
let mut val = Box::pin(());
|
||||
assert_eq!(val.size(), BodySize::Empty);
|
||||
assert_eq!(val.size(), BodySize::Sized(0));
|
||||
assert!(poll_fn(|cx| val.as_mut().poll_next(cx)).await.is_none());
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_body_eq() {
|
||||
assert!(
|
||||
Body::Bytes(Bytes::from_static(b"1"))
|
||||
== Body::Bytes(Bytes::from_static(b"1"))
|
||||
AnyBody::Bytes(Bytes::from_static(b"1"))
|
||||
== AnyBody::Bytes(Bytes::from_static(b"1"))
|
||||
);
|
||||
assert!(Body::Bytes(Bytes::from_static(b"1")) != Body::None);
|
||||
assert!(AnyBody::Bytes(Bytes::from_static(b"1")) != AnyBody::None);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_body_debug() {
|
||||
assert!(format!("{:?}", Body::None).contains("Body::None"));
|
||||
assert!(format!("{:?}", Body::Empty).contains("Body::Empty"));
|
||||
assert!(format!("{:?}", Body::Bytes(Bytes::from_static(b"1"))).contains('1'));
|
||||
assert!(format!("{:?}", AnyBody::None).contains("Body::None"));
|
||||
assert!(format!("{:?}", AnyBody::from(Bytes::from_static(b"1"))).contains('1'));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_serde_json() {
|
||||
use serde_json::{json, Value};
|
||||
assert_eq!(
|
||||
Body::from(serde_json::to_vec(&Value::String("test".to_owned())).unwrap())
|
||||
.size(),
|
||||
AnyBody::from(
|
||||
serde_json::to_vec(&Value::String("test".to_owned())).unwrap()
|
||||
)
|
||||
.size(),
|
||||
BodySize::Sized(6)
|
||||
);
|
||||
assert_eq!(
|
||||
Body::from(serde_json::to_vec(&json!({"test-key":"test-value"})).unwrap())
|
||||
.size(),
|
||||
AnyBody::from(
|
||||
serde_json::to_vec(&json!({"test-key":"test-value"})).unwrap()
|
||||
)
|
||||
.size(),
|
||||
BodySize::Sized(25)
|
||||
);
|
||||
}
|
||||
@ -252,11 +261,11 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_to_bytes() {
|
||||
let body = Body::Empty;
|
||||
let body = AnyBody::empty();
|
||||
let bytes = to_bytes(body).await.unwrap();
|
||||
assert!(bytes.is_empty());
|
||||
|
||||
let body = Body::Bytes(Bytes::from_static(b"123"));
|
||||
let body = AnyBody::copy_from_slice(b"123");
|
||||
let bytes = to_bytes(body).await.unwrap();
|
||||
assert_eq!(bytes, b"123"[..]);
|
||||
}
|
||||
|
@ -1,84 +0,0 @@
|
||||
use std::{
|
||||
mem,
|
||||
pin::Pin,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
|
||||
use bytes::Bytes;
|
||||
use futures_core::Stream;
|
||||
use pin_project::pin_project;
|
||||
|
||||
use crate::error::Error;
|
||||
|
||||
use super::{Body, BodySize, MessageBody};
|
||||
|
||||
#[pin_project(project = ResponseBodyProj)]
|
||||
pub enum ResponseBody<B> {
|
||||
Body(#[pin] B),
|
||||
Other(Body),
|
||||
}
|
||||
|
||||
impl ResponseBody<Body> {
|
||||
pub fn into_body<B>(self) -> ResponseBody<B> {
|
||||
match self {
|
||||
ResponseBody::Body(b) => ResponseBody::Other(b),
|
||||
ResponseBody::Other(b) => ResponseBody::Other(b),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> ResponseBody<B> {
|
||||
pub fn take_body(&mut self) -> ResponseBody<B> {
|
||||
mem::replace(self, ResponseBody::Other(Body::None))
|
||||
}
|
||||
}
|
||||
|
||||
impl<B: MessageBody> ResponseBody<B> {
|
||||
pub fn as_ref(&self) -> Option<&B> {
|
||||
if let ResponseBody::Body(ref b) = self {
|
||||
Some(b)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> MessageBody for ResponseBody<B>
|
||||
where
|
||||
B: MessageBody,
|
||||
B::Error: Into<Error>,
|
||||
{
|
||||
type Error = Error;
|
||||
|
||||
fn size(&self) -> BodySize {
|
||||
match self {
|
||||
ResponseBody::Body(ref body) => body.size(),
|
||||
ResponseBody::Other(ref body) => body.size(),
|
||||
}
|
||||
}
|
||||
|
||||
fn poll_next(
|
||||
self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<Option<Result<Bytes, Self::Error>>> {
|
||||
Stream::poll_next(self, cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl<B> Stream for ResponseBody<B>
|
||||
where
|
||||
B: MessageBody,
|
||||
B::Error: Into<Error>,
|
||||
{
|
||||
type Item = Result<Bytes, Error>;
|
||||
|
||||
fn poll_next(
|
||||
self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<Option<Self::Item>> {
|
||||
match self.project() {
|
||||
ResponseBodyProj::Body(body) => body.poll_next(cx).map_err(Into::into),
|
||||
ResponseBodyProj::Other(body) => Pin::new(body).poll_next(cx),
|
||||
}
|
||||
}
|
||||
}
|
@ -6,14 +6,9 @@ pub enum BodySize {
/// Will skip writing Content-Length header.
None,

/// Zero size body.
///
/// Will write `Content-Length: 0` header.
Empty,

/// Known size body.
///
/// Will write `Content-Length: N` header. `Sized(0)` is treated the same as `Empty`.
/// Will write `Content-Length: N` header.
Sized(u64),

/// Unknown size body.
@ -25,16 +20,17 @@ pub enum BodySize {
impl BodySize {
/// Returns true if size hint indicates no or empty body.
///
/// Streams will return false because it cannot be known without reading the stream.
///
/// ```
/// # use actix_http::body::BodySize;
/// assert!(BodySize::None.is_eof());
/// assert!(BodySize::Empty.is_eof());
/// assert!(BodySize::Sized(0).is_eof());
///
/// assert!(!BodySize::Sized(64).is_eof());
/// assert!(!BodySize::Stream.is_eof());
/// ```
pub fn is_eof(&self) -> bool {
matches!(self, BodySize::None | BodySize::Empty | BodySize::Sized(0))
matches!(self, BodySize::None | BodySize::Sized(0))
}
}
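As a quick orientation for the change above, a minimal sketch of the new zero-size semantics; it assumes the `actix_http::body::{AnyBody, BodySize, MessageBody}` paths used elsewhere in this diff. With the `Empty` variant removed, an empty body reports `Sized(0)` and is still treated as end-of-stream:

    use actix_http::body::{AnyBody, BodySize, MessageBody as _};

    // `AnyBody::empty()` replaces the old `Body::Empty` variant.
    let body = AnyBody::empty();
    assert_eq!(body.size(), BodySize::Sized(0));
    assert!(body.size().is_eof());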
@ -72,10 +72,22 @@ mod tests {
|
||||
use actix_rt::pin;
|
||||
use actix_utils::future::poll_fn;
|
||||
use futures_util::stream;
|
||||
use static_assertions::{assert_impl_all, assert_not_impl_all};
|
||||
|
||||
use super::*;
|
||||
use crate::body::to_bytes;
|
||||
|
||||
assert_impl_all!(SizedStream<stream::Empty<Result<Bytes, crate::Error>>>: MessageBody);
|
||||
assert_impl_all!(SizedStream<stream::Empty<Result<Bytes, &'static str>>>: MessageBody);
|
||||
assert_impl_all!(SizedStream<stream::Repeat<Result<Bytes, &'static str>>>: MessageBody);
|
||||
assert_impl_all!(SizedStream<stream::Empty<Result<Bytes, Infallible>>>: MessageBody);
|
||||
assert_impl_all!(SizedStream<stream::Repeat<Result<Bytes, Infallible>>>: MessageBody);
|
||||
|
||||
assert_not_impl_all!(SizedStream<stream::Empty<Bytes>>: MessageBody);
|
||||
assert_not_impl_all!(SizedStream<stream::Repeat<Bytes>>: MessageBody);
|
||||
// crate::Error is not Clone
|
||||
assert_not_impl_all!(SizedStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn skips_empty_chunks() {
|
||||
let body = SizedStream::new(
|
||||
@ -119,4 +131,37 @@ mod tests {
|
||||
|
||||
assert_eq!(to_bytes(body).await.ok(), Some(Bytes::from("12")));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn stream_string_error() {
|
||||
// `&'static str` does not impl `Error`
|
||||
// but it does impl `Into<Box<dyn Error>>`
|
||||
|
||||
let body = SizedStream::new(0, stream::once(async { Err("stringy error") }));
|
||||
assert_eq!(to_bytes(body).await, Ok(Bytes::new()));
|
||||
|
||||
let body = SizedStream::new(1, stream::once(async { Err("stringy error") }));
|
||||
assert!(matches!(to_bytes(body).await, Err("stringy error")));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn stream_boxed_error() {
|
||||
// `Box<dyn Error>` does not impl `Error`
|
||||
// but it does impl `Into<Box<dyn Error>>`
|
||||
|
||||
let body = SizedStream::new(
|
||||
0,
|
||||
stream::once(async { Err(Box::<dyn StdError>::from("stringy error")) }),
|
||||
);
|
||||
assert_eq!(to_bytes(body).await.unwrap(), Bytes::new());
|
||||
|
||||
let body = SizedStream::new(
|
||||
1,
|
||||
stream::once(async { Err(Box::<dyn StdError>::from("stringy error")) }),
|
||||
);
|
||||
assert_eq!(
|
||||
to_bytes(body).await.unwrap_err().to_string(),
|
||||
"stringy error"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -1,26 +1,29 @@
|
||||
use std::cell::Cell;
|
||||
use std::fmt::Write;
|
||||
use std::rc::Rc;
|
||||
use std::time::Duration;
|
||||
use std::{fmt, net};
|
||||
use std::{
|
||||
cell::Cell,
|
||||
fmt::{self, Write},
|
||||
net,
|
||||
rc::Rc,
|
||||
time::{Duration, SystemTime},
|
||||
};
|
||||
|
||||
use actix_rt::{
|
||||
task::JoinHandle,
|
||||
time::{interval, sleep_until, Instant, Sleep},
|
||||
};
|
||||
use bytes::BytesMut;
|
||||
use time::OffsetDateTime;
|
||||
|
||||
/// "Sun, 06 Nov 1994 08:49:37 GMT".len()
|
||||
const DATE_VALUE_LENGTH: usize = 29;
|
||||
pub(crate) const DATE_VALUE_LENGTH: usize = 29;
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Copy)]
|
||||
/// Server keep-alive setting
|
||||
pub enum KeepAlive {
|
||||
/// Keep alive in seconds
|
||||
Timeout(usize),
|
||||
|
||||
/// Rely on OS to shut down the TCP connection
|
||||
Os,
|
||||
|
||||
/// Disabled
|
||||
Disabled,
|
||||
}
|
||||
@ -206,12 +209,7 @@ impl Date {
|
||||
|
||||
fn update(&mut self) {
|
||||
self.pos = 0;
|
||||
write!(
|
||||
self,
|
||||
"{}",
|
||||
OffsetDateTime::now_utc().format("%a, %d %b %Y %H:%M:%S GMT")
|
||||
)
|
||||
.unwrap();
|
||||
write!(self, "{}", httpdate::fmt_http_date(SystemTime::now())).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
@ -269,11 +267,11 @@ impl DateService {
|
||||
}
|
||||
|
||||
// TODO: move to a util module for testing all spawn handle drop style tasks.
|
||||
#[cfg(test)]
|
||||
/// Test Module for checking the drop state of certain async tasks that are spawned
|
||||
/// with `actix_rt::spawn`
|
||||
///
|
||||
/// The target task must explicitly generate `NotifyOnDrop` when spawning the task.
|
||||
#[cfg(test)]
|
||||
mod notify_on_drop {
|
||||
use std::cell::RefCell;
|
||||
|
||||
@ -283,9 +281,8 @@ mod notify_on_drop {
|
||||
|
||||
/// Check if the spawned task is dropped.
|
||||
///
|
||||
/// # Panic:
|
||||
///
|
||||
/// When there was no `NotifyOnDrop` instance on current thread
|
||||
/// # Panics
|
||||
/// Panics when there was no `NotifyOnDrop` instance on current thread.
|
||||
pub(crate) fn is_dropped() -> bool {
|
||||
NOTIFY_DROPPED.with(|bool| {
|
||||
bool.borrow()
|
||||
|
@ -24,7 +24,7 @@ use flate2::write::{GzEncoder, ZlibEncoder};
|
||||
use zstd::stream::write::Encoder as ZstdEncoder;
|
||||
|
||||
use crate::{
|
||||
body::{Body, BodySize, BoxAnyBody, MessageBody, ResponseBody},
|
||||
body::{AnyBody, BodySize, MessageBody},
|
||||
http::{
|
||||
header::{ContentEncoding, CONTENT_ENCODING},
|
||||
HeaderValue, StatusCode,
|
||||
@ -50,8 +50,8 @@ impl<B: MessageBody> Encoder<B> {
|
||||
pub fn response(
|
||||
encoding: ContentEncoding,
|
||||
head: &mut ResponseHead,
|
||||
body: ResponseBody<B>,
|
||||
) -> ResponseBody<Encoder<B>> {
|
||||
body: AnyBody<B>,
|
||||
) -> AnyBody<Encoder<B>> {
|
||||
let can_encode = !(head.headers().contains_key(&CONTENT_ENCODING)
|
||||
|| head.status == StatusCode::SWITCHING_PROTOCOLS
|
||||
|| head.status == StatusCode::NO_CONTENT
|
||||
@ -59,19 +59,15 @@ impl<B: MessageBody> Encoder<B> {
|
||||
|| encoding == ContentEncoding::Auto);
|
||||
|
||||
let body = match body {
|
||||
ResponseBody::Other(b) => match b {
|
||||
Body::None => return ResponseBody::Other(Body::None),
|
||||
Body::Empty => return ResponseBody::Other(Body::Empty),
|
||||
Body::Bytes(buf) => {
|
||||
if can_encode {
|
||||
EncoderBody::Bytes(buf)
|
||||
} else {
|
||||
return ResponseBody::Other(Body::Bytes(buf));
|
||||
}
|
||||
AnyBody::None => return AnyBody::None,
|
||||
AnyBody::Bytes(buf) => {
|
||||
if can_encode {
|
||||
EncoderBody::Bytes(buf)
|
||||
} else {
|
||||
return AnyBody::Bytes(buf);
|
||||
}
|
||||
Body::Message(stream) => EncoderBody::BoxedStream(stream),
|
||||
},
|
||||
ResponseBody::Body(stream) => EncoderBody::Stream(stream),
|
||||
}
|
||||
AnyBody::Body(body) => EncoderBody::Stream(body),
|
||||
};
|
||||
|
||||
if can_encode {
|
||||
@ -79,7 +75,8 @@ impl<B: MessageBody> Encoder<B> {
|
||||
if let Some(enc) = ContentEncoder::encoder(encoding) {
|
||||
update_head(encoding, head);
|
||||
head.no_chunking(false);
|
||||
return ResponseBody::Body(Encoder {
|
||||
|
||||
return AnyBody::Body(Encoder {
|
||||
body,
|
||||
eof: false,
|
||||
fut: None,
|
||||
@ -88,7 +85,7 @@ impl<B: MessageBody> Encoder<B> {
|
||||
}
|
||||
}
|
||||
|
||||
ResponseBody::Body(Encoder {
|
||||
AnyBody::Body(Encoder {
|
||||
body,
|
||||
eof: false,
|
||||
fut: None,
|
||||
@ -101,7 +98,6 @@ impl<B: MessageBody> Encoder<B> {
|
||||
enum EncoderBody<B> {
|
||||
Bytes(Bytes),
|
||||
Stream(#[pin] B),
|
||||
BoxedStream(BoxAnyBody),
|
||||
}
|
||||
|
||||
impl<B> MessageBody for EncoderBody<B>
|
||||
@ -114,7 +110,6 @@ where
|
||||
match self {
|
||||
EncoderBody::Bytes(ref b) => b.size(),
|
||||
EncoderBody::Stream(ref b) => b.size(),
|
||||
EncoderBody::BoxedStream(ref b) => b.size(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -131,9 +126,6 @@ where
|
||||
}
|
||||
}
|
||||
EncoderBodyProj::Stream(b) => b.poll_next(cx).map_err(EncoderError::Body),
|
||||
EncoderBodyProj::BoxedStream(ref mut b) => {
|
||||
b.as_pin_mut().poll_next(cx).map_err(EncoderError::Boxed)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -349,9 +341,6 @@ pub enum EncoderError<E> {
|
||||
#[display(fmt = "body")]
|
||||
Body(E),
|
||||
|
||||
#[display(fmt = "boxed")]
|
||||
Boxed(Box<dyn StdError>),
|
||||
|
||||
#[display(fmt = "blocking")]
|
||||
Blocking(BlockingError),
|
||||
|
||||
@ -363,7 +352,6 @@ impl<E: StdError + 'static> StdError for EncoderError<E> {
|
||||
fn source(&self) -> Option<&(dyn StdError + 'static)> {
|
||||
match self {
|
||||
EncoderError::Body(err) => Some(err),
|
||||
EncoderError::Boxed(err) => Some(&**err),
|
||||
EncoderError::Blocking(err) => Some(err),
|
||||
EncoderError::Io(err) => Some(err),
|
||||
}
|
||||
|
@ -5,10 +5,7 @@ use std::{error::Error as StdError, fmt, io, str::Utf8Error, string::FromUtf8Err
|
||||
use derive_more::{Display, Error, From};
|
||||
use http::{uri::InvalidUri, StatusCode};
|
||||
|
||||
use crate::{
|
||||
body::{AnyBody, Body},
|
||||
ws, Response,
|
||||
};
|
||||
use crate::{body::AnyBody, ws, Response};
|
||||
|
||||
pub use http::Error as HttpError;
|
||||
|
||||
@ -29,6 +26,11 @@ impl Error {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn with_cause(mut self, cause: impl Into<Box<dyn StdError>>) -> Self {
|
||||
self.inner.cause = Some(cause.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub(crate) fn new_http() -> Self {
|
||||
Self::new(Kind::Http)
|
||||
}
|
||||
@ -49,14 +51,12 @@ impl Error {
|
||||
Self::new(Kind::SendResponse)
|
||||
}
|
||||
|
||||
// TODO: remove allow
|
||||
#[allow(dead_code)]
|
||||
#[allow(unused)] // reserved for future use (TODO: remove allow when being used)
|
||||
pub(crate) fn new_io() -> Self {
|
||||
Self::new(Kind::Io)
|
||||
}
|
||||
|
||||
// used in encoder behind feature flag so ignore unused warning
|
||||
#[allow(unused)]
|
||||
#[allow(unused)] // used in encoder behind feature flag so ignore unused warning
|
||||
pub(crate) fn new_encoder() -> Self {
|
||||
Self::new(Kind::Encoder)
|
||||
}
|
||||
@ -64,26 +64,21 @@ impl Error {
|
||||
pub(crate) fn new_ws() -> Self {
|
||||
Self::new(Kind::Ws)
|
||||
}
|
||||
|
||||
pub(crate) fn with_cause(mut self, cause: impl Into<Box<dyn StdError>>) -> Self {
|
||||
self.inner.cause = Some(cause.into());
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Error> for Response<AnyBody> {
|
||||
impl<B> From<Error> for Response<AnyBody<B>> {
|
||||
fn from(err: Error) -> Self {
|
||||
let status_code = match err.inner.kind {
|
||||
Kind::Parse => StatusCode::BAD_REQUEST,
|
||||
_ => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
};
|
||||
|
||||
Response::new(status_code).set_body(Body::from(err.to_string()))
|
||||
Response::new(status_code).set_body(AnyBody::from(err.to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Display)]
|
||||
pub enum Kind {
|
||||
pub(crate) enum Kind {
|
||||
#[display(fmt = "error processing HTTP")]
|
||||
Http,
|
||||
|
||||
|
@ -120,7 +120,7 @@ impl Decoder for ClientCodec {
|
||||
debug_assert!(!self.inner.payload.is_some(), "Payload decoder is set");
|
||||
|
||||
if let Some((req, payload)) = self.inner.decoder.decode(src)? {
|
||||
if let Some(ctype) = req.ctype() {
|
||||
if let Some(ctype) = req.conn_type() {
|
||||
// do not use peer's keep-alive
|
||||
self.inner.ctype = if ctype == ConnectionType::KeepAlive {
|
||||
self.inner.ctype
|
||||
|
@ -29,7 +29,7 @@ pub struct Codec {
|
||||
decoder: decoder::MessageDecoder<Request>,
|
||||
payload: Option<PayloadDecoder>,
|
||||
version: Version,
|
||||
ctype: ConnectionType,
|
||||
conn_type: ConnectionType,
|
||||
|
||||
// encoder part
|
||||
flags: Flags,
|
||||
@ -65,7 +65,7 @@ impl Codec {
|
||||
decoder: decoder::MessageDecoder::default(),
|
||||
payload: None,
|
||||
version: Version::HTTP_11,
|
||||
ctype: ConnectionType::Close,
|
||||
conn_type: ConnectionType::Close,
|
||||
encoder: encoder::MessageEncoder::default(),
|
||||
}
|
||||
}
|
||||
@ -73,13 +73,13 @@ impl Codec {
|
||||
/// Check if request is upgrade.
|
||||
#[inline]
|
||||
pub fn upgrade(&self) -> bool {
|
||||
self.ctype == ConnectionType::Upgrade
|
||||
self.conn_type == ConnectionType::Upgrade
|
||||
}
|
||||
|
||||
/// Check if last response is keep-alive.
|
||||
#[inline]
|
||||
pub fn keepalive(&self) -> bool {
|
||||
self.ctype == ConnectionType::KeepAlive
|
||||
self.conn_type == ConnectionType::KeepAlive
|
||||
}
|
||||
|
||||
/// Check if keep-alive enabled on server level.
|
||||
@ -124,11 +124,11 @@ impl Decoder for Codec {
|
||||
let head = req.head();
|
||||
self.flags.set(Flags::HEAD, head.method == Method::HEAD);
|
||||
self.version = head.version;
|
||||
self.ctype = head.connection_type();
|
||||
if self.ctype == ConnectionType::KeepAlive
|
||||
self.conn_type = head.connection_type();
|
||||
if self.conn_type == ConnectionType::KeepAlive
|
||||
&& !self.flags.contains(Flags::KEEPALIVE_ENABLED)
|
||||
{
|
||||
self.ctype = ConnectionType::Close
|
||||
self.conn_type = ConnectionType::Close
|
||||
}
|
||||
match payload {
|
||||
PayloadType::None => self.payload = None,
|
||||
@ -159,14 +159,14 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
|
||||
res.head_mut().version = self.version;
|
||||
|
||||
// connection status
|
||||
self.ctype = if let Some(ct) = res.head().ctype() {
|
||||
self.conn_type = if let Some(ct) = res.head().conn_type() {
|
||||
if ct == ConnectionType::KeepAlive {
|
||||
self.ctype
|
||||
self.conn_type
|
||||
} else {
|
||||
ct
|
||||
}
|
||||
} else {
|
||||
self.ctype
|
||||
self.conn_type
|
||||
};
|
||||
|
||||
// encode message
|
||||
@ -177,10 +177,9 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
|
||||
self.flags.contains(Flags::STREAM),
|
||||
self.version,
|
||||
length,
|
||||
self.ctype,
|
||||
self.conn_type,
|
||||
&self.config,
|
||||
)?;
|
||||
// self.headers_size = (dst.len() - len) as u32;
|
||||
}
|
||||
Message::Chunk(Some(bytes)) => {
|
||||
self.encoder.encode_chunk(bytes.as_ref(), dst)?;
|
||||
@ -189,6 +188,7 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
|
||||
self.encoder.encode_eof(dst)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
@ -174,7 +174,7 @@ pub(crate) trait MessageType: Sized {
|
||||
self.set_expect()
|
||||
}
|
||||
|
||||
// https://tools.ietf.org/html/rfc7230#section-3.3.3
|
||||
// https://datatracker.ietf.org/doc/html/rfc7230#section-3.3.3
|
||||
if chunked {
|
||||
// Chunked encoding
|
||||
Ok(PayloadLength::Payload(PayloadType::Payload(
|
||||
|
@ -303,9 +303,9 @@ where
|
||||
body: &impl MessageBody,
|
||||
) -> Result<BodySize, DispatchError> {
|
||||
let size = body.size();
|
||||
let mut this = self.project();
|
||||
let this = self.project();
|
||||
this.codec
|
||||
.encode(Message::Item((message, size)), &mut this.write_buf)
|
||||
.encode(Message::Item((message, size)), this.write_buf)
|
||||
.map_err(|err| {
|
||||
if let Some(mut payload) = this.payload.take() {
|
||||
payload.set_error(PayloadError::Incomplete(None));
|
||||
@ -325,7 +325,7 @@ where
|
||||
) -> Result<(), DispatchError> {
|
||||
let size = self.as_mut().send_response_inner(message, &body)?;
|
||||
let state = match size {
|
||||
BodySize::None | BodySize::Empty => State::None,
|
||||
BodySize::None | BodySize::Sized(0) => State::None,
|
||||
_ => State::SendPayload(body),
|
||||
};
|
||||
self.project().state.set(state);
|
||||
@ -339,7 +339,7 @@ where
|
||||
) -> Result<(), DispatchError> {
|
||||
let size = self.as_mut().send_response_inner(message, &body)?;
|
||||
let state = match size {
|
||||
BodySize::None | BodySize::Empty => State::None,
|
||||
BodySize::None | BodySize::Sized(0) => State::None,
|
||||
_ => State::SendErrorPayload(body),
|
||||
};
|
||||
self.project().state.set(state);
|
||||
@ -380,7 +380,7 @@ where
|
||||
// send_response would update InnerDispatcher state to SendPayload or
|
||||
// None(If response body is empty).
|
||||
// continue loop to poll it.
|
||||
self.as_mut().send_error_response(res, AnyBody::Empty)?;
|
||||
self.as_mut().send_error_response(res, AnyBody::empty())?;
|
||||
}
|
||||
|
||||
// return with upgrade request and poll it exclusively.
|
||||
@ -425,13 +425,13 @@ where
|
||||
Poll::Ready(Some(Ok(item))) => {
|
||||
this.codec.encode(
|
||||
Message::Chunk(Some(item)),
|
||||
&mut this.write_buf,
|
||||
this.write_buf,
|
||||
)?;
|
||||
}
|
||||
|
||||
Poll::Ready(None) => {
|
||||
this.codec
|
||||
.encode(Message::Chunk(None), &mut this.write_buf)?;
|
||||
.encode(Message::Chunk(None), this.write_buf)?;
|
||||
// payload stream finished.
|
||||
// set state to None and handle next message
|
||||
this.state.set(State::None);
|
||||
@ -460,13 +460,13 @@ where
|
||||
Poll::Ready(Some(Ok(item))) => {
|
||||
this.codec.encode(
|
||||
Message::Chunk(Some(item)),
|
||||
&mut this.write_buf,
|
||||
this.write_buf,
|
||||
)?;
|
||||
}
|
||||
|
||||
Poll::Ready(None) => {
|
||||
this.codec
|
||||
.encode(Message::Chunk(None), &mut this.write_buf)?;
|
||||
.encode(Message::Chunk(None), this.write_buf)?;
|
||||
// payload stream finished.
|
||||
// set state to None and handle next message
|
||||
this.state.set(State::None);
|
||||
@ -592,7 +592,7 @@ where
|
||||
let mut updated = false;
|
||||
let mut this = self.as_mut().project();
|
||||
loop {
|
||||
match this.codec.decode(&mut this.read_buf) {
|
||||
match this.codec.decode(this.read_buf) {
|
||||
Ok(Some(msg)) => {
|
||||
updated = true;
|
||||
this.flags.insert(Flags::STARTED);
|
||||
@ -772,7 +772,7 @@ where
|
||||
trace!("Slow request timeout");
|
||||
let _ = self.as_mut().send_error_response(
|
||||
Response::with_body(StatusCode::REQUEST_TIMEOUT, ()),
|
||||
AnyBody::Empty,
|
||||
AnyBody::empty(),
|
||||
);
|
||||
this = self.project();
|
||||
this.flags.insert(Flags::STARTED | Flags::SHUTDOWN);
|
||||
@ -1077,7 +1077,7 @@ mod tests {
|
||||
fn_service(|req: Request| {
|
||||
let path = req.path().as_bytes();
|
||||
ready(Ok::<_, Error>(
|
||||
Response::ok().set_body(AnyBody::from_slice(path)),
|
||||
Response::ok().set_body(AnyBody::copy_from_slice(path)),
|
||||
))
|
||||
})
|
||||
}
|
||||
|
@ -20,6 +20,7 @@ const AVERAGE_HEADER_SIZE: usize = 30;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct MessageEncoder<T: MessageType> {
|
||||
#[allow(dead_code)]
|
||||
pub length: BodySize,
|
||||
pub te: TransferEncoding,
|
||||
_phantom: PhantomData<T>,
|
||||
@ -55,7 +56,7 @@ pub(crate) trait MessageType: Sized {
|
||||
dst: &mut BytesMut,
|
||||
version: Version,
|
||||
mut length: BodySize,
|
||||
ctype: ConnectionType,
|
||||
conn_type: ConnectionType,
|
||||
config: &ServiceConfig,
|
||||
) -> io::Result<()> {
|
||||
let chunked = self.chunked();
|
||||
@ -70,14 +71,24 @@ pub(crate) trait MessageType: Sized {
|
||||
| StatusCode::PROCESSING
|
||||
| StatusCode::NO_CONTENT => {
|
||||
// skip content-length and transfer-encoding headers
|
||||
// See https://tools.ietf.org/html/rfc7230#section-3.3.1
|
||||
// and https://tools.ietf.org/html/rfc7230#section-3.3.2
|
||||
// see https://datatracker.ietf.org/doc/html/rfc7230#section-3.3.1
|
||||
// and https://datatracker.ietf.org/doc/html/rfc7230#section-3.3.2
|
||||
skip_len = true;
|
||||
length = BodySize::None
|
||||
}
|
||||
|
||||
StatusCode::NOT_MODIFIED => {
|
||||
// 304 responses should never have a body but should retain a manually set
|
||||
// content-length header
|
||||
// see https://datatracker.ietf.org/doc/html/rfc7232#section-4.1
|
||||
skip_len = false;
|
||||
length = BodySize::None;
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
match length {
|
||||
BodySize::Stream => {
|
||||
if chunked {
|
||||
@ -92,19 +103,16 @@ pub(crate) trait MessageType: Sized {
|
||||
dst.put_slice(b"\r\n");
|
||||
}
|
||||
}
|
||||
BodySize::Empty => {
|
||||
if camel_case {
|
||||
dst.put_slice(b"\r\nContent-Length: 0\r\n");
|
||||
} else {
|
||||
dst.put_slice(b"\r\ncontent-length: 0\r\n");
|
||||
}
|
||||
BodySize::Sized(0) if camel_case => {
|
||||
dst.put_slice(b"\r\nContent-Length: 0\r\n")
|
||||
}
|
||||
BodySize::Sized(0) => dst.put_slice(b"\r\ncontent-length: 0\r\n"),
|
||||
BodySize::Sized(len) => helpers::write_content_length(len, dst),
|
||||
BodySize::None => dst.put_slice(b"\r\n"),
|
||||
}
|
||||
|
||||
// Connection
|
||||
match ctype {
|
||||
match conn_type {
|
||||
ConnectionType::Upgrade => dst.put_slice(b"connection: upgrade\r\n"),
|
||||
ConnectionType::KeepAlive if version < Version::HTTP_11 => {
|
||||
if camel_case {
|
||||
@ -329,13 +337,13 @@ impl<T: MessageType> MessageEncoder<T> {
|
||||
stream: bool,
|
||||
version: Version,
|
||||
length: BodySize,
|
||||
ctype: ConnectionType,
|
||||
conn_type: ConnectionType,
|
||||
config: &ServiceConfig,
|
||||
) -> io::Result<()> {
|
||||
// transfer encoding
|
||||
if !head {
|
||||
self.te = match length {
|
||||
BodySize::Empty => TransferEncoding::empty(),
|
||||
BodySize::Sized(0) => TransferEncoding::empty(),
|
||||
BodySize::Sized(len) => TransferEncoding::length(len),
|
||||
BodySize::Stream => {
|
||||
if message.chunked() && !stream {
|
||||
@ -351,7 +359,7 @@ impl<T: MessageType> MessageEncoder<T> {
|
||||
}
|
||||
|
||||
message.encode_status(dst)?;
|
||||
message.encode_headers(dst, version, length, ctype, config)
|
||||
message.encode_headers(dst, version, length, conn_type, config)
|
||||
}
|
||||
}
|
||||
|
||||
@ -365,10 +373,12 @@ pub(crate) struct TransferEncoding {
|
||||
enum TransferEncodingKind {
|
||||
/// An Encoder for when Transfer-Encoding includes `chunked`.
|
||||
Chunked(bool),
|
||||
|
||||
/// An Encoder for when Content-Length is set.
|
||||
///
|
||||
/// Enforces that the body is not longer than the Content-Length header.
|
||||
Length(u64),
|
||||
|
||||
/// An Encoder for when Content-Length is not known.
|
||||
///
|
||||
/// Application decides when to stop writing.
|
||||
@ -552,7 +562,7 @@ mod tests {
|
||||
let _ = head.encode_headers(
|
||||
&mut bytes,
|
||||
Version::HTTP_11,
|
||||
BodySize::Empty,
|
||||
BodySize::Sized(0),
|
||||
ConnectionType::Close,
|
||||
&ServiceConfig::default(),
|
||||
);
|
||||
@ -623,7 +633,7 @@ mod tests {
|
||||
let _ = head.encode_headers(
|
||||
&mut bytes,
|
||||
Version::HTTP_11,
|
||||
BodySize::Empty,
|
||||
BodySize::Sized(0),
|
||||
ConnectionType::Close,
|
||||
&ServiceConfig::default(),
|
||||
);
|
||||
|
@ -102,9 +102,11 @@ where
|
||||
mod openssl {
|
||||
use super::*;
|
||||
|
||||
use actix_service::ServiceFactoryExt;
|
||||
use actix_tls::accept::{
|
||||
openssl::{Acceptor, SslAcceptor, SslError, TlsStream},
|
||||
openssl::{
|
||||
reexports::{Error as SslError, SslAcceptor},
|
||||
Acceptor, TlsStream,
|
||||
},
|
||||
TlsError,
|
||||
};
|
||||
|
||||
@ -133,7 +135,7 @@ mod openssl {
|
||||
U::Error: fmt::Display + Into<Response<AnyBody>>,
|
||||
U::InitError: fmt::Debug,
|
||||
{
|
||||
/// Create openssl based service
|
||||
/// Create OpenSSL based service.
|
||||
pub fn openssl(
|
||||
self,
|
||||
acceptor: SslAcceptor,
|
||||
@ -145,11 +147,13 @@ mod openssl {
|
||||
InitError = (),
|
||||
> {
|
||||
Acceptor::new(acceptor)
|
||||
.map_err(TlsError::Tls)
|
||||
.map_init_err(|_| panic!())
|
||||
.and_then(|io: TlsStream<TcpStream>| {
|
||||
.map_init_err(|_| {
|
||||
unreachable!("TLS acceptor service factory does not error on init")
|
||||
})
|
||||
.map_err(TlsError::into_service_error)
|
||||
.map(|io: TlsStream<TcpStream>| {
|
||||
let peer_addr = io.get_ref().peer_addr().ok();
|
||||
ready(Ok((io, peer_addr)))
|
||||
(io, peer_addr)
|
||||
})
|
||||
.and_then(self.map_err(TlsError::Service))
|
||||
}
|
||||
@ -158,16 +162,17 @@ mod openssl {
|
||||
|
||||
#[cfg(feature = "rustls")]
|
||||
mod rustls {
|
||||
use super::*;
|
||||
|
||||
use std::io;
|
||||
|
||||
use actix_service::ServiceFactoryExt;
|
||||
use actix_service::ServiceFactoryExt as _;
|
||||
use actix_tls::accept::{
|
||||
rustls::{Acceptor, ServerConfig, TlsStream},
|
||||
rustls::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||
TlsError,
|
||||
};
|
||||
|
||||
use super::*;
|
||||
|
||||
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
|
||||
where
|
||||
S: ServiceFactory<Request, Config = ()>,
|
||||
@ -193,7 +198,7 @@ mod rustls {
|
||||
U::Error: fmt::Display + Into<Response<AnyBody>>,
|
||||
U::InitError: fmt::Debug,
|
||||
{
|
||||
/// Create rustls based service
|
||||
/// Create Rustls based service.
|
||||
pub fn rustls(
|
||||
self,
|
||||
config: ServerConfig,
|
||||
@ -205,11 +210,13 @@ mod rustls {
|
||||
InitError = (),
|
||||
> {
|
||||
Acceptor::new(config)
|
||||
.map_err(TlsError::Tls)
|
||||
.map_init_err(|_| panic!())
|
||||
.and_then(|io: TlsStream<TcpStream>| {
|
||||
.map_init_err(|_| {
|
||||
unreachable!("TLS acceptor service factory does not error on init")
|
||||
})
|
||||
.map_err(TlsError::into_service_error)
|
||||
.map(|io: TlsStream<TcpStream>| {
|
||||
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||
ready(Ok((io, peer_addr)))
|
||||
(io, peer_addr)
|
||||
})
|
||||
.and_then(self.map_err(TlsError::Service))
|
||||
}
|
||||
|
@ -10,11 +10,15 @@ use std::{
|
||||
};
|
||||
|
||||
use actix_codec::{AsyncRead, AsyncWrite};
|
||||
use actix_rt::time::Sleep;
|
||||
use actix_service::Service;
|
||||
use actix_utils::future::poll_fn;
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use futures_core::ready;
|
||||
use h2::server::{Connection, SendResponse};
|
||||
use h2::{
|
||||
server::{Connection, SendResponse},
|
||||
Ping, PingPong,
|
||||
};
|
||||
use http::header::{HeaderValue, CONNECTION, CONTENT_LENGTH, DATE, TRANSFER_ENCODING};
|
||||
use log::{error, trace};
|
||||
use pin_project_lite::pin_project;
|
||||
@ -36,29 +40,46 @@ pin_project! {
|
||||
on_connect_data: OnConnectData,
|
||||
config: ServiceConfig,
|
||||
peer_addr: Option<net::SocketAddr>,
|
||||
_phantom: PhantomData<B>,
|
||||
ping_pong: Option<H2PingPong>,
|
||||
_phantom: PhantomData<B>
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S, B, X, U> Dispatcher<T, S, B, X, U> {
|
||||
impl<T, S, B, X, U> Dispatcher<T, S, B, X, U>
|
||||
where
|
||||
T: AsyncRead + AsyncWrite + Unpin,
|
||||
{
|
||||
pub(crate) fn new(
|
||||
flow: Rc<HttpFlow<S, X, U>>,
|
||||
connection: Connection<T, Bytes>,
|
||||
mut connection: Connection<T, Bytes>,
|
||||
on_connect_data: OnConnectData,
|
||||
config: ServiceConfig,
|
||||
peer_addr: Option<net::SocketAddr>,
|
||||
) -> Self {
|
||||
let ping_pong = config.keep_alive_timer().map(|timer| H2PingPong {
|
||||
timer: Box::pin(timer),
|
||||
on_flight: false,
|
||||
ping_pong: connection.ping_pong().unwrap(),
|
||||
});
|
||||
|
||||
Self {
|
||||
flow,
|
||||
config,
|
||||
peer_addr,
|
||||
connection,
|
||||
on_connect_data,
|
||||
ping_pong,
|
||||
_phantom: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct H2PingPong {
|
||||
timer: Pin<Box<Sleep>>,
|
||||
on_flight: bool,
|
||||
ping_pong: PingPong,
|
||||
}
|
||||
|
||||
impl<T, S, B, X, U> Future for Dispatcher<T, S, B, X, U>
|
||||
where
|
||||
T: AsyncRead + AsyncWrite + Unpin,
|
||||
@ -77,54 +98,92 @@ where
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.get_mut();
|
||||
|
||||
while let Some((req, tx)) =
|
||||
ready!(Pin::new(&mut this.connection).poll_accept(cx)?)
|
||||
{
|
||||
let (parts, body) = req.into_parts();
|
||||
let pl = crate::h2::Payload::new(body);
|
||||
let pl = Payload::<crate::payload::PayloadStream>::H2(pl);
|
||||
let mut req = Request::with_payload(pl);
|
||||
loop {
|
||||
match Pin::new(&mut this.connection).poll_accept(cx)? {
|
||||
Poll::Ready(Some((req, tx))) => {
|
||||
let (parts, body) = req.into_parts();
|
||||
let pl = crate::h2::Payload::new(body);
|
||||
let pl = Payload::<crate::payload::PayloadStream>::H2(pl);
|
||||
let mut req = Request::with_payload(pl);
|
||||
|
||||
let head = req.head_mut();
|
||||
head.uri = parts.uri;
|
||||
head.method = parts.method;
|
||||
head.version = parts.version;
|
||||
head.headers = parts.headers.into();
|
||||
head.peer_addr = this.peer_addr;
|
||||
let head = req.head_mut();
|
||||
head.uri = parts.uri;
|
||||
head.method = parts.method;
|
||||
head.version = parts.version;
|
||||
head.headers = parts.headers.into();
|
||||
head.peer_addr = this.peer_addr;
|
||||
|
||||
// merge on_connect_ext data into request extensions
|
||||
this.on_connect_data.merge_into(&mut req);
|
||||
// merge on_connect_ext data into request extensions
|
||||
this.on_connect_data.merge_into(&mut req);
|
||||
|
||||
let fut = this.flow.service.call(req);
|
||||
let config = this.config.clone();
|
||||
let fut = this.flow.service.call(req);
|
||||
let config = this.config.clone();
|
||||
|
||||
// multiplex request handling with spawn task
|
||||
actix_rt::spawn(async move {
|
||||
// resolve service call and send response.
|
||||
let res = match fut.await {
|
||||
Ok(res) => handle_response(res.into(), tx, config).await,
|
||||
Err(err) => {
|
||||
let res: Response<AnyBody> = err.into();
|
||||
handle_response(res, tx, config).await
|
||||
}
|
||||
};
|
||||
// multiplex request handling with spawn task
|
||||
actix_rt::spawn(async move {
|
||||
// resolve service call and send response.
|
||||
let res = match fut.await {
|
||||
Ok(res) => handle_response(res.into(), tx, config).await,
|
||||
Err(err) => {
|
||||
let res: Response<AnyBody> = err.into();
|
||||
handle_response(res, tx, config).await
|
||||
}
|
||||
};
|
||||
|
||||
// log error.
|
||||
if let Err(err) = res {
|
||||
match err {
|
||||
DispatchError::SendResponse(err) => {
|
||||
trace!("Error sending HTTP/2 response: {:?}", err)
|
||||
// log error.
|
||||
if let Err(err) = res {
|
||||
match err {
|
||||
DispatchError::SendResponse(err) => {
|
||||
trace!("Error sending HTTP/2 response: {:?}", err)
|
||||
}
|
||||
DispatchError::SendData(err) => warn!("{:?}", err),
|
||||
DispatchError::ResponseBody(err) => {
|
||||
error!("Response payload stream error: {:?}", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
DispatchError::SendData(err) => warn!("{:?}", err),
|
||||
DispatchError::ResponseBody(err) => {
|
||||
error!("Response payload stream error: {:?}", err)
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
Poll::Ready(None) => return Poll::Ready(Ok(())),
|
||||
Poll::Pending => match this.ping_pong.as_mut() {
|
||||
Some(ping_pong) => loop {
|
||||
if ping_pong.on_flight {
|
||||
// When a ping is in flight, poll both the pong and the keep-alive timer.
// On a successful pong, reset the keep-alive timer to determine the next ping time.
|
||||
match ping_pong.ping_pong.poll_pong(cx)? {
|
||||
Poll::Ready(_) => {
|
||||
ping_pong.on_flight = false;
|
||||
|
||||
Poll::Ready(Ok(()))
|
||||
let dead_line =
|
||||
this.config.keep_alive_expire().unwrap();
|
||||
ping_pong.timer.as_mut().reset(dead_line);
|
||||
}
|
||||
Poll::Pending => {
|
||||
return ping_pong
|
||||
.timer
|
||||
.as_mut()
|
||||
.poll(cx)
|
||||
.map(|_| Ok(()))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// When no ping is in flight, the keep-alive timer waits for the next ping time,
// so at this point it effectively serves as an interval.
|
||||
ready!(ping_pong.timer.as_mut().poll(cx));
|
||||
|
||||
ping_pong.ping_pong.send_ping(Ping::opaque())?;
|
||||
|
||||
let dead_line = this.config.keep_alive_expire().unwrap();
|
||||
ping_pong.timer.as_mut().reset(dead_line);
|
||||
|
||||
ping_pong.on_flight = true;
|
||||
}
|
||||
},
|
||||
None => return Poll::Pending,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -226,9 +285,11 @@ fn prepare_response(
|
||||
|
||||
let _ = match size {
|
||||
BodySize::None | BodySize::Stream => None,
|
||||
BodySize::Empty => res
|
||||
|
||||
BodySize::Sized(0) => res
|
||||
.headers_mut()
|
||||
.insert(CONTENT_LENGTH, HeaderValue::from_static("0")),
|
||||
|
||||
BodySize::Sized(len) => {
|
||||
let mut buf = itoa::Buffer::new();
|
||||
|
||||
@ -243,7 +304,7 @@ fn prepare_response(
|
||||
for (key, value) in head.headers.iter() {
|
||||
match *key {
|
||||
// TODO: consider skipping other headers according to:
|
||||
// https://tools.ietf.org/html/rfc7540#section-8.1.2.2
|
||||
// https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.2
|
||||
// omit HTTP/1.x only headers
|
||||
CONNECTION | TRANSFER_ENCODING => continue,
|
||||
CONTENT_LENGTH if skip_len => continue,
|
||||
|
@ -101,9 +101,14 @@ where
|
||||
|
||||
#[cfg(feature = "openssl")]
|
||||
mod openssl {
|
||||
use actix_service::{fn_factory, fn_service, ServiceFactoryExt};
|
||||
use actix_tls::accept::openssl::{Acceptor, SslAcceptor, SslError, TlsStream};
|
||||
use actix_tls::accept::TlsError;
|
||||
use actix_service::ServiceFactoryExt as _;
|
||||
use actix_tls::accept::{
|
||||
openssl::{
|
||||
reexports::{Error as SslError, SslAcceptor},
|
||||
Acceptor, TlsStream,
|
||||
},
|
||||
TlsError,
|
||||
};
|
||||
|
||||
use super::*;
|
||||
|
||||
@ -118,7 +123,7 @@ mod openssl {
|
||||
B: MessageBody + 'static,
|
||||
B::Error: Into<Box<dyn StdError>>,
|
||||
{
|
||||
/// Create OpenSSL based service
|
||||
/// Create OpenSSL based service.
|
||||
pub fn openssl(
|
||||
self,
|
||||
acceptor: SslAcceptor,
|
||||
@ -130,16 +135,14 @@ mod openssl {
|
||||
InitError = S::InitError,
|
||||
> {
|
||||
Acceptor::new(acceptor)
|
||||
.map_err(TlsError::Tls)
|
||||
.map_init_err(|_| panic!())
|
||||
.and_then(fn_factory(|| {
|
||||
ready(Ok::<_, S::InitError>(fn_service(
|
||||
|io: TlsStream<TcpStream>| {
|
||||
let peer_addr = io.get_ref().peer_addr().ok();
|
||||
ready(Ok((io, peer_addr)))
|
||||
},
|
||||
)))
|
||||
}))
|
||||
.map_init_err(|_| {
|
||||
unreachable!("TLS acceptor service factory does not error on init")
|
||||
})
|
||||
.map_err(TlsError::into_service_error)
|
||||
.map(|io: TlsStream<TcpStream>| {
|
||||
let peer_addr = io.get_ref().peer_addr().ok();
|
||||
(io, peer_addr)
|
||||
})
|
||||
.and_then(self.map_err(TlsError::Service))
|
||||
}
|
||||
}
|
||||
@ -147,12 +150,16 @@ mod openssl {
|
||||
|
||||
#[cfg(feature = "rustls")]
|
||||
mod rustls {
|
||||
use super::*;
|
||||
use actix_service::ServiceFactoryExt;
|
||||
use actix_tls::accept::rustls::{Acceptor, ServerConfig, TlsStream};
|
||||
use actix_tls::accept::TlsError;
|
||||
use std::io;
|
||||
|
||||
use actix_service::ServiceFactoryExt as _;
|
||||
use actix_tls::accept::{
|
||||
rustls::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||
TlsError,
|
||||
};
|
||||
|
||||
use super::*;
|
||||
|
||||
impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
|
||||
where
|
||||
S: ServiceFactory<Request, Config = ()>,
|
||||
@ -164,7 +171,7 @@ mod rustls {
|
||||
B: MessageBody + 'static,
|
||||
B::Error: Into<Box<dyn StdError>>,
|
||||
{
|
||||
/// Create Rustls based service
|
||||
/// Create Rustls based service.
|
||||
pub fn rustls(
|
||||
self,
|
||||
mut config: ServerConfig,
|
||||
@ -177,19 +184,17 @@ mod rustls {
|
||||
> {
|
||||
let mut protos = vec![b"h2".to_vec()];
|
||||
protos.extend_from_slice(&config.alpn_protocols);
|
||||
config.set_protocols(&protos);
|
||||
config.alpn_protocols = protos;
|
||||
|
||||
Acceptor::new(config)
|
||||
.map_err(TlsError::Tls)
|
||||
.map_init_err(|_| panic!())
|
||||
.and_then(fn_factory(|| {
|
||||
ready(Ok::<_, S::InitError>(fn_service(
|
||||
|io: TlsStream<TcpStream>| {
|
||||
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||
ready(Ok((io, peer_addr)))
|
||||
},
|
||||
)))
|
||||
}))
|
||||
.map_init_err(|_| {
|
||||
unreachable!("TLS acceptor service factory does not error on init")
|
||||
})
|
||||
.map_err(TlsError::into_service_error)
|
||||
.map(|io: TlsStream<TcpStream>| {
|
||||
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||
(io, peer_addr)
|
||||
})
|
||||
.and_then(self.map_err(TlsError::Service))
|
||||
}
|
||||
}
|
||||
|
@ -1,11 +1,12 @@
|
||||
//! Helper trait for types that can be effectively borrowed as a [HeaderValue].
|
||||
//!
|
||||
//! [HeaderValue]: crate::http::HeaderValue
|
||||
//! Sealed [`AsHeaderName`] trait and implementations.
|
||||
|
||||
use std::{borrow::Cow, str::FromStr};
|
||||
use std::{borrow::Cow, str::FromStr as _};
|
||||
|
||||
use http::header::{HeaderName, InvalidHeaderName};
|
||||
|
||||
/// Sealed trait implemented for types that can be effectively borrowed as a [`HeaderValue`].
|
||||
///
|
||||
/// [`HeaderValue`]: crate::http::HeaderValue
|
||||
pub trait AsHeaderName: Sealed {}
|
||||
|
||||
pub struct Seal;
|
||||
|
@ -1,4 +1,6 @@
|
||||
use std::convert::TryFrom;
|
||||
//! [`IntoHeaderPair`] trait and implementations.
|
||||
|
||||
use std::convert::TryFrom as _;
|
||||
|
||||
use http::{
|
||||
header::{HeaderName, InvalidHeaderName, InvalidHeaderValue},
|
||||
@ -7,7 +9,10 @@ use http::{
|
||||
|
||||
use super::{Header, IntoHeaderValue};
|
||||
|
||||
/// Transforms structures into header K/V pairs for inserting into `HeaderMap`s.
|
||||
/// An interface for types that can be converted into a [`HeaderName`]/[`HeaderValue`] pair for
|
||||
/// insertion into a [`HeaderMap`].
|
||||
///
|
||||
/// [`HeaderMap`]: crate::http::HeaderMap
|
||||
pub trait IntoHeaderPair: Sized {
|
||||
type Error: Into<HttpError>;
|
||||
|
||||
|
@ -1,10 +1,12 @@
|
||||
use std::convert::TryFrom;
|
||||
//! [`IntoHeaderValue`] trait and implementations.
|
||||
|
||||
use std::convert::TryFrom as _;
|
||||
|
||||
use bytes::Bytes;
|
||||
use http::{header::InvalidHeaderValue, Error as HttpError, HeaderValue};
|
||||
use mime::Mime;
|
||||
|
||||
/// A trait for any object that can be Converted to a `HeaderValue`
|
||||
/// An interface for types that can be converted into a [`HeaderValue`].
|
||||
pub trait IntoHeaderValue: Sized {
|
||||
/// The type returned in the event of a conversion error.
|
||||
type Error: Into<HttpError>;
|
||||
|
@ -1,6 +1,6 @@
|
||||
//! A multi-value [`HeaderMap`] and its iterators.
|
||||
|
||||
use std::{borrow::Cow, collections::hash_map, ops};
|
||||
use std::{borrow::Cow, collections::hash_map, iter, ops};
|
||||
|
||||
use ahash::AHashMap;
|
||||
use http::header::{HeaderName, HeaderValue};
|
||||
@ -288,7 +288,7 @@ impl HeaderMap {
|
||||
/// Returns an iterator over all values associated with a header name.
|
||||
///
|
||||
/// The returned iterator does not incur any allocations and will yield no items if there are no
|
||||
/// values associated with the key. Iteration order is **not** guaranteed to be the same as
|
||||
/// values associated with the key. Iteration order is guaranteed to be the same as
|
||||
/// insertion order.
|
||||
///
|
||||
/// # Examples
|
||||
@ -355,6 +355,19 @@ impl HeaderMap {
|
||||
///
|
||||
/// assert_eq!(map.len(), 1);
|
||||
/// ```
|
||||
///
|
||||
/// A convenience method is provided on the returned iterator to check if the insertion replaced
|
||||
/// any values.
|
||||
/// ```
|
||||
/// # use actix_http::http::{header, HeaderMap, HeaderValue};
|
||||
/// let mut map = HeaderMap::new();
|
||||
///
|
||||
/// let removed = map.insert(header::ACCEPT, HeaderValue::from_static("text/plain"));
|
||||
/// assert!(removed.is_empty());
|
||||
///
|
||||
/// let removed = map.insert(header::ACCEPT, HeaderValue::from_static("text/html"));
|
||||
/// assert!(!removed.is_empty());
|
||||
/// ```
|
||||
pub fn insert(&mut self, key: HeaderName, val: HeaderValue) -> Removed {
|
||||
let value = self.inner.insert(key, Value::one(val));
|
||||
Removed::new(value)
|
||||
@ -393,6 +406,9 @@ impl HeaderMap {
|
||||
|
||||
/// Removes all headers for a particular header name from the map.
|
||||
///
|
||||
/// Providing an invalid header name (as a string argument) will have no effect and return
|
||||
/// without error.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// # use actix_http::http::{header, HeaderMap, HeaderValue};
|
||||
@ -409,6 +425,21 @@ impl HeaderMap {
|
||||
/// assert!(removed.next().is_none());
|
||||
///
|
||||
/// assert!(map.is_empty());
|
||||
/// ```
|
||||
///
|
||||
/// A convenience method is provided on the returned iterator to check if the `remove` call
|
||||
/// actually removed any values.
|
||||
/// ```
|
||||
/// # use actix_http::http::{header, HeaderMap, HeaderValue};
|
||||
/// let mut map = HeaderMap::new();
|
||||
///
|
||||
/// let removed = map.remove("accept");
|
||||
/// assert!(removed.is_empty());
|
||||
///
|
||||
/// map.insert(header::ACCEPT, HeaderValue::from_static("text/html"));
|
||||
/// let removed = map.remove("accept");
|
||||
/// assert!(!removed.is_empty());
|
||||
/// ```
|
||||
pub fn remove(&mut self, key: impl AsHeaderName) -> Removed {
|
||||
let value = match key.try_as_name(super::as_name::Seal) {
|
||||
Ok(Cow::Borrowed(name)) => self.inner.remove(name),
|
||||
@ -550,7 +581,8 @@ impl HeaderMap {
|
||||
}
|
||||
}
|
||||
|
||||
/// Note that this implementation will clone a [HeaderName] for each value.
|
||||
/// Note that this implementation will clone a [HeaderName] for each value. Consider using
|
||||
/// [`drain`](Self::drain) to control header name cloning.
|
||||
impl IntoIterator for HeaderMap {
|
||||
type Item = (HeaderName, HeaderValue);
|
||||
type IntoIter = IntoIter;
|
||||
@ -571,7 +603,7 @@ impl<'a> IntoIterator for &'a HeaderMap {
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterator for all values with the same header name.
|
||||
/// Iterator over borrowed values with the same associated name.
|
||||
///
|
||||
/// See [`HeaderMap::get_all`].
|
||||
#[derive(Debug)]
|
||||
@ -613,18 +645,36 @@ impl<'a> Iterator for GetAll<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterator for owned [`HeaderValue`]s with the same associated [`HeaderName`] returned from methods
|
||||
/// on [`HeaderMap`] that remove or replace items.
|
||||
impl ExactSizeIterator for GetAll<'_> {}
|
||||
|
||||
impl iter::FusedIterator for GetAll<'_> {}
|
||||
|
||||
/// Iterator over removed, owned values with the same associated name.
|
||||
///
|
||||
/// Returned from methods that remove or replace items. See [`HeaderMap::insert`]
|
||||
/// and [`HeaderMap::remove`].
|
||||
#[derive(Debug)]
|
||||
pub struct Removed {
|
||||
inner: Option<smallvec::IntoIter<[HeaderValue; 4]>>,
|
||||
}
|
||||
|
||||
impl<'a> Removed {
|
||||
impl Removed {
|
||||
fn new(value: Option<Value>) -> Self {
|
||||
let inner = value.map(|value| value.inner.into_iter());
|
||||
Self { inner }
|
||||
}
|
||||
|
||||
/// Returns true if iterator contains no elements, without consuming it.
|
||||
///
|
||||
/// If called immediately after [`HeaderMap::insert`] or [`HeaderMap::remove`], it will indicate
|
||||
/// whether any items were actually replaced or removed, respectively.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
match self.inner {
|
||||
// size hint lower bound of smallvec is the correct length
|
||||
Some(ref iter) => iter.size_hint().0 == 0,
|
||||
None => true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for Removed {
|
||||
@ -644,7 +694,11 @@ impl Iterator for Removed {
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterator over all [`HeaderName`]s in the map.
|
||||
impl ExactSizeIterator for Removed {}
|
||||
|
||||
impl iter::FusedIterator for Removed {}
|
||||
|
||||
/// Iterator over all names in the map.
|
||||
#[derive(Debug)]
|
||||
pub struct Keys<'a>(hash_map::Keys<'a, HeaderName, Value>);
|
||||
|
||||
@ -662,6 +716,11 @@ impl<'a> Iterator for Keys<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl ExactSizeIterator for Keys<'_> {}
|
||||
|
||||
impl iter::FusedIterator for Keys<'_> {}
|
||||
|
||||
/// Iterator over borrowed name-value pairs.
|
||||
#[derive(Debug)]
|
||||
pub struct Iter<'a> {
|
||||
inner: hash_map::Iter<'a, HeaderName, Value>,
|
||||
@ -713,6 +772,10 @@ impl<'a> Iterator for Iter<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl ExactSizeIterator for Iter<'_> {}
|
||||
|
||||
impl iter::FusedIterator for Iter<'_> {}
|
||||
|
||||
/// Iterator over drained name-value pairs.
|
||||
///
|
||||
/// Iterator items are `(Option<HeaderName>, HeaderValue)` to avoid cloning.
|
||||
@ -764,6 +827,10 @@ impl<'a> Iterator for Drain<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl ExactSizeIterator for Drain<'_> {}
|
||||
|
||||
impl iter::FusedIterator for Drain<'_> {}
|
||||
|
||||
/// Iterator over owned name-value pairs.
|
||||
///
|
||||
/// Implementation necessarily clones header names for each value.
|
||||
@ -814,12 +881,27 @@ impl Iterator for IntoIter {
|
||||
}
|
||||
}
|
||||
|
||||
impl ExactSizeIterator for IntoIter {}
|
||||
|
||||
impl iter::FusedIterator for IntoIter {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::iter::FusedIterator;
|
||||
|
||||
use http::header;
|
||||
use static_assertions::assert_impl_all;
|
||||
|
||||
use super::*;
|
||||
|
||||
assert_impl_all!(HeaderMap: IntoIterator);
|
||||
assert_impl_all!(Keys<'_>: Iterator, ExactSizeIterator, FusedIterator);
|
||||
assert_impl_all!(GetAll<'_>: Iterator, ExactSizeIterator, FusedIterator);
|
||||
assert_impl_all!(Removed: Iterator, ExactSizeIterator, FusedIterator);
|
||||
assert_impl_all!(Iter<'_>: Iterator, ExactSizeIterator, FusedIterator);
|
||||
assert_impl_all!(IntoIter: Iterator, ExactSizeIterator, FusedIterator);
|
||||
assert_impl_all!(Drain<'_>: Iterator, ExactSizeIterator, FusedIterator);
|
||||
|
||||
#[test]
|
||||
fn create() {
|
||||
let map = HeaderMap::new();
|
||||
@ -945,6 +1027,56 @@ mod tests {
|
||||
assert_eq!(vals.next(), removed.next().as_ref());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_all_iteration_order_matches_insertion_order() {
|
||||
let mut map = HeaderMap::new();
|
||||
|
||||
let mut vals = map.get_all(header::COOKIE);
|
||||
assert!(vals.next().is_none());
|
||||
|
||||
map.append(header::COOKIE, HeaderValue::from_static("1"));
|
||||
let mut vals = map.get_all(header::COOKIE);
|
||||
assert_eq!(vals.next().unwrap().as_bytes(), b"1");
|
||||
assert!(vals.next().is_none());
|
||||
|
||||
map.append(header::COOKIE, HeaderValue::from_static("2"));
|
||||
let mut vals = map.get_all(header::COOKIE);
|
||||
assert_eq!(vals.next().unwrap().as_bytes(), b"1");
|
||||
assert_eq!(vals.next().unwrap().as_bytes(), b"2");
|
||||
assert!(vals.next().is_none());
|
||||
|
||||
map.append(header::COOKIE, HeaderValue::from_static("3"));
|
||||
map.append(header::COOKIE, HeaderValue::from_static("4"));
|
||||
map.append(header::COOKIE, HeaderValue::from_static("5"));
|
||||
let mut vals = map.get_all(header::COOKIE);
|
||||
assert_eq!(vals.next().unwrap().as_bytes(), b"1");
|
||||
assert_eq!(vals.next().unwrap().as_bytes(), b"2");
|
||||
assert_eq!(vals.next().unwrap().as_bytes(), b"3");
|
||||
assert_eq!(vals.next().unwrap().as_bytes(), b"4");
|
||||
assert_eq!(vals.next().unwrap().as_bytes(), b"5");
|
||||
assert!(vals.next().is_none());
|
||||
|
||||
let _ = map.insert(header::COOKIE, HeaderValue::from_static("6"));
|
||||
let mut vals = map.get_all(header::COOKIE);
|
||||
assert_eq!(vals.next().unwrap().as_bytes(), b"6");
|
||||
assert!(vals.next().is_none());
|
||||
|
||||
let _ = map.insert(header::COOKIE, HeaderValue::from_static("7"));
|
||||
let _ = map.insert(header::COOKIE, HeaderValue::from_static("8"));
|
||||
let mut vals = map.get_all(header::COOKIE);
|
||||
assert_eq!(vals.next().unwrap().as_bytes(), b"8");
|
||||
assert!(vals.next().is_none());
|
||||
|
||||
map.append(header::COOKIE, HeaderValue::from_static("9"));
|
||||
let mut vals = map.get_all(header::COOKIE);
|
||||
assert_eq!(vals.next().unwrap().as_bytes(), b"8");
|
||||
assert_eq!(vals.next().unwrap().as_bytes(), b"9");
|
||||
assert!(vals.next().is_none());
|
||||
|
||||
// check for fused-ness
|
||||
assert!(vals.next().is_none());
|
||||
}
|
||||
|
||||
fn owned_pair<'a>(
|
||||
(name, val): (&'a HeaderName, &'a HeaderValue),
|
||||
) -> (HeaderName, HeaderValue) {
|
||||
|
@ -29,16 +29,14 @@ pub use http::header::{
X_FRAME_OPTIONS, X_XSS_PROTECTION,
};

use crate::error::ParseError;
use crate::HttpMessage;
use crate::{error::ParseError, HttpMessage};

mod as_name;
mod into_pair;
mod into_value;
mod utils;

pub(crate) mod map;
pub mod map;
mod shared;
mod utils;

#[doc(hidden)]
pub use self::shared::*;
@ -46,18 +44,18 @@ pub use self::shared::*;
pub use self::as_name::AsHeaderName;
pub use self::into_pair::IntoHeaderPair;
pub use self::into_value::IntoHeaderValue;
#[doc(hidden)]
pub use self::map::GetAll;
pub use self::map::HeaderMap;
pub use self::utils::*;
pub use self::utils::{
fmt_comma_delimited, from_comma_delimited, from_one_raw_str, http_percent_encode,
};

/// A trait for any object that already represents a valid header field and value.
/// An interface for types that already represent a valid header.
pub trait Header: IntoHeaderValue {
/// Returns the name of the header field
fn name() -> HeaderName;

/// Parse a header
fn parse<T: HttpMessage>(msg: &T) -> Result<Self, ParseError>;
fn parse<M: HttpMessage>(msg: &M) -> Result<Self, ParseError>;
}
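To illustrate how the traits above fit together, a hedged sketch of a custom typed header; the `XRequestId` type and the `x-request-id` name are invented for this example, and the import paths are assumed from the re-exports in this module:

    use actix_http::{
        error::ParseError,
        header::{Header, HeaderName, HeaderValue, IntoHeaderValue},
        HttpMessage,
    };
    use http::header::InvalidHeaderValue;

    struct XRequestId(String);

    impl IntoHeaderValue for XRequestId {
        type Error = InvalidHeaderValue;

        fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
            HeaderValue::from_str(&self.0)
        }
    }

    impl Header for XRequestId {
        fn name() -> HeaderName {
            HeaderName::from_static("x-request-id")
        }

        fn parse<M: HttpMessage>(msg: &M) -> Result<Self, ParseError> {
            // Look up the raw value, then keep it as an owned string.
            let value = msg.headers().get(Self::name()).ok_or(ParseError::Header)?;
            let s = value.to_str().map_err(|_| ParseError::Header)?;
            Ok(XRequestId(s.to_owned()))
        }
    }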
/// Convert `http::HeaderMap` to our `HeaderMap`.
@ -68,7 +66,7 @@ impl From<http::HeaderMap> for HeaderMap {
}

/// This encode set is used for HTTP header values and is defined at
/// https://tools.ietf.org/html/rfc5987#section-3.2.
/// <https://datatracker.ietf.org/doc/html/rfc5987#section-3.2>.
pub(crate) const HTTP_VALUE: &AsciiSet = &CONTROLS
.add(b' ')
.add(b'"')
@ -1,14 +1,13 @@
|
||||
use std::fmt::{self, Display};
|
||||
use std::str::FromStr;
|
||||
use std::{fmt, str};
|
||||
|
||||
use self::Charset::*;
|
||||
|
||||
/// A Mime charset.
|
||||
/// A MIME character set.
|
||||
///
|
||||
/// The string representation is normalized to upper case.
|
||||
///
|
||||
/// See <http://www.iana.org/assignments/character-sets/character-sets.xhtml>.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[allow(non_camel_case_types)]
|
||||
pub enum Charset {
|
||||
/// US ASCII
|
||||
@ -88,20 +87,20 @@ impl Charset {
|
||||
Iso_8859_8_E => "ISO-8859-8-E",
|
||||
Iso_8859_8_I => "ISO-8859-8-I",
|
||||
Gb2312 => "GB2312",
|
||||
Big5 => "big5",
|
||||
Big5 => "Big5",
|
||||
Koi8_R => "KOI8-R",
|
||||
Ext(ref s) => s,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Charset {
|
||||
impl fmt::Display for Charset {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.label())
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for Charset {
|
||||
impl str::FromStr for Charset {
|
||||
type Err = crate::Error;
|
||||
|
||||
fn from_str(s: &str) -> Result<Charset, crate::Error> {
|
||||
@ -128,7 +127,7 @@ impl FromStr for Charset {
|
||||
"ISO-8859-8-E" => Iso_8859_8_E,
|
||||
"ISO-8859-8-I" => Iso_8859_8_I,
|
||||
"GB2312" => Gb2312,
|
||||
"big5" => Big5,
|
||||
"BIG5" => Big5,
|
||||
"KOI8-R" => Koi8_R,
|
||||
s => Ext(s.to_owned()),
|
||||
})
|
||||
|
@ -9,14 +9,17 @@ use crate::{
|
||||
HttpMessage,
|
||||
};
|
||||
|
||||
/// Error return when a content encoding is unknown.
|
||||
///
|
||||
/// Example: 'compress'
|
||||
/// Error returned when a content encoding is unknown.
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "unsupported content encoding")]
|
||||
pub struct ContentEncodingParseError;
|
||||
|
||||
/// Represents a supported content encoding.
|
||||
///
|
||||
/// Includes a commonly-used subset of media types appropriate for use as HTTP content encodings.
|
||||
/// See [IANA HTTP Content Coding Registry].
|
||||
///
|
||||
/// [IANA HTTP Content Coding Registry]: https://www.iana.org/assignments/http-parameters/http-parameters.xhtml
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
#[non_exhaustive]
|
||||
pub enum ContentEncoding {
|
||||
@ -32,7 +35,7 @@ pub enum ContentEncoding {
|
||||
/// Gzip algorithm.
|
||||
Gzip,
|
||||
|
||||
// Zstd algorithm.
|
||||
/// Zstd algorithm.
|
||||
Zstd,
|
||||
|
||||
/// Indicates the identity function (i.e. no compression, nor modification).
|
||||
|
@ -1,17 +1,17 @@
// Originally from hyper v0.11.27 src/header/parsing.rs

use std::{fmt, str::FromStr};

use language_tags::LanguageTag;

use crate::header::{Charset, HTTP_VALUE};

// From hyper v0.11.27 src/header/parsing.rs

/// The value part of an extended parameter consisting of three parts:
/// - The REQUIRED character set name (`charset`).
/// - The OPTIONAL language information (`language_tag`).
/// - A character sequence representing the actual value (`value`), separated by single quotes.
///
/// It is defined in [RFC 5987](https://tools.ietf.org/html/rfc5987#section-3.2).
/// It is defined in [RFC 5987 §3.2](https://datatracker.ietf.org/doc/html/rfc5987#section-3.2).
#[derive(Clone, Debug, PartialEq)]
pub struct ExtendedValue {
/// The character set that is used to encode the `value` to a string.
@ -24,17 +24,17 @@ pub struct ExtendedValue {
pub value: Vec<u8>,
}

/// Parses extended header parameter values (`ext-value`), as defined in
/// [RFC 5987](https://tools.ietf.org/html/rfc5987#section-3.2).
/// Parses extended header parameter values (`ext-value`), as defined
/// in [RFC 5987 §3.2](https://datatracker.ietf.org/doc/html/rfc5987#section-3.2).
///
/// Extended values are denoted by parameter names that end with `*`.
///
/// ## ABNF
///
/// ```text
/// ```plain
/// ext-value = charset "'" [ language ] "'" value-chars
/// ; like RFC 2231's <extended-initial-value>
/// ; (see [RFC2231], Section 7)
/// ; (see [RFC 2231 §7])
///
/// charset = "UTF-8" / "ISO-8859-1" / mime-charset
///
@ -43,22 +43,26 @@ pub struct ExtendedValue {
/// / "!" / "#" / "$" / "%" / "&"
/// / "+" / "-" / "^" / "_" / "`"
/// / "{" / "}" / "~"
/// ; as <mime-charset> in Section 2.3 of [RFC2978]
/// ; as <mime-charset> in [RFC 2978 §2.3]
/// ; except that the single quote is not included
/// ; SHOULD be registered in the IANA charset registry
///
/// language = <Language-Tag, defined in [RFC5646], Section 2.1>
/// language = <Language-Tag, defined in [RFC 5646 §2.1]>
///
/// value-chars = *( pct-encoded / attr-char )
///
/// pct-encoded = "%" HEXDIG HEXDIG
/// ; see [RFC3986], Section 2.1
/// ; see [RFC 3986 §2.1]
///
/// attr-char = ALPHA / DIGIT
/// / "!" / "#" / "$" / "&" / "+" / "-" / "."
/// / "^" / "_" / "`" / "|" / "~"
/// ; token except ( "*" / "'" / "%" )
/// ```
///
/// [RFC 2231 §7]: https://datatracker.ietf.org/doc/html/rfc2231#section-7
/// [RFC 2978 §2.3]: https://datatracker.ietf.org/doc/html/rfc2978#section-2.3
/// [RFC 3986 §2.1]: https://datatracker.ietf.org/doc/html/rfc5646#section-2.1
pub fn parse_extended_value(
val: &str,
) -> Result<ExtendedValue, crate::error::ParseError> {
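For orientation, a short usage sketch of the function documented above, using the RFC 5987 example value; the `actix_http::header::{parse_extended_value, Charset}` paths and the `language_tag` field name are assumptions taken from the re-exports and struct shown in this diff:

    use actix_http::header::{parse_extended_value, Charset};

    // "UTF-8''%c2%a3%20and%20%e2%82%ac%20rates" decodes to the bytes of "£ and € rates".
    let ext = parse_extended_value("UTF-8''%c2%a3%20and%20%e2%82%ac%20rates").unwrap();
    assert_eq!(ext.charset, Charset::Ext("UTF-8".to_owned()));
    assert_eq!(ext.language_tag, None);
    assert_eq!(ext.value, "£ and € rates".as_bytes());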
82
actix-http/src/header/shared/http_date.rs
Normal file
82
actix-http/src/header/shared/http_date.rs
Normal file
@ -0,0 +1,82 @@
|
||||
use std::{fmt, io::Write, str::FromStr, time::SystemTime};
|
||||
|
||||
use bytes::BytesMut;
|
||||
use http::header::{HeaderValue, InvalidHeaderValue};
|
||||
|
||||
use crate::{
|
||||
config::DATE_VALUE_LENGTH, error::ParseError, header::IntoHeaderValue,
|
||||
helpers::MutWriter,
|
||||
};
|
||||
|
||||
/// A timestamp with HTTP-style formatting and parsing.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct HttpDate(SystemTime);
|
||||
|
||||
impl FromStr for HttpDate {
|
||||
type Err = ParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<HttpDate, ParseError> {
|
||||
match httpdate::parse_http_date(s) {
|
||||
Ok(sys_time) => Ok(HttpDate(sys_time)),
|
||||
Err(_) => Err(ParseError::Header),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for HttpDate {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let date_str = httpdate::fmt_http_date(self.0);
|
||||
f.write_str(&date_str)
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoHeaderValue for HttpDate {
|
||||
type Error = InvalidHeaderValue;
|
||||
|
||||
fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
|
||||
let mut buf = BytesMut::with_capacity(DATE_VALUE_LENGTH);
|
||||
let mut wrt = MutWriter(&mut buf);
|
||||
|
||||
// unwrap: date output is known to be well formed and of known length
|
||||
write!(wrt, "{}", httpdate::fmt_http_date(self.0)).unwrap();
|
||||
|
||||
HeaderValue::from_maybe_shared(buf.split().freeze())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SystemTime> for HttpDate {
|
||||
fn from(sys_time: SystemTime) -> HttpDate {
|
||||
HttpDate(sys_time)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<HttpDate> for SystemTime {
|
||||
fn from(HttpDate(sys_time): HttpDate) -> SystemTime {
|
||||
sys_time
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::time::Duration;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn date_header() {
|
||||
macro_rules! assert_parsed_date {
|
||||
($case:expr, $exp:expr) => {
|
||||
assert_eq!($case.parse::<HttpDate>().unwrap(), $exp);
|
||||
};
|
||||
}
|
||||
|
||||
// 784198117 = SystemTime::from(datetime!(1994-11-07 08:48:37).assume_utc()).duration_since(SystemTime::UNIX_EPOCH)
|
||||
let nov_07 = HttpDate(SystemTime::UNIX_EPOCH + Duration::from_secs(784198117));
|
||||
|
||||
assert_parsed_date!("Mon, 07 Nov 1994 08:48:37 GMT", nov_07);
|
||||
assert_parsed_date!("Monday, 07-Nov-94 08:48:37 GMT", nov_07);
|
||||
assert_parsed_date!("Mon Nov 7 08:48:37 1994", nov_07);
|
||||
|
||||
assert!("this-is-no-date".parse::<HttpDate>().is_err());
|
||||
}
|
||||
}
|
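A minimal round-trip sketch of the new `HttpDate`, assuming it stays publicly re-exported as `actix_http::header::HttpDate` (the constants mirror the test above):

```rust
use std::time::{Duration, SystemTime};

use actix_http::header::HttpDate;

fn main() {
    // same instant as in the test above: 1994-11-07 08:48:37 UTC
    let date = HttpDate::from(SystemTime::UNIX_EPOCH + Duration::from_secs(784_198_117));

    // Display always emits the fixed-length IMF-fixdate form
    assert_eq!(date.to_string(), "Mon, 07 Nov 1994 08:48:37 GMT");

    // FromStr accepts IMF-fixdate, RFC 850, and asctime formats via `httpdate`
    let parsed: HttpDate = "Monday, 07-Nov-94 08:48:37 GMT".parse().unwrap();
    assert_eq!(parsed, date);
}
```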
@ -1,97 +0,0 @@
|
||||
use std::{
|
||||
fmt,
|
||||
io::Write,
|
||||
str::FromStr,
|
||||
time::{SystemTime, UNIX_EPOCH},
|
||||
};
|
||||
|
||||
use bytes::buf::BufMut;
|
||||
use bytes::BytesMut;
|
||||
use http::header::{HeaderValue, InvalidHeaderValue};
|
||||
use time::{OffsetDateTime, PrimitiveDateTime, UtcOffset};
|
||||
|
||||
use crate::error::ParseError;
|
||||
use crate::header::IntoHeaderValue;
|
||||
use crate::time_parser;
|
||||
|
||||
/// A timestamp with HTTP formatting and parsing.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct HttpDate(OffsetDateTime);
|
||||
|
||||
impl FromStr for HttpDate {
|
||||
type Err = ParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<HttpDate, ParseError> {
|
||||
match time_parser::parse_http_date(s) {
|
||||
Some(t) => Ok(HttpDate(t.assume_utc())),
|
||||
None => Err(ParseError::Header),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for HttpDate {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Display::fmt(&self.0.format("%a, %d %b %Y %H:%M:%S GMT"), f)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SystemTime> for HttpDate {
|
||||
fn from(sys: SystemTime) -> HttpDate {
|
||||
HttpDate(PrimitiveDateTime::from(sys).assume_utc())
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoHeaderValue for HttpDate {
|
||||
type Error = InvalidHeaderValue;
|
||||
|
||||
fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
|
||||
let mut wrt = BytesMut::with_capacity(29).writer();
|
||||
write!(
|
||||
wrt,
|
||||
"{}",
|
||||
self.0
|
||||
.to_offset(UtcOffset::UTC)
|
||||
.format("%a, %d %b %Y %H:%M:%S GMT")
|
||||
)
|
||||
.unwrap();
|
||||
HeaderValue::from_maybe_shared(wrt.get_mut().split().freeze())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<HttpDate> for SystemTime {
|
||||
fn from(date: HttpDate) -> SystemTime {
|
||||
let dt = date.0;
|
||||
let epoch = OffsetDateTime::unix_epoch();
|
||||
|
||||
UNIX_EPOCH + (dt - epoch)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::HttpDate;
|
||||
use time::{date, time, PrimitiveDateTime};
|
||||
|
||||
#[test]
|
||||
fn test_date() {
|
||||
let nov_07 = HttpDate(
|
||||
PrimitiveDateTime::new(date!(1994 - 11 - 07), time!(8:48:37)).assume_utc(),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
"Sun, 07 Nov 1994 08:48:37 GMT".parse::<HttpDate>().unwrap(),
|
||||
nov_07
|
||||
);
|
||||
assert_eq!(
|
||||
"Sunday, 07-Nov-94 08:48:37 GMT"
|
||||
.parse::<HttpDate>()
|
||||
.unwrap(),
|
||||
nov_07
|
||||
);
|
||||
assert_eq!(
|
||||
"Sun Nov 7 08:48:37 1994".parse::<HttpDate>().unwrap(),
|
||||
nov_07
|
||||
);
|
||||
assert!("this-is-no-date".parse::<HttpDate>().is_err());
|
||||
}
|
||||
}
|
@ -3,12 +3,12 @@
|
||||
mod charset;
|
||||
mod content_encoding;
|
||||
mod extended;
|
||||
mod httpdate;
|
||||
mod http_date;
|
||||
mod quality_item;
|
||||
|
||||
pub use self::charset::Charset;
|
||||
pub use self::content_encoding::ContentEncoding;
|
||||
pub use self::extended::{parse_extended_value, ExtendedValue};
|
||||
pub use self::httpdate::HttpDate;
|
||||
pub use self::http_date::HttpDate;
|
||||
pub use self::quality_item::{q, qitem, Quality, QualityItem};
|
||||
pub use language_tags::LanguageTag;
|
||||
|
@ -1,8 +1,7 @@
|
||||
use std::{
|
||||
cmp,
|
||||
convert::{TryFrom, TryInto},
|
||||
fmt,
|
||||
str::{self, FromStr},
|
||||
fmt, str,
|
||||
};
|
||||
|
||||
use derive_more::{Display, Error};
|
||||
@ -26,9 +25,9 @@ const MAX_FLOAT_QUALITY: f32 = 1.0;
|
||||
/// a value between 0 and 1000 e.g. `Quality(532)` matches the quality
|
||||
/// `q=0.532`.
|
||||
///
|
||||
/// [RFC7231 Section 5.3.1](https://tools.ietf.org/html/rfc7231#section-5.3.1)
|
||||
/// gives more information on quality values in HTTP header fields.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
/// [RFC 7231 §5.3.1](https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.1) gives more
|
||||
/// information on quality values in HTTP header fields.
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct Quality(u16);
|
||||
|
||||
impl Quality {
|
||||
@ -79,20 +78,21 @@ impl TryFrom<f32> for Quality {
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents an item with a quality value as defined in
|
||||
/// [RFC7231](https://tools.ietf.org/html/rfc7231#section-5.3.1).
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
/// Represents an item with a quality value as defined
|
||||
/// in [RFC 7231 §5.3.1](https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.1).
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct QualityItem<T> {
|
||||
/// The actual contents of the field.
|
||||
/// The wrapped contents of the field.
|
||||
pub item: T,
|
||||
|
||||
/// The quality (client or server preference) for the value.
|
||||
pub quality: Quality,
|
||||
}
|
||||
|
||||
impl<T> QualityItem<T> {
|
||||
/// Creates a new `QualityItem` from an item and a quality.
|
||||
/// The item can be of any type.
|
||||
/// The quality should be a value in the range [0, 1].
|
||||
/// Constructs a new `QualityItem` from an item and a quality value.
|
||||
///
|
||||
/// The item can be of any type. The quality should be a value in the range [0, 1].
|
||||
pub fn new(item: T, quality: Quality) -> QualityItem<T> {
|
||||
QualityItem { item, quality }
|
||||
}
|
||||
@ -116,7 +116,7 @@ impl<T: fmt::Display> fmt::Display for QualityItem<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: FromStr> FromStr for QualityItem<T> {
|
||||
impl<T: str::FromStr> str::FromStr for QualityItem<T> {
|
||||
type Err = ParseError;
|
||||
|
||||
fn from_str(qitem_str: &str) -> Result<Self, Self::Err> {
|
||||
@ -128,6 +128,7 @@ impl<T: FromStr> FromStr for QualityItem<T> {
|
||||
let mut raw_item = qitem_str;
|
||||
let mut quality = 1f32;
|
||||
|
||||
// TODO: MSRV(1.52): use rsplit_once
|
||||
let parts: Vec<_> = qitem_str.rsplitn(2, ';').map(str::trim).collect();
|
||||
|
||||
if parts.len() == 2 {
|
||||
@ -195,6 +196,7 @@ mod tests {
|
||||
use super::*;
|
||||
|
||||
// copy of encoding from actix-web headers
|
||||
#[allow(clippy::enum_variant_names)] // allow Encoding prefix on EncodingExt
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
pub enum Encoding {
|
||||
Chunked,
|
||||
@ -223,7 +225,7 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for Encoding {
|
||||
impl str::FromStr for Encoding {
|
||||
type Err = crate::error::ParseError;
|
||||
fn from_str(s: &str) -> Result<Encoding, crate::error::ParseError> {
|
||||
use Encoding::*;
|
||||
|
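To illustrate the quality-value mapping described above (`Quality(532)` ⇔ `q=0.532`), a small sketch using the `q`, `qitem`, and `QualityItem` re-exports shown in the `mod.rs` hunk; the exact output strings are assumptions based on the `Display` impl in this file:

```rust
use actix_http::header::{q, qitem, QualityItem};

fn main() {
    // q(0.8) builds a Quality of 800 out of 1000, i.e. `q=0.8` on the wire
    let preferred = QualityItem::new("gzip".to_owned(), q(0.8));
    assert_eq!(preferred.to_string(), "gzip; q=0.8");

    // qitem wraps an item with the maximum quality, which is omitted when displayed
    let default = qitem("identity".to_owned());
    assert_eq!(default.to_string(), "identity");

    // parsing goes the other way, via the `FromStr` impl above
    let parsed: QualityItem<String> = "br; q=0.5".parse().unwrap();
    assert_eq!(parsed.quality, q(0.5));
}
```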
@ -1,3 +1,5 @@
|
||||
//! Header parsing utilities.
|
||||
|
||||
use std::{fmt, str::FromStr};
|
||||
|
||||
use super::HeaderValue;
|
||||
@ -10,9 +12,12 @@ where
|
||||
I: Iterator<Item = &'a HeaderValue> + 'a,
|
||||
T: FromStr,
|
||||
{
|
||||
let mut result = Vec::new();
|
||||
let size_guess = all.size_hint().1.unwrap_or(2);
|
||||
let mut result = Vec::with_capacity(size_guess);
|
||||
|
||||
for h in all {
|
||||
let s = h.to_str().map_err(|_| ParseError::Header)?;
|
||||
|
||||
result.extend(
|
||||
s.split(',')
|
||||
.filter_map(|x| match x.trim() {
|
||||
@ -22,6 +27,7 @@ where
|
||||
.filter_map(|x| x.trim().parse().ok()),
|
||||
)
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
@ -30,10 +36,12 @@ where
|
||||
pub fn from_one_raw_str<T: FromStr>(val: Option<&HeaderValue>) -> Result<T, ParseError> {
|
||||
if let Some(line) = val {
|
||||
let line = line.to_str().map_err(|_| ParseError::Header)?;
|
||||
|
||||
if !line.is_empty() {
|
||||
return T::from_str(line).or(Err(ParseError::Header));
|
||||
}
|
||||
}
|
||||
|
||||
Err(ParseError::Header)
|
||||
}
|
||||
|
||||
@ -44,19 +52,45 @@ where
|
||||
T: fmt::Display,
|
||||
{
|
||||
let mut iter = parts.iter();
|
||||
|
||||
if let Some(part) = iter.next() {
|
||||
fmt::Display::fmt(part, f)?;
|
||||
}
|
||||
|
||||
for part in iter {
|
||||
f.write_str(", ")?;
|
||||
fmt::Display::fmt(part, f)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Percent encode a sequence of bytes with a character set defined in
|
||||
/// <https://tools.ietf.org/html/rfc5987#section-3.2>
|
||||
/// <https://datatracker.ietf.org/doc/html/rfc5987#section-3.2>
|
||||
#[inline]
|
||||
pub fn http_percent_encode(f: &mut fmt::Formatter<'_>, bytes: &[u8]) -> fmt::Result {
|
||||
let encoded = percent_encoding::percent_encode(bytes, HTTP_VALUE);
|
||||
fmt::Display::fmt(&encoded, f)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn comma_delimited_parsing() {
|
||||
let headers = vec![];
|
||||
let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
|
||||
assert_eq!(res, vec![0; 0]);
|
||||
|
||||
let headers = vec![
|
||||
HeaderValue::from_static(""),
|
||||
HeaderValue::from_static(","),
|
||||
HeaderValue::from_static(" "),
|
||||
HeaderValue::from_static("1 ,"),
|
||||
HeaderValue::from_static(""),
|
||||
];
|
||||
let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
|
||||
assert_eq!(res, vec![1]);
|
||||
}
|
||||
}
|
||||
|
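The comma-delimited helper above can be exercised the same way the new unit test does; this sketch assumes `from_comma_delimited` is reachable as a public re-export under `actix_http::header` (an assumption about the export path) and otherwise mirrors the test:

```rust
use actix_http::header::{from_comma_delimited, HeaderValue}; // assumed re-export path

fn main() {
    let headers = vec![
        HeaderValue::from_static(""),
        HeaderValue::from_static(","),
        HeaderValue::from_static(" "),
        HeaderValue::from_static("1 , 2, 3,"),
    ];

    // empty items and stray whitespace are filtered out; each remaining
    // item is parsed with `FromStr`
    let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
    assert_eq!(res, vec![1, 2, 3]);
}
```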
@ -29,7 +29,6 @@ extern crate log;
|
||||
|
||||
pub mod body;
|
||||
mod builder;
|
||||
pub mod client;
|
||||
mod config;
|
||||
|
||||
#[cfg(feature = "__compress")]
|
||||
@ -44,7 +43,6 @@ mod request;
|
||||
mod response;
|
||||
mod response_builder;
|
||||
mod service;
|
||||
mod time_parser;
|
||||
|
||||
pub mod error;
|
||||
pub mod h1;
|
||||
@ -104,14 +102,9 @@ type ConnectCallback<IO> = dyn Fn(&IO, &mut Extensions);
|
||||
///
|
||||
/// # Implementation Details
|
||||
/// Uses Option to reduce necessary allocations when merging with request extensions.
|
||||
#[derive(Default)]
|
||||
pub(crate) struct OnConnectData(Option<Extensions>);
|
||||
|
||||
impl Default for OnConnectData {
|
||||
fn default() -> Self {
|
||||
Self(None)
|
||||
}
|
||||
}
|
||||
|
||||
impl OnConnectData {
|
||||
/// Construct by calling the on-connect callback with the underlying transport I/O.
|
||||
pub(crate) fn from_io<T>(
|
||||
|
@ -317,7 +317,7 @@ impl ResponseHead {
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn ctype(&self) -> Option<ConnectionType> {
|
||||
pub(crate) fn conn_type(&self) -> Option<ConnectionType> {
|
||||
if self.flags.contains(Flags::CLOSE) {
|
||||
Some(ConnectionType::Close)
|
||||
} else if self.flags.contains(Flags::KEEP_ALIVE) {
|
||||
|
@ -28,7 +28,7 @@ impl Response<AnyBody> {
|
||||
pub fn new(status: StatusCode) -> Self {
|
||||
Response {
|
||||
head: BoxedResponseHead::new(status),
|
||||
body: AnyBody::Empty,
|
||||
body: AnyBody::empty(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -262,7 +262,7 @@ impl ResponseBuilder {
|
||||
S: Stream<Item = Result<Bytes, E>> + 'static,
|
||||
E: Into<Box<dyn StdError>> + 'static,
|
||||
{
|
||||
self.body(AnyBody::from_message(BodyStream::new(stream)))
|
||||
self.body(AnyBody::new_boxed(BodyStream::new(stream)))
|
||||
}
|
||||
|
||||
/// Generate response with an empty body.
|
||||
@ -270,7 +270,7 @@ impl ResponseBuilder {
|
||||
/// This `ResponseBuilder` will be left in a useless state.
|
||||
#[inline]
|
||||
pub fn finish(&mut self) -> Response<AnyBody> {
|
||||
self.body(AnyBody::Empty)
|
||||
self.body(AnyBody::empty())
|
||||
}
|
||||
|
||||
/// Create an owned `ResponseBuilder`, leaving the original in a useless state.
|
||||
@ -357,7 +357,7 @@ impl fmt::Debug for ResponseBuilder {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::body::Body;
|
||||
use crate::body::AnyBody;
|
||||
use crate::http::header::{HeaderName, HeaderValue, CONTENT_TYPE};
|
||||
|
||||
#[test]
|
||||
@ -390,13 +390,13 @@ mod tests {
|
||||
fn test_content_type() {
|
||||
let resp = Response::build(StatusCode::OK)
|
||||
.content_type("text/plain")
|
||||
.body(Body::Empty);
|
||||
.body(AnyBody::empty());
|
||||
assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/plain")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_into_builder() {
|
||||
let mut resp: Response<Body> = "test".into();
|
||||
let mut resp: Response<AnyBody> = "test".into();
|
||||
assert_eq!(resp.status(), StatusCode::OK);
|
||||
|
||||
resp.headers_mut().insert(
|
||||
|
@ -195,9 +195,14 @@ where
|
||||
|
||||
#[cfg(feature = "openssl")]
|
||||
mod openssl {
|
||||
use actix_service::ServiceFactoryExt;
|
||||
use actix_tls::accept::openssl::{Acceptor, SslAcceptor, SslError, TlsStream};
|
||||
use actix_tls::accept::TlsError;
|
||||
use actix_service::ServiceFactoryExt as _;
|
||||
use actix_tls::accept::{
|
||||
openssl::{
|
||||
reexports::{Error as SslError, SslAcceptor},
|
||||
Acceptor, TlsStream,
|
||||
},
|
||||
TlsError,
|
||||
};
|
||||
|
||||
use super::*;
|
||||
|
||||
@ -227,7 +232,7 @@ mod openssl {
|
||||
U::Error: fmt::Display + Into<Response<AnyBody>>,
|
||||
U::InitError: fmt::Debug,
|
||||
{
|
||||
/// Create openssl based service
|
||||
/// Create OpenSSL based service.
|
||||
pub fn openssl(
|
||||
self,
|
||||
acceptor: SslAcceptor,
|
||||
@ -239,9 +244,11 @@ mod openssl {
|
||||
InitError = (),
|
||||
> {
|
||||
Acceptor::new(acceptor)
|
||||
.map_err(TlsError::Tls)
|
||||
.map_init_err(|_| panic!())
|
||||
.and_then(|io: TlsStream<TcpStream>| async {
|
||||
.map_init_err(|_| {
|
||||
unreachable!("TLS acceptor service factory does not error on init")
|
||||
})
|
||||
.map_err(TlsError::into_service_error)
|
||||
.map(|io: TlsStream<TcpStream>| {
|
||||
let proto = if let Some(protos) = io.ssl().selected_alpn_protocol() {
|
||||
if protos.windows(2).any(|window| window == b"h2") {
|
||||
Protocol::Http2
|
||||
@ -251,8 +258,9 @@ mod openssl {
|
||||
} else {
|
||||
Protocol::Http1
|
||||
};
|
||||
|
||||
let peer_addr = io.get_ref().peer_addr().ok();
|
||||
Ok((io, proto, peer_addr))
|
||||
(io, proto, peer_addr)
|
||||
})
|
||||
.and_then(self.map_err(TlsError::Service))
|
||||
}
|
||||
@ -263,11 +271,13 @@ mod openssl {
|
||||
mod rustls {
|
||||
use std::io;
|
||||
|
||||
use actix_tls::accept::rustls::{Acceptor, ServerConfig, Session, TlsStream};
|
||||
use actix_tls::accept::TlsError;
|
||||
use actix_service::ServiceFactoryExt as _;
|
||||
use actix_tls::accept::{
|
||||
rustls::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||
TlsError,
|
||||
};
|
||||
|
||||
use super::*;
|
||||
use actix_service::ServiceFactoryExt;
|
||||
|
||||
impl<S, B, X, U> HttpService<TlsStream<TcpStream>, S, B, X, U>
|
||||
where
|
||||
@ -295,7 +305,7 @@ mod rustls {
|
||||
U::Error: fmt::Display + Into<Response<AnyBody>>,
|
||||
U::InitError: fmt::Debug,
|
||||
{
|
||||
/// Create rustls based service
|
||||
/// Create Rustls based service.
|
||||
pub fn rustls(
|
||||
self,
|
||||
mut config: ServerConfig,
|
||||
@ -308,14 +318,15 @@ mod rustls {
|
||||
> {
|
||||
let mut protos = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
|
||||
protos.extend_from_slice(&config.alpn_protocols);
|
||||
config.set_protocols(&protos);
|
||||
config.alpn_protocols = protos;
|
||||
|
||||
Acceptor::new(config)
|
||||
.map_err(TlsError::Tls)
|
||||
.map_init_err(|_| panic!())
|
||||
.map_init_err(|_| {
|
||||
unreachable!("TLS acceptor service factory does not error on init")
|
||||
})
|
||||
.map_err(TlsError::into_service_error)
|
||||
.and_then(|io: TlsStream<TcpStream>| async {
|
||||
let proto = if let Some(protos) = io.get_ref().1.get_alpn_protocol()
|
||||
{
|
||||
let proto = if let Some(protos) = io.get_ref().1.alpn_protocol() {
|
||||
if protos.windows(2).any(|window| window == b"h2") {
|
||||
Protocol::Http2
|
||||
} else {
|
||||
|
@ -1,72 +0,0 @@
|
||||
use time::{Date, OffsetDateTime, PrimitiveDateTime};
|
||||
|
||||
/// Attempt to parse a `time` string as one of either RFC 1123, RFC 850, or asctime.
|
||||
pub(crate) fn parse_http_date(time: &str) -> Option<PrimitiveDateTime> {
|
||||
try_parse_rfc_1123(time)
|
||||
.or_else(|| try_parse_rfc_850(time))
|
||||
.or_else(|| try_parse_asctime(time))
|
||||
}
|
||||
|
||||
/// Attempt to parse a `time` string as a RFC 1123 formatted date time string.
|
||||
///
|
||||
/// Eg: `Fri, 12 Feb 2021 00:14:29 GMT`
|
||||
fn try_parse_rfc_1123(time: &str) -> Option<PrimitiveDateTime> {
|
||||
time::parse(time, "%a, %d %b %Y %H:%M:%S").ok()
|
||||
}
|
||||
|
||||
/// Attempt to parse a `time` string as a RFC 850 formatted date time string.
|
||||
///
|
||||
/// Eg: `Wednesday, 11-Jan-21 13:37:41 UTC`
|
||||
fn try_parse_rfc_850(time: &str) -> Option<PrimitiveDateTime> {
|
||||
let dt = PrimitiveDateTime::parse(time, "%A, %d-%b-%y %H:%M:%S").ok()?;
|
||||
|
||||
// If the `time` string contains a two-digit year, then as per RFC 2616 § 19.3,
|
||||
// we consider the year as part of this century if it's within the next 50 years,
|
||||
// otherwise we consider as part of the previous century.
|
||||
|
||||
let now = OffsetDateTime::now_utc();
|
||||
let century_start_year = (now.year() / 100) * 100;
|
||||
let mut expanded_year = century_start_year + dt.year();
|
||||
|
||||
if expanded_year > now.year() + 50 {
|
||||
expanded_year -= 100;
|
||||
}
|
||||
|
||||
let date = Date::try_from_ymd(expanded_year, dt.month(), dt.day()).ok()?;
|
||||
Some(PrimitiveDateTime::new(date, dt.time()))
|
||||
}
|
||||
|
||||
/// Attempt to parse a `time` string using ANSI C's `asctime` format.
|
||||
///
|
||||
/// Eg: `Wed Feb 13 15:46:11 2013`
|
||||
fn try_parse_asctime(time: &str) -> Option<PrimitiveDateTime> {
|
||||
time::parse(time, "%a %b %_d %H:%M:%S %Y").ok()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use time::{date, time};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_rfc_850_year_shift() {
|
||||
let date = try_parse_rfc_850("Friday, 19-Nov-82 16:14:55 EST").unwrap();
|
||||
assert_eq!(date, date!(1982 - 11 - 19).with_time(time!(16:14:55)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-62 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(2062 - 01 - 11).with_time(time!(13:37:41)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-21 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(2021 - 01 - 11).with_time(time!(13:37:41)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-23 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(2023 - 01 - 11).with_time(time!(13:37:41)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-99 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(1999 - 01 - 11).with_time(time!(13:37:41)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-00 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(2000 - 01 - 11).with_time(time!(13:37:41)));
|
||||
}
|
||||
}
|
@ -25,8 +25,8 @@ pub fn apply_mask_fast32(buf: &mut [u8], mask: [u8; 4]) {
|
||||
//
|
||||
// unaligned prefix and suffix are masked/unmasked per byte.
|
||||
// the properly aligned middle slice goes into the fast path and operates on 4-byte blocks.
|
||||
let (mut prefix, words, mut suffix) = unsafe { buf.align_to_mut::<u32>() };
|
||||
apply_mask_fallback(&mut prefix, mask);
|
||||
let (prefix, words, suffix) = unsafe { buf.align_to_mut::<u32>() };
|
||||
apply_mask_fallback(prefix, mask);
|
||||
let head = prefix.len() & 3;
|
||||
let mask_u32 = if head > 0 {
|
||||
if cfg!(target_endian = "big") {
|
||||
@ -40,7 +40,7 @@ pub fn apply_mask_fast32(buf: &mut [u8], mask: [u8; 4]) {
|
||||
for word in words.iter_mut() {
|
||||
*word ^= mask_u32;
|
||||
}
|
||||
apply_mask_fallback(&mut suffix, mask_u32.to_ne_bytes());
|
||||
apply_mask_fallback(suffix, mask_u32.to_ne_bytes());
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
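For reference, the per-byte fallback that the unaligned prefix and suffix go through is just an XOR against the repeating 4-byte mask. A self-contained sketch of that idea (not the crate's private `apply_mask_fallback`, whose mask offset handling differs slightly):

```rust
/// XOR `buf` in place with a repeating 4-byte WebSocket mask.
/// `apply_mask_fast32` uses this style of loop only for the unaligned
/// prefix/suffix and XORs aligned `u32` words in between.
fn apply_mask_per_byte(buf: &mut [u8], mask: [u8; 4]) {
    for (i, byte) in buf.iter_mut().enumerate() {
        *byte ^= mask[i % 4];
    }
}

fn main() {
    let mask = [0xde, 0xad, 0xbe, 0xef];
    let mut payload = *b"hello websocket";

    apply_mask_per_byte(&mut payload, mask);
    // masking twice with the same key restores the original bytes
    apply_mask_per_byte(&mut payload, mask);
    assert_eq!(&payload, b"hello websocket");
}
```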
@ -210,7 +210,6 @@ pub fn handshake_response(req: &RequestHead) -> ResponseBuilder {
|
||||
|
||||
Response::build(StatusCode::SWITCHING_PROTOCOLS)
|
||||
.upgrade("websocket")
|
||||
.insert_header((header::TRANSFER_ENCODING, "chunked"))
|
||||
.insert_header((
|
||||
header::SEC_WEBSOCKET_ACCEPT,
|
||||
// key is known to be header value safe ascii
|
||||
|
@ -3,7 +3,9 @@ use std::{
|
||||
fmt,
|
||||
};
|
||||
|
||||
/// Operation codes as part of RFC6455.
|
||||
/// Operation codes defined in [RFC 6455 §11.8].
|
||||
///
|
||||
/// [RFC 6455 §11.8]: https://datatracker.ietf.org/doc/html/rfc6455#section-11.8
|
||||
#[derive(Debug, Eq, PartialEq, Clone, Copy)]
|
||||
pub enum OpCode {
|
||||
/// Indicates a continuation frame of a fragmented message.
|
||||
@ -105,7 +107,7 @@ pub enum CloseCode {
|
||||
Abnormal,
|
||||
|
||||
/// Indicates that an endpoint is terminating the connection because it has received data within
|
||||
/// a message that was not consistent with the type of the message (e.g., non-UTF-8 \[RFC3629\]
|
||||
/// a message that was not consistent with the type of the message (e.g., non-UTF-8 \[RFC 3629\]
|
||||
/// data within a text message).
|
||||
Invalid,
|
||||
|
||||
@ -220,7 +222,8 @@ impl<T: Into<String>> From<(CloseCode, T)> for CloseReason {
|
||||
}
|
||||
}
|
||||
|
||||
/// The WebSocket GUID as stated in the spec. See https://tools.ietf.org/html/rfc6455#section-1.3.
|
||||
/// The WebSocket GUID as stated in the spec.
|
||||
/// See <https://datatracker.ietf.org/doc/html/rfc6455#section-1.3>.
|
||||
static WS_GUID: &[u8] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
|
||||
|
||||
/// Hashes the `Sec-WebSocket-Key` header according to the WebSocket spec.
|
||||
|
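A quick illustration of the `From<(CloseCode, T)>` conversion for `CloseReason` touched in this hunk, assuming the usual `actix_http::ws` re-exports and public `code`/`description` fields:

```rust
use actix_http::ws::{CloseCode, CloseReason};

fn main() {
    // tuple conversion from the `impl<T: Into<String>> From<(CloseCode, T)>` above
    let reason: CloseReason = (CloseCode::Normal, "session finished").into();

    assert_eq!(reason.code, CloseCode::Normal);
    assert_eq!(reason.description.as_deref(), Some("session finished"));
}
```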
77
actix-http/tests/test_h2_ping_pong.rs
Normal file
@ -0,0 +1,77 @@
|
||||
use std::io;
|
||||
|
||||
use actix_http::{error::Error, HttpService, Response};
|
||||
use actix_server::Server;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn h2_ping_pong() -> io::Result<()> {
|
||||
let (tx, rx) = std::sync::mpsc::sync_channel(1);
|
||||
|
||||
let lst = std::net::TcpListener::bind("127.0.0.1:0")?;
|
||||
|
||||
let addr = lst.local_addr().unwrap();
|
||||
|
||||
let join = std::thread::spawn(move || {
|
||||
actix_rt::System::new().block_on(async move {
|
||||
let srv = Server::build()
|
||||
.disable_signals()
|
||||
.workers(1)
|
||||
.listen("h2_ping_pong", lst, || {
|
||||
HttpService::build()
|
||||
.keep_alive(3)
|
||||
.h2(|_| async { Ok::<_, Error>(Response::ok()) })
|
||||
.tcp()
|
||||
})?
|
||||
.run();
|
||||
|
||||
tx.send(srv.handle()).unwrap();
|
||||
|
||||
srv.await
|
||||
})
|
||||
});
|
||||
|
||||
let handle = rx.recv().unwrap();
|
||||
|
||||
let (sync_tx, rx) = std::sync::mpsc::sync_channel(1);
|
||||
|
||||
// use a separate thread for h2 client so it can be blocked.
|
||||
std::thread::spawn(move || {
|
||||
tokio::runtime::Builder::new_current_thread()
|
||||
.enable_all()
|
||||
.build()
|
||||
.unwrap()
|
||||
.block_on(async move {
|
||||
let stream = tokio::net::TcpStream::connect(addr).await.unwrap();
|
||||
|
||||
let (mut tx, conn) = h2::client::handshake(stream).await.unwrap();
|
||||
|
||||
tokio::spawn(async move { conn.await.unwrap() });
|
||||
|
||||
let (res, _) = tx.send_request(::http::Request::new(()), true).unwrap();
|
||||
let res = res.await.unwrap();
|
||||
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
|
||||
sync_tx.send(()).unwrap();
|
||||
|
||||
// intentionally block the client thread so it cannot answer the ping-pong.
|
||||
std::thread::sleep(std::time::Duration::from_secs(1000));
|
||||
})
|
||||
});
|
||||
|
||||
rx.recv().unwrap();
|
||||
|
||||
let now = std::time::Instant::now();
|
||||
|
||||
// stop server gracefully. this step would take up to 30 seconds.
|
||||
handle.stop(true).await;
|
||||
|
||||
// join the server thread; this only finishes once all connections are gone.
|
||||
join.join().unwrap()?;
|
||||
|
||||
// check how long joining the server thread took, to confirm the shutdown came from
|
||||
// the keep-alive timeout rather than the graceful shutdown timeout.
|
||||
assert!(now.elapsed() < std::time::Duration::from_secs(30));
|
||||
|
||||
Ok(())
|
||||
}
|
@ -5,10 +5,10 @@ extern crate tls_openssl as openssl;
|
||||
use std::{convert::Infallible, io};
|
||||
|
||||
use actix_http::{
|
||||
body::{AnyBody, Body, SizedStream},
|
||||
body::{AnyBody, SizedStream},
|
||||
error::PayloadError,
|
||||
http::{
|
||||
header::{self, HeaderName, HeaderValue},
|
||||
header::{self, HeaderValue},
|
||||
Method, StatusCode, Version,
|
||||
},
|
||||
Error, HttpMessage, HttpService, Request, Response,
|
||||
@ -143,38 +143,25 @@ async fn test_h2_content_length() {
|
||||
})
|
||||
.await;
|
||||
|
||||
let header = HeaderName::from_static("content-length");
|
||||
let value = HeaderValue::from_static("0");
|
||||
static VALUE: HeaderValue = HeaderValue::from_static("0");
|
||||
|
||||
{
|
||||
for &i in &[0] {
|
||||
let req = srv
|
||||
.request(Method::HEAD, srv.surl(&format!("/{}", i)))
|
||||
.send();
|
||||
let _response = req.await.expect_err("should timeout on recv 1xx frame");
|
||||
// assert_eq!(response.headers().get(&header), None);
|
||||
let req = srv.request(Method::HEAD, srv.surl("/0")).send();
|
||||
req.await.expect_err("should timeout on recv 1xx frame");
|
||||
|
||||
let req = srv
|
||||
.request(Method::GET, srv.surl(&format!("/{}", i)))
|
||||
.send();
|
||||
let _response = req.await.expect_err("should timeout on recv 1xx frame");
|
||||
// assert_eq!(response.headers().get(&header), None);
|
||||
}
|
||||
let req = srv.request(Method::GET, srv.surl("/0")).send();
|
||||
req.await.expect_err("should timeout on recv 1xx frame");
|
||||
|
||||
for &i in &[1] {
|
||||
let req = srv
|
||||
.request(Method::GET, srv.surl(&format!("/{}", i)))
|
||||
.send();
|
||||
let response = req.await.unwrap();
|
||||
assert_eq!(response.headers().get(&header), None);
|
||||
}
|
||||
let req = srv.request(Method::GET, srv.surl("/1")).send();
|
||||
let response = req.await.unwrap();
|
||||
assert!(response.headers().get("content-length").is_none());
|
||||
|
||||
for &i in &[2, 3] {
|
||||
let req = srv
|
||||
.request(Method::GET, srv.surl(&format!("/{}", i)))
|
||||
.send();
|
||||
let response = req.await.unwrap();
|
||||
assert_eq!(response.headers().get(&header), Some(&value));
|
||||
assert_eq!(response.headers().get("content-length"), Some(&VALUE));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -422,7 +409,7 @@ impl From<BadRequest> for Response<AnyBody> {
|
||||
async fn test_h2_service_error() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| err::<Response<Body>, _>(BadRequest))
|
||||
.h2(|_| err::<Response<AnyBody>, _>(BadRequest))
|
||||
.openssl(tls_config())
|
||||
.map_err(|_| ())
|
||||
})
|
||||
|
@ -3,14 +3,14 @@
|
||||
extern crate tls_rustls as rustls;
|
||||
|
||||
use std::{
|
||||
convert::Infallible,
|
||||
convert::{Infallible, TryFrom},
|
||||
io::{self, BufReader, Write},
|
||||
net::{SocketAddr, TcpStream as StdTcpStream},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use actix_http::{
|
||||
body::{AnyBody, Body, SizedStream},
|
||||
body::{AnyBody, SizedStream},
|
||||
error::PayloadError,
|
||||
http::{
|
||||
header::{self, HeaderName, HeaderValue},
|
||||
@ -20,16 +20,14 @@ use actix_http::{
|
||||
};
|
||||
use actix_http_test::test_server;
|
||||
use actix_service::{fn_factory_with_config, fn_service};
|
||||
use actix_tls::connect::rustls::webpki_roots_cert_store;
|
||||
use actix_utils::future::{err, ok};
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use derive_more::{Display, Error};
|
||||
use futures_core::Stream;
|
||||
use futures_util::stream::{once, StreamExt as _};
|
||||
use rustls::{
|
||||
internal::pemfile::{certs, pkcs8_private_keys},
|
||||
NoClientAuth, ServerConfig as RustlsServerConfig, Session,
|
||||
};
|
||||
use webpki::DNSNameRef;
|
||||
use rustls::{Certificate, PrivateKey, ServerConfig as RustlsServerConfig, ServerName};
|
||||
use rustls_pemfile::{certs, pkcs8_private_keys};
|
||||
|
||||
async fn load_body<S>(mut stream: S) -> Result<BytesMut, PayloadError>
|
||||
where
|
||||
@ -47,13 +45,24 @@ fn tls_config() -> RustlsServerConfig {
|
||||
let cert_file = cert.serialize_pem().unwrap();
|
||||
let key_file = cert.serialize_private_key_pem();
|
||||
|
||||
let mut config = RustlsServerConfig::new(NoClientAuth::new());
|
||||
let cert_file = &mut BufReader::new(cert_file.as_bytes());
|
||||
let key_file = &mut BufReader::new(key_file.as_bytes());
|
||||
|
||||
let cert_chain = certs(cert_file).unwrap();
|
||||
let cert_chain = certs(cert_file)
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.map(Certificate)
|
||||
.collect();
|
||||
let mut keys = pkcs8_private_keys(key_file).unwrap();
|
||||
config.set_single_cert(cert_chain, keys.remove(0)).unwrap();
|
||||
|
||||
let mut config = RustlsServerConfig::builder()
|
||||
.with_safe_defaults()
|
||||
.with_no_client_auth()
|
||||
.with_single_cert(cert_chain, PrivateKey(keys.remove(0)))
|
||||
.unwrap();
|
||||
|
||||
config.alpn_protocols.push(HTTP1_1_ALPN_PROTOCOL.to_vec());
|
||||
config.alpn_protocols.push(H2_ALPN_PROTOCOL.to_vec());
|
||||
|
||||
config
|
||||
}
|
||||
@ -62,19 +71,28 @@ pub fn get_negotiated_alpn_protocol(
|
||||
addr: SocketAddr,
|
||||
client_alpn_protocol: &[u8],
|
||||
) -> Option<Vec<u8>> {
|
||||
let mut config = rustls::ClientConfig::new();
|
||||
let mut config = rustls::ClientConfig::builder()
|
||||
.with_safe_defaults()
|
||||
.with_root_certificates(webpki_roots_cert_store())
|
||||
.with_no_client_auth();
|
||||
|
||||
config.alpn_protocols.push(client_alpn_protocol.to_vec());
|
||||
let mut sess = rustls::ClientSession::new(
|
||||
&Arc::new(config),
|
||||
DNSNameRef::try_from_ascii_str("localhost").unwrap(),
|
||||
);
|
||||
|
||||
let mut sess = rustls::ClientConnection::new(
|
||||
Arc::new(config),
|
||||
ServerName::try_from("localhost").unwrap(),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let mut sock = StdTcpStream::connect(addr).unwrap();
|
||||
let mut stream = rustls::Stream::new(&mut sess, &mut sock);
|
||||
|
||||
// The handshake will fail because the client will not be able to verify the server
|
||||
// certificate, but it doesn't matter here as we are just interested in the negotiated ALPN
|
||||
// protocol
|
||||
let _ = stream.flush();
|
||||
sess.get_alpn_protocol().map(|proto| proto.to_vec())
|
||||
|
||||
sess.alpn_protocol().map(|proto| proto.to_vec())
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -459,7 +477,7 @@ impl From<BadRequest> for Response<AnyBody> {
|
||||
async fn test_h2_service_error() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| err::<Response<Body>, _>(BadRequest))
|
||||
.h2(|_| err::<Response<AnyBody>, _>(BadRequest))
|
||||
.rustls(tls_config())
|
||||
})
|
||||
.await;
|
||||
@ -476,7 +494,7 @@ async fn test_h2_service_error() {
|
||||
async fn test_h1_service_error() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h1(|_| err::<Response<Body>, _>(BadRequest))
|
||||
.h1(|_| err::<Response<AnyBody>, _>(BadRequest))
|
||||
.rustls(tls_config())
|
||||
})
|
||||
.await;
|
||||
|
@ -6,7 +6,7 @@ use std::{
|
||||
};
|
||||
|
||||
use actix_http::{
|
||||
body::{AnyBody, Body, SizedStream},
|
||||
body::{AnyBody, SizedStream},
|
||||
header, http, Error, HttpMessage, HttpService, KeepAlive, Request, Response,
|
||||
StatusCode,
|
||||
};
|
||||
@ -24,7 +24,7 @@ use regex::Regex;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h1() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.keep_alive(KeepAlive::Disabled)
|
||||
.client_timeout(1000)
|
||||
@ -39,11 +39,13 @@ async fn test_h1() {
|
||||
|
||||
let response = srv.get("/").send().await.unwrap();
|
||||
assert!(response.status().is_success());
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h1_2() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.keep_alive(KeepAlive::Disabled)
|
||||
.client_timeout(1000)
|
||||
@ -59,6 +61,8 @@ async fn test_h1_2() {
|
||||
|
||||
let response = srv.get("/").send().await.unwrap();
|
||||
assert!(response.status().is_success());
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
@ -73,7 +77,7 @@ impl From<ExpectFailed> for Response<AnyBody> {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_expect_continue() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.expect(fn_service(|req: Request| {
|
||||
if req.head().uri.query() == Some("yes=") {
|
||||
@ -98,11 +102,13 @@ async fn test_expect_continue() {
|
||||
let mut data = String::new();
|
||||
let _ = stream.read_to_string(&mut data);
|
||||
assert!(data.starts_with("HTTP/1.1 100 Continue\r\n\r\nHTTP/1.1 200 OK\r\n"));
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_expect_continue_h1() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.expect(fn_service(|req: Request| {
|
||||
sleep(Duration::from_millis(20)).then(move |_| {
|
||||
@ -129,6 +135,8 @@ async fn test_expect_continue_h1() {
|
||||
let mut data = String::new();
|
||||
let _ = stream.read_to_string(&mut data);
|
||||
assert!(data.starts_with("HTTP/1.1 100 Continue\r\n\r\nHTTP/1.1 200 OK\r\n"));
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -136,7 +144,7 @@ async fn test_chunked_payload() {
|
||||
let chunk_sizes = vec![32768, 32, 32768];
|
||||
let total_size: usize = chunk_sizes.iter().sum();
|
||||
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.h1(fn_service(|mut request: Request| {
|
||||
request
|
||||
@ -183,15 +191,18 @@ async fn test_chunked_payload() {
|
||||
Some(caps) => caps.get(1).unwrap().as_str().parse().unwrap(),
|
||||
None => panic!("Failed to find size in HTTP Response: {}", data),
|
||||
};
|
||||
|
||||
size
|
||||
};
|
||||
|
||||
assert_eq!(returned_size, total_size);
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_slow_request() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.client_timeout(100)
|
||||
.finish(|_| ok::<_, Infallible>(Response::ok()))
|
||||
@ -204,11 +215,13 @@ async fn test_slow_request() {
|
||||
let mut data = String::new();
|
||||
let _ = stream.read_to_string(&mut data);
|
||||
assert!(data.starts_with("HTTP/1.1 408 Request Timeout"));
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_http1_malformed_request() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.h1(|_| ok::<_, Infallible>(Response::ok()))
|
||||
.tcp()
|
||||
@ -220,11 +233,13 @@ async fn test_http1_malformed_request() {
|
||||
let mut data = String::new();
|
||||
let _ = stream.read_to_string(&mut data);
|
||||
assert!(data.starts_with("HTTP/1.1 400 Bad Request"));
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_http1_keepalive() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.h1(|_| ok::<_, Infallible>(Response::ok()))
|
||||
.tcp()
|
||||
@ -241,11 +256,13 @@ async fn test_http1_keepalive() {
|
||||
let mut data = vec![0; 1024];
|
||||
let _ = stream.read(&mut data);
|
||||
assert_eq!(&data[..17], b"HTTP/1.1 200 OK\r\n");
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_http1_keepalive_timeout() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.keep_alive(1)
|
||||
.h1(|_| ok::<_, Infallible>(Response::ok()))
|
||||
@ -263,11 +280,13 @@ async fn test_http1_keepalive_timeout() {
|
||||
let mut data = vec![0; 1024];
|
||||
let res = stream.read(&mut data).unwrap();
|
||||
assert_eq!(res, 0);
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_http1_keepalive_close() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.h1(|_| ok::<_, Infallible>(Response::ok()))
|
||||
.tcp()
|
||||
@ -284,11 +303,13 @@ async fn test_http1_keepalive_close() {
|
||||
let mut data = vec![0; 1024];
|
||||
let res = stream.read(&mut data).unwrap();
|
||||
assert_eq!(res, 0);
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_http10_keepalive_default_close() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.h1(|_| ok::<_, Infallible>(Response::ok()))
|
||||
.tcp()
|
||||
@ -304,11 +325,13 @@ async fn test_http10_keepalive_default_close() {
|
||||
let mut data = vec![0; 1024];
|
||||
let res = stream.read(&mut data).unwrap();
|
||||
assert_eq!(res, 0);
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_http10_keepalive() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.h1(|_| ok::<_, Infallible>(Response::ok()))
|
||||
.tcp()
|
||||
@ -331,11 +354,13 @@ async fn test_http10_keepalive() {
|
||||
let mut data = vec![0; 1024];
|
||||
let res = stream.read(&mut data).unwrap();
|
||||
assert_eq!(res, 0);
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_http1_keepalive_disabled() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.keep_alive(KeepAlive::Disabled)
|
||||
.h1(|_| ok::<_, Infallible>(Response::ok()))
|
||||
@ -352,6 +377,8 @@ async fn test_http1_keepalive_disabled() {
|
||||
let mut data = vec![0; 1024];
|
||||
let res = stream.read(&mut data).unwrap();
|
||||
assert_eq!(res, 0);
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -361,7 +388,7 @@ async fn test_content_length() {
|
||||
StatusCode,
|
||||
};
|
||||
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.h1(|req: Request| {
|
||||
let indx: usize = req.uri().path()[1..].parse().unwrap();
|
||||
@ -399,6 +426,8 @@ async fn test_content_length() {
|
||||
assert_eq!(response.headers().get(&header), Some(&value));
|
||||
}
|
||||
}
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -438,6 +467,8 @@ async fn test_h1_headers() {
|
||||
// read response
|
||||
let bytes = srv.load_body(response).await.unwrap();
|
||||
assert_eq!(bytes, Bytes::from(data2));
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
|
||||
@ -477,6 +508,8 @@ async fn test_h1_body() {
|
||||
// read response
|
||||
let bytes = srv.load_body(response).await.unwrap();
|
||||
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -502,6 +535,8 @@ async fn test_h1_head_empty() {
|
||||
// read response
|
||||
let bytes = srv.load_body(response).await.unwrap();
|
||||
assert!(bytes.is_empty());
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -527,11 +562,13 @@ async fn test_h1_head_binary() {
|
||||
// read response
|
||||
let bytes = srv.load_body(response).await.unwrap();
|
||||
assert!(bytes.is_empty());
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h1_head_binary2() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.h1(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
|
||||
.tcp()
|
||||
@ -548,6 +585,8 @@ async fn test_h1_head_binary2() {
|
||||
.unwrap();
|
||||
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
|
||||
}
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -570,6 +609,8 @@ async fn test_h1_body_length() {
|
||||
// read response
|
||||
let bytes = srv.load_body(response).await.unwrap();
|
||||
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -605,6 +646,8 @@ async fn test_h1_body_chunked_explicit() {
|
||||
|
||||
// decode
|
||||
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -634,6 +677,8 @@ async fn test_h1_body_chunked_implicit() {
|
||||
// read response
|
||||
let bytes = srv.load_body(response).await.unwrap();
|
||||
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -661,6 +706,8 @@ async fn test_h1_response_http_error_handling() {
|
||||
bytes,
|
||||
Bytes::from_static(b"error processing HTTP: failed to parse header value")
|
||||
);
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
@ -677,7 +724,7 @@ impl From<BadRequest> for Response<AnyBody> {
|
||||
async fn test_h1_service_error() {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.h1(|_| err::<Response<Body>, _>(BadRequest))
|
||||
.h1(|_| err::<Response<AnyBody>, _>(BadRequest))
|
||||
.tcp()
|
||||
})
|
||||
.await;
|
||||
@ -688,11 +735,13 @@ async fn test_h1_service_error() {
|
||||
// read response
|
||||
let bytes = srv.load_body(response).await.unwrap();
|
||||
assert_eq!(bytes, Bytes::from_static(b"error"));
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h1_on_connect() {
|
||||
let srv = test_server(|| {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.on_connect_ext(|_, data| {
|
||||
data.insert(20isize);
|
||||
@ -707,4 +756,93 @@ async fn test_h1_on_connect() {
|
||||
|
||||
let response = srv.get("/").send().await.unwrap();
|
||||
assert!(response.status().is_success());
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
||||
/// Tests compliance with 304 Not Modified spec in RFC 7232 §4.1.
|
||||
/// <https://datatracker.ietf.org/doc/html/rfc7232#section-4.1>
|
||||
#[actix_rt::test]
|
||||
async fn test_not_modified_spec_h1() {
|
||||
// TODO: this test needing a few seconds to complete reveals some weirdness with either the
|
||||
// dispatcher or the client, though similar hangs occur on other tests in this file, only
|
||||
// succeeding, it seems, because of the keepalive timer
|
||||
|
||||
static CL: header::HeaderName = header::CONTENT_LENGTH;
|
||||
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.h1(|req: Request| {
|
||||
let res: Response<AnyBody> = match req.path() {
|
||||
// with no content-length
|
||||
"/none" => {
|
||||
Response::with_body(StatusCode::NOT_MODIFIED, AnyBody::None)
|
||||
}
|
||||
|
||||
// with no content-length
|
||||
"/body" => Response::with_body(
|
||||
StatusCode::NOT_MODIFIED,
|
||||
AnyBody::from("1234"),
|
||||
),
|
||||
|
||||
// with manual content-length header and specific None body
|
||||
"/cl-none" => {
|
||||
let mut res =
|
||||
Response::with_body(StatusCode::NOT_MODIFIED, AnyBody::None);
|
||||
res.headers_mut()
|
||||
.insert(CL.clone(), header::HeaderValue::from_static("24"));
|
||||
res
|
||||
}
|
||||
|
||||
// with manual content-length header and ignore-able body
|
||||
"/cl-body" => {
|
||||
let mut res = Response::with_body(
|
||||
StatusCode::NOT_MODIFIED,
|
||||
AnyBody::from("1234"),
|
||||
);
|
||||
res.headers_mut()
|
||||
.insert(CL.clone(), header::HeaderValue::from_static("4"));
|
||||
res
|
||||
}
|
||||
|
||||
_ => panic!("unknown route"),
|
||||
};
|
||||
|
||||
ok::<_, Infallible>(res)
|
||||
})
|
||||
.tcp()
|
||||
})
|
||||
.await;
|
||||
|
||||
let res = srv.get("/none").send().await.unwrap();
|
||||
assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
|
||||
assert_eq!(res.headers().get(&CL), None);
|
||||
assert!(srv.load_body(res).await.unwrap().is_empty());
|
||||
|
||||
let res = srv.get("/body").send().await.unwrap();
|
||||
assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
|
||||
assert_eq!(res.headers().get(&CL), None);
|
||||
assert!(srv.load_body(res).await.unwrap().is_empty());
|
||||
|
||||
let res = srv.get("/cl-none").send().await.unwrap();
|
||||
assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
|
||||
assert_eq!(
|
||||
res.headers().get(&CL),
|
||||
Some(&header::HeaderValue::from_static("24")),
|
||||
);
|
||||
assert!(srv.load_body(res).await.unwrap().is_empty());
|
||||
|
||||
let res = srv.get("/cl-body").send().await.unwrap();
|
||||
assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
|
||||
assert_eq!(
|
||||
res.headers().get(&CL),
|
||||
Some(&header::HeaderValue::from_static("4")),
|
||||
);
|
||||
// server does not prevent payload from being sent but clients may choose not to read it
|
||||
// TODO: this is probably a bug, especially since CL header can differ in length from the body
|
||||
assert!(!srv.load_body(res).await.unwrap().is_empty());
|
||||
|
||||
// TODO: add stream response tests
|
||||
|
||||
srv.stop().await;
|
||||
}
|
||||
|
@ -3,6 +3,27 @@
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 0.4.0-beta.9 - 2021-12-01
|
||||
* Polling `Field` after dropping `Multipart` now fails immediately instead of hanging forever. [#2463]
|
||||
|
||||
[#2463]: https://github.com/actix/actix-web/pull/2463
|
||||
|
||||
|
||||
## 0.4.0-beta.8 - 2021-11-22
|
||||
* Ensure a correct Content-Disposition header is included in every part of a multipart message. [#2451]
|
||||
* Added `MultipartError::NoContentDisposition` variant. [#2451]
|
||||
* Since Content-Disposition is now ensured, `Field::content_disposition` is now infallible. [#2451]
|
||||
* Added `Field::name` method for getting the field name. [#2451]
|
||||
* `MultipartError` now marks variants with inner errors as the source. [#2451]
|
||||
* `MultipartError` is now marked as non-exhaustive. [#2451]
|
||||
|
||||
[#2451]: https://github.com/actix/actix-web/pull/2451
|
||||
|
||||
|
||||
## 0.4.0-beta.7 - 2021-10-20
|
||||
* Minimum supported Rust version (MSRV) is now 1.52.
|
||||
|
||||
|
||||
## 0.4.0-beta.6 - 2021-09-09
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
|
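Putting the changelog entries above together (field names and the now-infallible `Content-Disposition` accessor), a hedged sketch of a handler against the 0.4.0-beta.9 API; the route path and handler name are illustrative only:

```rust
use actix_multipart::Multipart;
use actix_web::{post, Error, HttpResponse};
use futures_util::StreamExt as _;

// Consume each field: `name()` and the infallible `content_disposition()`
// come straight from the field's ensured Content-Disposition header.
#[post("/upload")]
async fn upload(mut payload: Multipart) -> Result<HttpResponse, Error> {
    while let Some(field) = payload.next().await {
        let mut field = field?;

        println!("field name: {}", field.name());
        println!("disposition: {:?}", field.content_disposition());

        // drain the field's bytes chunk by chunk
        while let Some(chunk) = field.next().await {
            let _bytes = chunk?;
        }
    }

    Ok(HttpResponse::Ok().finish())
}
```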
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-multipart"
|
||||
version = "0.4.0-beta.6"
|
||||
version = "0.4.0-beta.9"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Multipart form support for Actix Web"
|
||||
keywords = ["http", "web", "framework", "async", "futures"]
|
||||
@ -14,13 +14,12 @@ name = "actix_multipart"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false }
|
||||
actix-web = { version = "4.0.0-beta.11", default-features = false }
|
||||
actix-utils = "3.0.0"
|
||||
|
||||
bytes = "1"
|
||||
derive_more = "0.99.5"
|
||||
futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] }
|
||||
futures-util = { version = "0.3.7", default-features = false, features = ["alloc"] }
|
||||
httparse = "1.3"
|
||||
local-waker = "0.1"
|
||||
log = "0.4"
|
||||
@ -29,6 +28,7 @@ twoway = "0.2"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-http = "3.0.0-beta.14"
|
||||
futures-util = { version = "0.3.7", default-features = false, features = ["alloc"] }
|
||||
tokio = { version = "1", features = ["sync"] }
|
||||
tokio-stream = "0.1"
|
||||
|
@ -3,15 +3,15 @@
|
||||
> Multipart form support for Actix Web.
|
||||
|
||||
[](https://crates.io/crates/actix-multipart)
|
||||
[](https://docs.rs/actix-multipart/0.4.0-beta.6)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||
[](https://docs.rs/actix-multipart/0.4.0-beta.9)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-multipart/0.4.0-beta.6)
|
||||
[](https://deps.rs/crate/actix-multipart/0.4.0-beta.9)
|
||||
[](https://crates.io/crates/actix-multipart)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
## Documentation & Resources
|
||||
|
||||
- [API Documentation](https://docs.rs/actix-multipart)
|
||||
- Minimum Supported Rust Version (MSRV): 1.51.0
|
||||
- Minimum Supported Rust Version (MSRV): 1.52
|
||||
|
@ -2,39 +2,52 @@
|
||||
use actix_web::error::{ParseError, PayloadError};
|
||||
use actix_web::http::StatusCode;
|
||||
use actix_web::ResponseError;
|
||||
use derive_more::{Display, From};
|
||||
use derive_more::{Display, Error, From};
|
||||
|
||||
/// A set of errors that can occur while parsing multipart streams.
|
||||
#[derive(Debug, Display, From)]
|
||||
#[non_exhaustive]
|
||||
#[derive(Debug, Display, From, Error)]
|
||||
pub enum MultipartError {
|
||||
/// Content-Disposition header is not found or is not equal to "form-data".
|
||||
///
|
||||
/// According to [RFC 7578 §4.2](https://datatracker.ietf.org/doc/html/rfc7578#section-4.2) a
|
||||
/// Content-Disposition header must always be present and equal to "form-data".
|
||||
#[display(fmt = "No Content-Disposition `form-data` header")]
|
||||
NoContentDisposition,
|
||||
|
||||
/// Content-Type header is not found
|
||||
#[display(fmt = "No Content-type header found")]
|
||||
#[display(fmt = "No Content-Type header found")]
|
||||
NoContentType,
|
||||
|
||||
/// Can not parse Content-Type header
|
||||
#[display(fmt = "Can not parse Content-Type header")]
|
||||
ParseContentType,
|
||||
|
||||
/// Multipart boundary is not found
|
||||
#[display(fmt = "Multipart boundary is not found")]
|
||||
Boundary,
|
||||
|
||||
/// Nested multipart is not supported
|
||||
#[display(fmt = "Nested multipart is not supported")]
|
||||
Nested,
|
||||
|
||||
/// Multipart stream is incomplete
|
||||
#[display(fmt = "Multipart stream is incomplete")]
|
||||
Incomplete,
|
||||
|
||||
/// Error during field parsing
|
||||
#[display(fmt = "{}", _0)]
|
||||
Parse(ParseError),
|
||||
|
||||
/// Payload error
|
||||
#[display(fmt = "{}", _0)]
|
||||
Payload(PayloadError),
|
||||
|
||||
/// Not consumed
|
||||
#[display(fmt = "Multipart stream is not consumed")]
|
||||
NotConsumed,
|
||||
}
|
||||
|
||||
impl std::error::Error for MultipartError {}
|
||||
|
||||
/// Return `BadRequest` for `MultipartError`
|
||||
impl ResponseError for MultipartError {
|
||||
fn status_code(&self) -> StatusCode {
|
||||
|
@ -33,7 +33,6 @@ use crate::server::Multipart;
|
||||
impl FromRequest for Multipart {
|
||||
type Error = Error;
|
||||
type Future = Ready<Result<Multipart, Error>>;
|
||||
type Config = ();
|
||||
|
||||
#[inline]
|
||||
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
|
||||
|
@ -1,18 +1,22 @@
|
||||
//! Multipart response payload support.
|
||||
|
||||
use std::cell::{Cell, RefCell, RefMut};
|
||||
use std::convert::TryFrom;
|
||||
use std::marker::PhantomData;
|
||||
use std::pin::Pin;
|
||||
use std::rc::Rc;
|
||||
use std::task::{Context, Poll};
|
||||
use std::{cmp, fmt};
|
||||
use std::{
|
||||
cell::{Cell, RefCell, RefMut},
|
||||
cmp,
|
||||
convert::TryFrom,
|
||||
fmt,
|
||||
marker::PhantomData,
|
||||
pin::Pin,
|
||||
rc::Rc,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
|
||||
use actix_web::error::{ParseError, PayloadError};
|
||||
use actix_web::http::header::{self, ContentDisposition, HeaderMap, HeaderName, HeaderValue};
|
||||
use actix_web::{
|
||||
error::{ParseError, PayloadError},
|
||||
http::header::{self, ContentDisposition, HeaderMap, HeaderName, HeaderValue},
|
||||
};
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use futures_core::stream::{LocalBoxStream, Stream};
|
||||
use futures_util::stream::StreamExt as _;
|
||||
use local_waker::LocalWaker;
|
||||
|
||||
use crate::error::MultipartError;
|
||||
@ -28,7 +32,7 @@ const MAX_HEADERS: usize = 32;
|
||||
pub struct Multipart {
|
||||
safety: Safety,
|
||||
error: Option<MultipartError>,
|
||||
inner: Option<Rc<RefCell<InnerMultipart>>>,
|
||||
inner: Option<InnerMultipart>,
|
||||
}
|
||||
|
||||
enum InnerMultipartItem {
|
||||
@ -40,10 +44,13 @@ enum InnerMultipartItem {
|
||||
enum InnerState {
|
||||
/// Stream eof
|
||||
Eof,
|
||||
|
||||
/// Skip data until first boundary
|
||||
FirstBoundary,
|
||||
|
||||
/// Reading boundary
|
||||
Boundary,
|
||||
|
||||
/// Reading Headers,
|
||||
Headers,
|
||||
}
|
||||
@ -59,7 +66,7 @@ impl Multipart {
|
||||
/// Create multipart instance for boundary.
|
||||
pub fn new<S>(headers: &HeaderMap, stream: S) -> Multipart
|
||||
where
|
||||
S: Stream<Item = Result<Bytes, PayloadError>> + Unpin + 'static,
|
||||
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
|
||||
{
|
||||
match Self::boundary(headers) {
|
||||
Ok(boundary) => Multipart::from_boundary(boundary, stream),
|
||||
@ -69,39 +76,32 @@ impl Multipart {
|
||||
|
||||
/// Extract boundary info from headers.
|
||||
pub(crate) fn boundary(headers: &HeaderMap) -> Result<String, MultipartError> {
|
||||
if let Some(content_type) = headers.get(&header::CONTENT_TYPE) {
|
||||
if let Ok(content_type) = content_type.to_str() {
|
||||
if let Ok(ct) = content_type.parse::<mime::Mime>() {
|
||||
if let Some(boundary) = ct.get_param(mime::BOUNDARY) {
|
||||
Ok(boundary.as_str().to_owned())
|
||||
} else {
|
||||
Err(MultipartError::Boundary)
|
||||
}
|
||||
} else {
|
||||
Err(MultipartError::ParseContentType)
|
||||
}
|
||||
} else {
|
||||
Err(MultipartError::ParseContentType)
|
||||
}
|
||||
} else {
|
||||
Err(MultipartError::NoContentType)
|
||||
}
|
||||
headers
|
||||
.get(&header::CONTENT_TYPE)
|
||||
.ok_or(MultipartError::NoContentType)?
|
||||
.to_str()
|
||||
.ok()
|
||||
.and_then(|content_type| content_type.parse::<mime::Mime>().ok())
|
||||
.ok_or(MultipartError::ParseContentType)?
|
||||
.get_param(mime::BOUNDARY)
|
||||
.map(|boundary| boundary.as_str().to_owned())
|
||||
.ok_or(MultipartError::Boundary)
|
||||
}
|
||||
|
||||
/// Create multipart instance for given boundary and stream
|
||||
pub(crate) fn from_boundary<S>(boundary: String, stream: S) -> Multipart
|
||||
where
|
||||
S: Stream<Item = Result<Bytes, PayloadError>> + Unpin + 'static,
|
||||
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
|
||||
{
|
||||
Multipart {
|
||||
error: None,
|
||||
safety: Safety::new(),
|
||||
inner: Some(Rc::new(RefCell::new(InnerMultipart {
|
||||
inner: Some(InnerMultipart {
|
||||
boundary,
|
||||
payload: PayloadRef::new(PayloadBuffer::new(Box::new(stream))),
|
||||
payload: PayloadRef::new(PayloadBuffer::new(stream)),
|
||||
state: InnerState::FirstBoundary,
|
||||
item: InnerMultipartItem::None,
|
||||
}))),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
@ -118,20 +118,27 @@ impl Multipart {
impl Stream for Multipart {
    type Item = Result<Field, MultipartError>;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        if let Some(err) = self.error.take() {
            Poll::Ready(Some(Err(err)))
        } else if self.safety.current() {
            let this = self.get_mut();
            let mut inner = this.inner.as_mut().unwrap().borrow_mut();
            if let Some(mut payload) = inner.payload.get_mut(&this.safety) {
                payload.poll_stream(cx)?;
    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let this = self.get_mut();

        match this.inner.as_mut() {
            Some(inner) => {
                if let Some(mut buffer) = inner.payload.get_mut(&this.safety) {
                    // check safety and poll read payload to buffer.
                    buffer.poll_stream(cx)?;
                } else if !this.safety.is_clean() {
                    // safety violation
                    return Poll::Ready(Some(Err(MultipartError::NotConsumed)));
                } else {
                    return Poll::Pending;
                }

                inner.poll(&this.safety, cx)
            }
            inner.poll(&this.safety, cx)
        } else if !self.safety.is_clean() {
            Poll::Ready(Some(Err(MultipartError::NotConsumed)))
        } else {
            Poll::Pending
            None => Poll::Ready(Some(Err(this
                .error
                .take()
                .expect("Multipart polled after finish")))),
        }
    }
}
@ -152,17 +159,15 @@ impl InnerMultipart {
|
||||
Ok(httparse::Status::Complete((_, hdrs))) => {
|
||||
// convert headers
|
||||
let mut headers = HeaderMap::with_capacity(hdrs.len());
|
||||
|
||||
for h in hdrs {
|
||||
if let Ok(name) = HeaderName::try_from(h.name) {
|
||||
if let Ok(value) = HeaderValue::try_from(h.value) {
|
||||
headers.append(name, value);
|
||||
} else {
|
||||
return Err(ParseError::Header.into());
|
||||
}
|
||||
} else {
|
||||
return Err(ParseError::Header.into());
|
||||
}
|
||||
let name =
|
||||
HeaderName::try_from(h.name).map_err(|_| ParseError::Header)?;
|
||||
let value = HeaderValue::try_from(h.value)
|
||||
.map_err(|_| ParseError::Header)?;
|
||||
headers.append(name, value);
|
||||
}
|
||||
|
||||
Ok(Some(headers))
|
||||
}
|
||||
Ok(httparse::Status::Partial) => Err(ParseError::Header.into()),
|
||||
@ -332,31 +337,55 @@ impl InnerMultipart {
|
||||
return Poll::Pending;
|
||||
};
|
||||
|
||||
// content type
|
||||
let mut mt = mime::APPLICATION_OCTET_STREAM;
|
||||
if let Some(content_type) = headers.get(&header::CONTENT_TYPE) {
|
||||
if let Ok(content_type) = content_type.to_str() {
|
||||
if let Ok(ct) = content_type.parse::<mime::Mime>() {
|
||||
mt = ct;
|
||||
}
|
||||
}
|
||||
}
|
||||
// According to RFC 7578 §4.2, a Content-Disposition header must always be present and
|
||||
// set to "form-data".
|
||||
|
||||
let content_disposition = headers
|
||||
.get(&header::CONTENT_DISPOSITION)
|
||||
.and_then(|cd| ContentDisposition::from_raw(cd).ok())
|
||||
.filter(|content_disposition| {
|
||||
let is_form_data =
|
||||
content_disposition.disposition == header::DispositionType::FormData;
|
||||
|
||||
let has_field_name = content_disposition
|
||||
.parameters
|
||||
.iter()
|
||||
.any(|param| matches!(param, header::DispositionParam::Name(_)));
|
||||
|
||||
is_form_data && has_field_name
|
||||
});
|
||||
|
||||
let cd = if let Some(content_disposition) = content_disposition {
|
||||
content_disposition
|
||||
} else {
|
||||
return Poll::Ready(Some(Err(MultipartError::NoContentDisposition)));
|
||||
};
|
||||
|
||||
let ct: mime::Mime = headers
|
||||
.get(&header::CONTENT_TYPE)
|
||||
.and_then(|ct| ct.to_str().ok())
|
||||
.and_then(|ct| ct.parse().ok())
|
||||
.unwrap_or(mime::APPLICATION_OCTET_STREAM);
|
||||
|
||||
self.state = InnerState::Boundary;
|
||||
|
||||
// nested multipart stream
|
||||
if mt.type_() == mime::MULTIPART {
|
||||
Poll::Ready(Some(Err(MultipartError::Nested)))
|
||||
} else {
|
||||
let field = Rc::new(RefCell::new(InnerField::new(
|
||||
self.payload.clone(),
|
||||
self.boundary.clone(),
|
||||
&headers,
|
||||
)?));
|
||||
self.item = InnerMultipartItem::Field(Rc::clone(&field));
|
||||
|
||||
Poll::Ready(Some(Ok(Field::new(safety.clone(cx), headers, mt, field))))
|
||||
// nested multipart stream is not supported
|
||||
if ct.type_() == mime::MULTIPART {
|
||||
return Poll::Ready(Some(Err(MultipartError::Nested)));
|
||||
}
|
||||
|
||||
let field =
|
||||
InnerField::new_in_rc(self.payload.clone(), self.boundary.clone(), &headers)?;
|
||||
|
||||
self.item = InnerMultipartItem::Field(Rc::clone(&field));
|
||||
|
||||
Poll::Ready(Some(Ok(Field::new(
|
||||
safety.clone(cx),
|
||||
headers,
|
||||
ct,
|
||||
cd,
|
||||
field,
|
||||
))))
|
||||
}
|
||||
}
|
||||
}
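The filter above enforces the RFC 7578 §4.2 requirement before a `Field` is ever handed out: the part's Content-Disposition must be `form-data` and must carry a `name` parameter. A small self-contained sketch of that check using actix-web's header types (the raw header value is illustrative, not taken from this diff):

```rust
use actix_web::http::header::{
    ContentDisposition, DispositionParam, DispositionType, HeaderValue,
};

fn main() {
    // Illustrative raw value as it would appear on one part of a multipart body.
    let raw = HeaderValue::from_static("form-data; name=\"file\"; filename=\"fn.txt\"");

    // The same two checks the stream applies before yielding a Field.
    let cd = ContentDisposition::from_raw(&raw).expect("parsable Content-Disposition");
    let is_form_data = cd.disposition == DispositionType::FormData;
    let has_field_name = cd
        .parameters
        .iter()
        .any(|param| matches!(param, DispositionParam::Name(_)));

    assert!(is_form_data && has_field_name);
}
```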
|
||||
@ -371,6 +400,7 @@ impl Drop for InnerMultipart {
|
||||
/// A single field in a multipart stream
|
||||
pub struct Field {
|
||||
ct: mime::Mime,
|
||||
cd: ContentDisposition,
|
||||
headers: HeaderMap,
|
||||
inner: Rc<RefCell<InnerField>>,
|
||||
safety: Safety,
|
||||
@ -381,35 +411,51 @@ impl Field {
|
||||
safety: Safety,
|
||||
headers: HeaderMap,
|
||||
ct: mime::Mime,
|
||||
cd: ContentDisposition,
|
||||
inner: Rc<RefCell<InnerField>>,
|
||||
) -> Self {
|
||||
Field {
|
||||
ct,
|
||||
cd,
|
||||
headers,
|
||||
inner,
|
||||
safety,
|
||||
}
|
||||
}
|
||||
|
||||
    /// Get a map of headers
    /// Returns a reference to the field's header map.
    pub fn headers(&self) -> &HeaderMap {
        &self.headers
    }

    /// Get the content type of the field
    /// Returns a reference to the field's content (mime) type.
    pub fn content_type(&self) -> &mime::Mime {
        &self.ct
    }

    /// Get the content disposition of the field, if it exists
    pub fn content_disposition(&self) -> Option<ContentDisposition> {
        // RFC 7578: 'Each part MUST contain a Content-Disposition header field
        // where the disposition type is "form-data".'
        if let Some(content_disposition) = self.headers.get(&header::CONTENT_DISPOSITION) {
            ContentDisposition::from_raw(content_disposition).ok()
        } else {
            None
        }
    /// Returns the field's Content-Disposition.
    ///
    /// Per [RFC 7578 §4.2]: 'Each part MUST contain a Content-Disposition header field where the
    /// disposition type is "form-data". The Content-Disposition header field MUST also contain an
    /// additional parameter of "name"; the value of the "name" parameter is the original field name
    /// from the form.'
    ///
    /// This crate validates that it exists before returning a `Field`. As such, it is safe to
    /// unwrap `.content_disposition().get_name()`. The [name](Self::name) method is provided as
    /// a convenience.
    ///
    /// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
    pub fn content_disposition(&self) -> &ContentDisposition {
        &self.cd
    }

    /// Returns the field's name.
    ///
    /// See [content_disposition] regarding guarantees about the presence of the name parameter.
    pub fn name(&self) -> &str {
        self.content_disposition()
            .get_name()
            .expect("field name should be guaranteed to exist in multipart form-data")
    }
}
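Taken together, `headers()`, `content_type()`, `content_disposition()`, and `name()` cover a field's metadata. A usage sketch of a handler consuming a multipart upload through this API (route path and handler name are illustrative, not part of the diff):

```rust
use actix_multipart::Multipart;
use actix_web::{post, Error, HttpResponse};
use futures_util::StreamExt as _;

// Illustrative handler: iterate fields, read metadata, drain the bytes.
#[post("/upload")]
async fn upload(mut payload: Multipart) -> Result<HttpResponse, Error> {
    while let Some(field) = payload.next().await {
        let mut field = field?;

        // Safe per the docs above: the crate already validated that the
        // Content-Disposition carried a "name" parameter.
        let name = field.name().to_owned();
        let content_type = field.content_type().clone();

        // Drain this field's byte chunks.
        let mut data = Vec::new();
        while let Some(chunk) = field.next().await {
            data.extend_from_slice(&chunk?);
        }

        println!("field `{}` ({}) was {} bytes", name, content_type, data.len());
    }

    Ok(HttpResponse::Ok().finish())
}
```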
|
||||
|
||||
@ -417,17 +463,19 @@ impl Stream for Field {
|
||||
type Item = Result<Bytes, MultipartError>;
|
||||
|
||||
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||
if self.safety.current() {
|
||||
let mut inner = self.inner.borrow_mut();
|
||||
if let Some(mut payload) = inner.payload.as_ref().unwrap().get_mut(&self.safety) {
|
||||
payload.poll_stream(cx)?;
|
||||
}
|
||||
inner.poll(&self.safety)
|
||||
} else if !self.safety.is_clean() {
|
||||
Poll::Ready(Some(Err(MultipartError::NotConsumed)))
|
||||
let this = self.get_mut();
|
||||
let mut inner = this.inner.borrow_mut();
|
||||
if let Some(mut buffer) = inner.payload.as_ref().unwrap().get_mut(&this.safety) {
|
||||
// check safety and poll read payload to buffer.
|
||||
buffer.poll_stream(cx)?;
|
||||
} else if !this.safety.is_clean() {
|
||||
// safety violation
|
||||
return Poll::Ready(Some(Err(MultipartError::NotConsumed)));
|
||||
} else {
|
||||
Poll::Pending
|
||||
return Poll::Pending;
|
||||
}
|
||||
|
||||
inner.poll(&this.safety)
|
||||
}
|
||||
}
|
||||
|
||||
@ -451,20 +499,23 @@ struct InnerField {
|
||||
}
|
||||
|
||||
impl InnerField {
|
||||
fn new_in_rc(
|
||||
payload: PayloadRef,
|
||||
boundary: String,
|
||||
headers: &HeaderMap,
|
||||
) -> Result<Rc<RefCell<InnerField>>, PayloadError> {
|
||||
Self::new(payload, boundary, headers).map(|this| Rc::new(RefCell::new(this)))
|
||||
}
|
||||
|
||||
fn new(
|
||||
payload: PayloadRef,
|
||||
boundary: String,
|
||||
headers: &HeaderMap,
|
||||
) -> Result<InnerField, PayloadError> {
|
||||
let len = if let Some(len) = headers.get(&header::CONTENT_LENGTH) {
|
||||
if let Ok(s) = len.to_str() {
|
||||
if let Ok(len) = s.parse::<u64>() {
|
||||
Some(len)
|
||||
} else {
|
||||
return Err(PayloadError::Incomplete(None));
|
||||
}
|
||||
} else {
|
||||
return Err(PayloadError::Incomplete(None));
|
||||
match len.to_str().ok().and_then(|len| len.parse::<u64>().ok()) {
|
||||
Some(len) => Some(len),
|
||||
None => return Err(PayloadError::Incomplete(None)),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
@ -638,10 +689,7 @@ impl PayloadRef {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_mut<'a, 'b>(&'a self, s: &'b Safety) -> Option<RefMut<'a, PayloadBuffer>>
|
||||
where
|
||||
'a: 'b,
|
||||
{
|
||||
fn get_mut(&self, s: &Safety) -> Option<RefMut<'_, PayloadBuffer>> {
|
||||
if s.current() {
|
||||
Some(self.payload.borrow_mut())
|
||||
} else {
|
||||
@ -658,9 +706,11 @@ impl Clone for PayloadRef {
|
||||
}
|
||||
}
|
||||
|
||||
/// Counter. It tracks of number of clones of payloads and give access to
|
||||
/// payload only to top most task panics if Safety get destroyed and it not top
|
||||
/// most task.
|
||||
/// Counter. It tracks the number of clones of payloads and gives access to the payload only to the top-most.
|
||||
/// * When dropped, parent task is awakened. This is to support the case where Field is
|
||||
/// dropped in a separate task than Multipart.
|
||||
/// * Assumes that parent owners don't move to different tasks; only the top-most is allowed to.
|
||||
/// * If dropped and is not top most owner, is_clean flag is set to false.
|
||||
#[derive(Debug)]
|
||||
struct Safety {
|
||||
task: LocalWaker,
|
||||
@ -703,15 +753,16 @@ impl Safety {
|
||||
|
||||
impl Drop for Safety {
|
||||
fn drop(&mut self) {
|
||||
// parent task is dead
|
||||
if Rc::strong_count(&self.payload) != self.level {
|
||||
self.clean.set(true);
|
||||
// Multipart dropped leaving a Field
|
||||
self.clean.set(false);
|
||||
}
|
||||
|
||||
self.task.wake();
|
||||
}
|
||||
}
|
||||
|
||||
/// Payload buffer
|
||||
/// Payload buffer.
|
||||
struct PayloadBuffer {
|
||||
eof: bool,
|
||||
buf: BytesMut,
|
||||
@ -719,7 +770,7 @@ struct PayloadBuffer {
|
||||
}
|
||||
|
||||
impl PayloadBuffer {
|
||||
/// Create new `PayloadBuffer` instance
|
||||
/// Constructs new `PayloadBuffer` instance.
|
||||
fn new<S>(stream: S) -> Self
|
||||
where
|
||||
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
|
||||
@ -727,7 +778,7 @@ impl PayloadBuffer {
|
||||
PayloadBuffer {
|
||||
eof: false,
|
||||
buf: BytesMut::new(),
|
||||
stream: stream.boxed_local(),
|
||||
stream: Box::pin(stream),
|
||||
}
|
||||
}
|
||||
|
||||
@ -767,7 +818,7 @@ impl PayloadBuffer {
|
||||
}
|
||||
|
||||
/// Read until specified ending
|
||||
pub fn read_until(&mut self, line: &[u8]) -> Result<Option<Bytes>, MultipartError> {
|
||||
fn read_until(&mut self, line: &[u8]) -> Result<Option<Bytes>, MultipartError> {
|
||||
let res = twoway::find_bytes(&self.buf, line)
|
||||
.map(|idx| self.buf.split_to(idx + line.len()).freeze());
|
||||
|
||||
@ -779,12 +830,12 @@ impl PayloadBuffer {
|
||||
}
|
||||
|
||||
/// Read bytes until new line delimiter
|
||||
pub fn readline(&mut self) -> Result<Option<Bytes>, MultipartError> {
|
||||
fn readline(&mut self) -> Result<Option<Bytes>, MultipartError> {
|
||||
self.read_until(b"\n")
|
||||
}
|
||||
|
||||
/// Read bytes until new line delimiter or eof
|
||||
pub fn readline_or_eof(&mut self) -> Result<Option<Bytes>, MultipartError> {
|
||||
fn readline_or_eof(&mut self) -> Result<Option<Bytes>, MultipartError> {
|
||||
match self.readline() {
|
||||
Err(MultipartError::Incomplete) if self.eof => Ok(Some(self.buf.split().freeze())),
|
||||
line => line,
|
||||
@ -792,7 +843,7 @@ impl PayloadBuffer {
|
||||
}
|
||||
|
||||
/// Put unprocessed data back to the buffer
|
||||
pub fn unprocessed(&mut self, data: Bytes) {
|
||||
fn unprocessed(&mut self, data: Bytes) {
|
||||
let buf = BytesMut::from(data.as_ref());
|
||||
let buf = std::mem::replace(&mut self.buf, buf);
|
||||
self.buf.extend_from_slice(&buf);
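`read_until` (above) works purely on the internal `BytesMut`: find the delimiter, split everything through it off as a frozen `Bytes`, and leave the remainder buffered. A standalone sketch of that mechanic, assuming only the `twoway` and `bytes` crates:

```rust
use bytes::{Bytes, BytesMut};

// Find the delimiter and split everything up to and including it off as Bytes.
fn read_until(buf: &mut BytesMut, line: &[u8]) -> Option<Bytes> {
    twoway::find_bytes(buf, line).map(|idx| buf.split_to(idx + line.len()).freeze())
}

fn main() {
    let mut buf = BytesMut::from(&b"first line\nrest"[..]);
    assert_eq!(read_until(&mut buf, b"\n").as_deref(), Some(&b"first line\n"[..]));
    // The remainder stays buffered for the next read.
    assert_eq!(&buf[..], b"rest");
}
```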
|
||||
@ -805,10 +856,12 @@ mod tests {
|
||||
|
||||
use actix_http::h1::Payload;
|
||||
use actix_web::http::header::{DispositionParam, DispositionType};
|
||||
use actix_web::rt;
|
||||
use actix_web::test::TestRequest;
|
||||
use actix_web::FromRequest;
|
||||
use bytes::Bytes;
|
||||
use futures_util::future::lazy;
|
||||
use futures_util::{future::lazy, StreamExt};
|
||||
use std::time::Duration;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio_stream::wrappers::UnboundedReceiverStream;
|
||||
|
||||
@ -914,6 +967,7 @@ mod tests {
|
||||
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
|
||||
test\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
|
||||
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
|
||||
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
|
||||
data\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0--\r\n",
|
||||
@ -965,7 +1019,7 @@ mod tests {
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
match multipart.next().await {
|
||||
Some(Ok(mut field)) => {
|
||||
let cd = field.content_disposition().unwrap();
|
||||
let cd = field.content_disposition();
|
||||
assert_eq!(cd.disposition, DispositionType::FormData);
|
||||
assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));
|
||||
|
||||
@ -1027,7 +1081,7 @@ mod tests {
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
match multipart.next().await.unwrap() {
|
||||
Ok(mut field) => {
|
||||
let cd = field.content_disposition().unwrap();
|
||||
let cd = field.content_disposition();
|
||||
assert_eq!(cd.disposition, DispositionType::FormData);
|
||||
assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));
|
||||
|
||||
@ -1182,4 +1236,99 @@ mod tests {
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn no_content_disposition() {
|
||||
let bytes = Bytes::from(
|
||||
"testasdadsad\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
|
||||
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
|
||||
test\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
|
||||
);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
header::HeaderValue::from_static(
|
||||
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
|
||||
),
|
||||
);
|
||||
let payload = SlowStream::new(bytes);
|
||||
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
let res = multipart.next().await.unwrap();
|
||||
assert!(res.is_err());
|
||||
assert!(matches!(
|
||||
res.unwrap_err(),
|
||||
MultipartError::NoContentDisposition,
|
||||
));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn no_name_in_content_disposition() {
|
||||
let bytes = Bytes::from(
|
||||
"testasdadsad\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
|
||||
Content-Disposition: form-data; filename=\"fn.txt\"\r\n\
|
||||
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
|
||||
test\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
|
||||
);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
header::HeaderValue::from_static(
|
||||
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
|
||||
),
|
||||
);
|
||||
let payload = SlowStream::new(bytes);
|
||||
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
let res = multipart.next().await.unwrap();
|
||||
assert!(res.is_err());
|
||||
assert!(matches!(
|
||||
res.unwrap_err(),
|
||||
MultipartError::NoContentDisposition,
|
||||
));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_drop_multipart_dont_hang() {
|
||||
let (sender, payload) = create_stream();
|
||||
let (bytes, headers) = create_simple_request_with_header();
|
||||
sender.send(Ok(bytes)).unwrap();
|
||||
drop(sender); // eof
|
||||
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
let mut field = multipart.next().await.unwrap().unwrap();
|
||||
|
||||
drop(multipart);
|
||||
|
||||
// should fail immediately
|
||||
match field.next().await {
|
||||
Some(Err(MultipartError::NotConsumed)) => {}
|
||||
_ => panic!(),
|
||||
};
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_drop_field_awaken_multipart() {
|
||||
let (sender, payload) = create_stream();
|
||||
let (bytes, headers) = create_simple_request_with_header();
|
||||
sender.send(Ok(bytes)).unwrap();
|
||||
drop(sender); // eof
|
||||
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
let mut field = multipart.next().await.unwrap().unwrap();
|
||||
|
||||
let task = rt::spawn(async move {
|
||||
rt::time::sleep(Duration::from_secs(1)).await;
|
||||
assert_eq!(field.next().await.unwrap().unwrap(), "test");
|
||||
drop(field);
|
||||
});
|
||||
|
||||
// dropping field should awaken current task
|
||||
let _ = multipart.next().await.unwrap().unwrap();
|
||||
task.await.unwrap();
|
||||
}
|
||||
}
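The tests above rely on helpers such as `create_stream` and `create_simple_request_with_header`, whose bodies fall outside this hunk. A plausible sketch of the channel-backed payload helper, matching the `tokio::sync::mpsc` and `UnboundedReceiverStream` imports added above (the crate's actual helper may differ):

```rust
use actix_web::error::PayloadError;
use bytes::Bytes;
use futures_core::Stream;
use tokio::sync::mpsc;
use tokio_stream::wrappers::UnboundedReceiverStream;

// Channel-backed payload stream: tests push chunks on demand and drop the sender for EOF.
fn create_stream() -> (
    mpsc::UnboundedSender<Result<Bytes, PayloadError>>,
    impl Stream<Item = Result<Bytes, PayloadError>>,
) {
    let (tx, rx) = mpsc::unbounded_channel();
    (tx, UnboundedReceiverStream::new(rx))
}
```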
|
||||
|
@ -1,6 +1,7 @@
|
||||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
* Minimum supported Rust version (MSRV) is now 1.52.
|
||||
|
||||
|
||||
## 0.5.0-beta.2 - 2021-09-09
|
||||
|
@ -30,7 +30,7 @@ serde = "1"
|
||||
[dev-dependencies]
|
||||
criterion = { version = "0.3", features = ["html_reports"] }
|
||||
firestorm = { version = "0.4", features = ["enable_system_time"] }
|
||||
http = "0.2.3"
|
||||
http = "0.2.5"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
|
||||
[[bench]]
|
||||
|
@ -394,9 +394,7 @@ impl ResourceDef {
|
||||
pub fn set_name(&mut self, name: impl Into<String>) {
|
||||
let name = name.into();
|
||||
|
||||
if name.is_empty() {
|
||||
panic!("resource name should not be empty");
|
||||
}
|
||||
assert!(!name.is_empty(), "resource name should not be empty");
|
||||
|
||||
self.name = Some(name)
|
||||
}
|
||||
@ -978,9 +976,7 @@ impl ResourceDef {
|
||||
|
||||
let (name, pattern) = match param.find(':') {
|
||||
Some(idx) => {
|
||||
if tail {
|
||||
panic!("custom regex is not supported for tail match");
|
||||
}
|
||||
assert!(!tail, "custom regex is not supported for tail match");
|
||||
|
||||
let (name, pattern) = param.split_at(idx);
|
||||
(name, &pattern[1..])
|
||||
@ -1087,12 +1083,12 @@ impl ResourceDef {
|
||||
re.push_str(&escape(unprocessed));
|
||||
}
|
||||
|
||||
if dyn_segment_count > MAX_DYNAMIC_SEGMENTS {
|
||||
panic!(
|
||||
"Only {} dynamic segments are allowed, provided: {}",
|
||||
MAX_DYNAMIC_SEGMENTS, dyn_segment_count
|
||||
);
|
||||
}
|
||||
assert!(
|
||||
dyn_segment_count <= MAX_DYNAMIC_SEGMENTS,
|
||||
"Only {} dynamic segments are allowed, provided: {}",
|
||||
MAX_DYNAMIC_SEGMENTS,
|
||||
dyn_segment_count
|
||||
);
|
||||
|
||||
// Store the pattern in capture group #1 to have context info outside it
|
||||
let mut re = format!("({})", re);
|
||||
@ -1774,6 +1770,12 @@ mod tests {
|
||||
match_methods_agree!(["/v{v}", "/ver/{v}"] => "", "s/v", "/v1", "/v1/xx", "/ver/i3/5", "/ver/1");
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn duplicate_segment_name() {
|
||||
ResourceDef::new("/user/{id}/post/{id}");
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn invalid_dynamic_segment_delimiter() {
|
||||
|
@ -6,8 +6,9 @@ use crate::{IntoPatterns, Resource, ResourceDef, ResourcePath};
|
||||
pub struct ResourceId(pub u16);
|
||||
|
||||
/// Information about current resource
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ResourceInfo {
|
||||
#[allow(dead_code)]
|
||||
resource: ResourceId,
|
||||
}
|
||||
|
||||
|
@ -3,6 +3,23 @@
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 0.1.0-beta.7 - 2021-11-22
|
||||
* Fix compatibility with experimental `io-uring` feature of `actix-rt`. [#2408]
|
||||
|
||||
[#2408]: https://github.com/actix/actix-web/pull/2408
|
||||
|
||||
|
||||
## 0.1.0-beta.6 - 2021-11-15
|
||||
* No significant changes from `0.1.0-beta.5`.
|
||||
|
||||
|
||||
## 0.1.0-beta.5 - 2021-10-20
|
||||
* Updated rustls to v0.20. [#2414]
|
||||
* Minimum supported Rust version (MSRV) is now 1.52.
|
||||
|
||||
[#2414]: https://github.com/actix/actix-web/pull/2414
|
||||
|
||||
|
||||
## 0.1.0-beta.4 - 2021-09-09
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
|
@ -1,32 +1,41 @@
|
||||
[package]
|
||||
name = "actix-test"
|
||||
version = "0.1.0-beta.4"
|
||||
version = "0.1.0-beta.7"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Rob Ede <robjtede@icloud.com>",
|
||||
]
|
||||
edition = "2018"
|
||||
description = "Integration testing tools for Actix Web applications"
|
||||
keywords = ["http", "web", "framework", "async", "futures"]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
categories = [
|
||||
"network-programming",
|
||||
"asynchronous",
|
||||
"web-programming::http-server",
|
||||
"web-programming::websocket",
|
||||
]
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
# rustls
|
||||
rustls = ["tls-rustls", "actix-http/rustls"]
|
||||
rustls = ["tls-rustls", "actix-http/rustls", "awc/rustls"]
|
||||
|
||||
# openssl
|
||||
openssl = ["tls-openssl", "actix-http/openssl"]
|
||||
openssl = ["tls-openssl", "actix-http/openssl", "awc/openssl"]
|
||||
|
||||
[dependencies]
|
||||
actix-codec = "0.4.0"
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-http-test = "3.0.0-beta.5"
|
||||
actix-codec = "0.4.1"
|
||||
actix-http = "3.0.0-beta.14"
|
||||
actix-http-test = "3.0.0-beta.7"
|
||||
actix-service = "2.0.0"
|
||||
actix-utils = "3.0.0"
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false, features = ["cookies"] }
|
||||
actix-web = { version = "4.0.0-beta.11", default-features = false, features = ["cookies"] }
|
||||
actix-rt = "2.1"
|
||||
awc = { version = "3.0.0-beta.8", default-features = false, features = ["cookies"] }
|
||||
awc = { version = "3.0.0-beta.11", default-features = false, features = ["cookies"] }
|
||||
|
||||
futures-core = { version = "0.3.7", default-features = false, features = ["std"] }
|
||||
futures-util = { version = "0.3.7", default-features = false, features = [] }
|
||||
@ -35,4 +44,5 @@ serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
serde_urlencoded = "0.7"
|
||||
tls-openssl = { package = "openssl", version = "0.10.9", optional = true }
|
||||
tls-rustls = { package = "rustls", version = "0.19.0", optional = true }
|
||||
tls-rustls = { package = "rustls", version = "0.20.0", optional = true }
|
||||
tokio = { version = "1.2", features = ["sync"] }
|
||||
|
@ -31,7 +31,7 @@ extern crate tls_openssl as openssl;
|
||||
#[cfg(feature = "rustls")]
|
||||
extern crate tls_rustls as rustls;
|
||||
|
||||
use std::{error::Error as StdError, fmt, net, sync::mpsc, thread, time};
|
||||
use std::{error::Error as StdError, fmt, net, thread, time::Duration};
|
||||
|
||||
use actix_codec::{AsyncRead, AsyncWrite, Framed};
|
||||
pub use actix_http::test::TestBuffer;
|
||||
@ -41,8 +41,9 @@ use actix_http::{
|
||||
};
|
||||
use actix_service::{map_config, IntoServiceFactory, ServiceFactory, ServiceFactoryExt as _};
|
||||
use actix_web::{
|
||||
dev::{AppConfig, MessageBody, Server, Service},
|
||||
rt, web, Error,
|
||||
dev::{AppConfig, MessageBody, Server, ServerHandle, Service},
|
||||
rt::{self, System},
|
||||
web, Error,
|
||||
};
|
||||
use awc::{error::PayloadError, Client, ClientRequest, ClientResponse, Connector};
|
||||
use futures_core::Stream;
|
||||
@ -52,6 +53,7 @@ pub use actix_web::test::{
|
||||
call_service, default_service, init_service, load_stream, ok_service, read_body,
|
||||
read_body_json, read_response, read_response_json, TestRequest,
|
||||
};
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
/// Start default [`TestServer`].
|
||||
///
|
||||
@ -64,7 +66,7 @@ pub use actix_web::test::{
|
||||
/// Ok(HttpResponse::Ok())
|
||||
/// }
|
||||
///
|
||||
/// #[actix_rt::test]
|
||||
/// #[actix_web::test]
|
||||
/// async fn test_example() {
|
||||
/// let srv = actix_test::start(||
|
||||
/// App::new().service(my_handler)
|
||||
@ -104,7 +106,7 @@ where
|
||||
/// Ok(HttpResponse::Ok())
|
||||
/// }
|
||||
///
|
||||
/// #[actix_rt::test]
|
||||
/// #[actix_web::test]
|
||||
/// async fn test_example() {
|
||||
/// let srv = actix_test::start_with(actix_test::config().h1(), ||
|
||||
/// App::new().service(my_handler)
|
||||
@ -128,7 +130,11 @@ where
|
||||
B: MessageBody + 'static,
|
||||
B::Error: Into<Box<dyn StdError>>,
|
||||
{
|
||||
let (tx, rx) = mpsc::channel();
|
||||
// for sending handles and server info back from the spawned thread
|
||||
let (started_tx, started_rx) = std::sync::mpsc::channel();
|
||||
|
||||
// for signaling the shutdown of spawned server and system
|
||||
let (thread_stop_tx, thread_stop_rx) = mpsc::channel(1);
|
||||
|
||||
let tls = match cfg.stream {
|
||||
StreamType::Tcp => false,
|
||||
@ -138,154 +144,189 @@ where
|
||||
StreamType::Rustls(_) => true,
|
||||
};
|
||||
|
||||
// run server in separate thread
|
||||
// run server in separate orphaned thread
|
||||
thread::spawn(move || {
|
||||
let sys = rt::System::new();
|
||||
let tcp = net::TcpListener::bind("127.0.0.1:0").unwrap();
|
||||
let local_addr = tcp.local_addr().unwrap();
|
||||
let factory = factory.clone();
|
||||
let srv_cfg = cfg.clone();
|
||||
let timeout = cfg.client_timeout;
|
||||
let builder = Server::build().workers(1).disable_signals();
|
||||
rt::System::new().block_on(async move {
|
||||
let tcp = net::TcpListener::bind("127.0.0.1:0").unwrap();
|
||||
let local_addr = tcp.local_addr().unwrap();
|
||||
let factory = factory.clone();
|
||||
let srv_cfg = cfg.clone();
|
||||
let timeout = cfg.client_timeout;
|
||||
|
||||
let srv = match srv_cfg.stream {
|
||||
StreamType::Tcp => match srv_cfg.tp {
|
||||
HttpVer::Http1 => builder.listen("test", tcp, move || {
|
||||
let app_cfg =
|
||||
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
|
||||
let builder = Server::build().workers(1).disable_signals().system_exit();
|
||||
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
let srv = match srv_cfg.stream {
|
||||
StreamType::Tcp => match srv_cfg.tp {
|
||||
HttpVer::Http1 => builder.listen("test", tcp, move || {
|
||||
let app_cfg = AppConfig::__priv_test_new(
|
||||
false,
|
||||
local_addr.to_string(),
|
||||
local_addr,
|
||||
);
|
||||
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.h1(map_config(fac, move |_| app_cfg.clone()))
|
||||
.tcp()
|
||||
}),
|
||||
HttpVer::Http2 => builder.listen("test", tcp, move || {
|
||||
let app_cfg =
|
||||
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.h1(map_config(fac, move |_| app_cfg.clone()))
|
||||
.tcp()
|
||||
}),
|
||||
HttpVer::Http2 => builder.listen("test", tcp, move || {
|
||||
let app_cfg = AppConfig::__priv_test_new(
|
||||
false,
|
||||
local_addr.to_string(),
|
||||
local_addr,
|
||||
);
|
||||
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.h2(map_config(fac, move |_| app_cfg.clone()))
|
||||
.tcp()
|
||||
}),
|
||||
HttpVer::Both => builder.listen("test", tcp, move || {
|
||||
let app_cfg =
|
||||
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.h2(map_config(fac, move |_| app_cfg.clone()))
|
||||
.tcp()
|
||||
}),
|
||||
HttpVer::Both => builder.listen("test", tcp, move || {
|
||||
let app_cfg = AppConfig::__priv_test_new(
|
||||
false,
|
||||
local_addr.to_string(),
|
||||
local_addr,
|
||||
);
|
||||
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.finish(map_config(fac, move |_| app_cfg.clone()))
|
||||
.tcp()
|
||||
}),
|
||||
},
|
||||
#[cfg(feature = "openssl")]
|
||||
StreamType::Openssl(acceptor) => match cfg.tp {
|
||||
HttpVer::Http1 => builder.listen("test", tcp, move || {
|
||||
let app_cfg =
|
||||
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.finish(map_config(fac, move |_| app_cfg.clone()))
|
||||
.tcp()
|
||||
}),
|
||||
},
|
||||
#[cfg(feature = "openssl")]
|
||||
StreamType::Openssl(acceptor) => match cfg.tp {
|
||||
HttpVer::Http1 => builder.listen("test", tcp, move || {
|
||||
let app_cfg = AppConfig::__priv_test_new(
|
||||
false,
|
||||
local_addr.to_string(),
|
||||
local_addr,
|
||||
);
|
||||
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.h1(map_config(fac, move |_| app_cfg.clone()))
|
||||
.openssl(acceptor.clone())
|
||||
}),
|
||||
HttpVer::Http2 => builder.listen("test", tcp, move || {
|
||||
let app_cfg =
|
||||
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.h1(map_config(fac, move |_| app_cfg.clone()))
|
||||
.openssl(acceptor.clone())
|
||||
}),
|
||||
HttpVer::Http2 => builder.listen("test", tcp, move || {
|
||||
let app_cfg = AppConfig::__priv_test_new(
|
||||
false,
|
||||
local_addr.to_string(),
|
||||
local_addr,
|
||||
);
|
||||
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.h2(map_config(fac, move |_| app_cfg.clone()))
|
||||
.openssl(acceptor.clone())
|
||||
}),
|
||||
HttpVer::Both => builder.listen("test", tcp, move || {
|
||||
let app_cfg =
|
||||
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.h2(map_config(fac, move |_| app_cfg.clone()))
|
||||
.openssl(acceptor.clone())
|
||||
}),
|
||||
HttpVer::Both => builder.listen("test", tcp, move || {
|
||||
let app_cfg = AppConfig::__priv_test_new(
|
||||
false,
|
||||
local_addr.to_string(),
|
||||
local_addr,
|
||||
);
|
||||
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.finish(map_config(fac, move |_| app_cfg.clone()))
|
||||
.openssl(acceptor.clone())
|
||||
}),
|
||||
},
|
||||
#[cfg(feature = "rustls")]
|
||||
StreamType::Rustls(config) => match cfg.tp {
|
||||
HttpVer::Http1 => builder.listen("test", tcp, move || {
|
||||
let app_cfg =
|
||||
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.finish(map_config(fac, move |_| app_cfg.clone()))
|
||||
.openssl(acceptor.clone())
|
||||
}),
|
||||
},
|
||||
#[cfg(feature = "rustls")]
|
||||
StreamType::Rustls(config) => match cfg.tp {
|
||||
HttpVer::Http1 => builder.listen("test", tcp, move || {
|
||||
let app_cfg = AppConfig::__priv_test_new(
|
||||
false,
|
||||
local_addr.to_string(),
|
||||
local_addr,
|
||||
);
|
||||
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.h1(map_config(fac, move |_| app_cfg.clone()))
|
||||
.rustls(config.clone())
|
||||
}),
|
||||
HttpVer::Http2 => builder.listen("test", tcp, move || {
|
||||
let app_cfg =
|
||||
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.h1(map_config(fac, move |_| app_cfg.clone()))
|
||||
.rustls(config.clone())
|
||||
}),
|
||||
HttpVer::Http2 => builder.listen("test", tcp, move || {
|
||||
let app_cfg = AppConfig::__priv_test_new(
|
||||
false,
|
||||
local_addr.to_string(),
|
||||
local_addr,
|
||||
);
|
||||
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.h2(map_config(fac, move |_| app_cfg.clone()))
|
||||
.rustls(config.clone())
|
||||
}),
|
||||
HttpVer::Both => builder.listen("test", tcp, move || {
|
||||
let app_cfg =
|
||||
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.h2(map_config(fac, move |_| app_cfg.clone()))
|
||||
.rustls(config.clone())
|
||||
}),
|
||||
HttpVer::Both => builder.listen("test", tcp, move || {
|
||||
let app_cfg = AppConfig::__priv_test_new(
|
||||
false,
|
||||
local_addr.to_string(),
|
||||
local_addr,
|
||||
);
|
||||
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.finish(map_config(fac, move |_| app_cfg.clone()))
|
||||
.rustls(config.clone())
|
||||
}),
|
||||
},
|
||||
}
|
||||
.unwrap();
|
||||
let fac = factory()
|
||||
.into_factory()
|
||||
.map_err(|err| err.into().error_response());
|
||||
|
||||
HttpService::build()
|
||||
.client_timeout(timeout)
|
||||
.finish(map_config(fac, move |_| app_cfg.clone()))
|
||||
.rustls(config.clone())
|
||||
}),
|
||||
},
|
||||
}
|
||||
.expect("test server could not be created");
|
||||
|
||||
sys.block_on(async {
|
||||
let srv = srv.run();
|
||||
tx.send((rt::System::current(), srv, local_addr)).unwrap();
|
||||
started_tx
|
||||
.send((System::current(), srv.handle(), local_addr))
|
||||
.unwrap();
|
||||
|
||||
// drive server loop
|
||||
srv.await.unwrap();
|
||||
|
||||
// notify TestServer that server and system have shut down
|
||||
// all thread managed resources should be dropped at this point
|
||||
});
|
||||
|
||||
sys.run()
|
||||
let _ = thread_stop_tx.send(());
|
||||
});
|
||||
|
||||
let (system, server, addr) = rx.recv().unwrap();
|
||||
let (system, server, addr) = started_rx.recv().unwrap();
|
||||
|
||||
let client = {
|
||||
let connector = {
|
||||
@ -299,15 +340,15 @@ where
|
||||
.set_alpn_protos(b"\x02h2\x08http/1.1")
|
||||
.map_err(|e| log::error!("Can not set alpn protocol: {:?}", e));
|
||||
Connector::new()
|
||||
.conn_lifetime(time::Duration::from_secs(0))
|
||||
.timeout(time::Duration::from_millis(30000))
|
||||
.conn_lifetime(Duration::from_secs(0))
|
||||
.timeout(Duration::from_millis(30000))
|
||||
.ssl(builder.build())
|
||||
}
|
||||
#[cfg(not(feature = "openssl"))]
|
||||
{
|
||||
Connector::new()
|
||||
.conn_lifetime(time::Duration::from_secs(0))
|
||||
.timeout(time::Duration::from_millis(30000))
|
||||
.conn_lifetime(Duration::from_secs(0))
|
||||
.timeout(Duration::from_millis(30000))
|
||||
}
|
||||
};
|
||||
|
||||
@ -315,11 +356,12 @@ where
|
||||
};
|
||||
|
||||
TestServer {
|
||||
addr,
|
||||
server,
|
||||
thread_stop_rx,
|
||||
client,
|
||||
system,
|
||||
addr,
|
||||
tls,
|
||||
server,
|
||||
}
|
||||
}
|
||||
|
||||
@ -405,11 +447,12 @@ impl TestServerConfig {
|
||||
///
|
||||
/// See [`start`] for usage example.
|
||||
pub struct TestServer {
|
||||
addr: net::SocketAddr,
|
||||
server: ServerHandle,
|
||||
thread_stop_rx: mpsc::Receiver<()>,
|
||||
client: awc::Client,
|
||||
system: rt::System,
|
||||
addr: net::SocketAddr,
|
||||
tls: bool,
|
||||
server: Server,
|
||||
}
|
||||
|
||||
impl TestServer {
|
||||
@ -504,16 +547,31 @@ impl TestServer {
|
||||
self.client.headers()
|
||||
}
|
||||
|
||||
/// Gracefully stop HTTP server.
|
||||
pub async fn stop(self) {
|
||||
self.server.stop(true).await;
|
||||
/// Stop HTTP server.
|
||||
///
|
||||
/// Waits for spawned `Server` and `System` to shutdown (force) shutdown.
|
||||
pub async fn stop(mut self) {
|
||||
// signal server to stop
|
||||
self.server.stop(false).await;
|
||||
|
||||
// also signal system to stop
|
||||
// though this is handled by `ServerBuilder::exit_system` too
|
||||
self.system.stop();
|
||||
rt::time::sleep(time::Duration::from_millis(100)).await;
|
||||
|
||||
// wait for thread to be stopped but don't care about result
|
||||
let _ = self.thread_stop_rx.recv().await;
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for TestServer {
|
||||
fn drop(&mut self) {
|
||||
self.system.stop()
|
||||
// calls in this Drop impl should be enough to shut down the server, system, and thread
|
||||
// without needing to await anything
|
||||
|
||||
// signal server to stop
|
||||
let _ = self.server.stop(true);
|
||||
|
||||
// signal system to stop
|
||||
self.system.stop();
|
||||
}
|
||||
}
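With the handle and shutdown plumbing above, a test drives the server over real HTTP and then awaits a clean shutdown. A small usage sketch (handler and route are illustrative, not taken from this diff):

```rust
use actix_web::{get, App, HttpResponse, Responder};

#[get("/ping")]
async fn ping() -> impl Responder {
    HttpResponse::Ok().body("pong")
}

#[actix_web::test]
async fn ping_works() {
    // Boots the app on a random local port in a background thread.
    let srv = actix_test::start(|| App::new().service(ping));

    let res = srv.get("/ping").send().await.unwrap();
    assert!(res.status().is_success());

    // Waits for the spawned server, system, and thread to shut down.
    srv.stop().await;
}
```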
|
||||
|
@ -1,6 +1,7 @@
|
||||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
* Minimum supported Rust version (MSRV) is now 1.52.
|
||||
|
||||
|
||||
## 4.0.0-beta.7 - 2021-09-09
|
||||
|
@ -15,9 +15,9 @@ path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
actix = { version = "0.12.0", default-features = false }
|
||||
actix-codec = "0.4.0"
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false }
|
||||
actix-codec = "0.4.1"
|
||||
actix-http = "3.0.0-beta.14"
|
||||
actix-web = { version = "4.0.0-beta.11", default-features = false }
|
||||
|
||||
bytes = "1"
|
||||
bytestring = "1"
|
||||
@ -27,8 +27,8 @@ tokio = { version = "1", features = ["sync"] }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-test = "0.1.0-beta.3"
|
||||
actix-test = "0.1.0-beta.7"
|
||||
|
||||
awc = { version = "3.0.0-beta.8", default-features = false }
|
||||
env_logger = "0.8"
|
||||
awc = { version = "3.0.0-beta.11", default-features = false }
|
||||
env_logger = "0.9"
|
||||
futures-util = { version = "0.3.7", default-features = false }
|
||||
|
@ -4,7 +4,7 @@
|
||||
|
||||
[](https://crates.io/crates/actix-web-actors)
|
||||
[](https://docs.rs/actix-web-actors/4.0.0-beta.7)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-web-actors/4.0.0-beta.7)
|
||||
@ -14,4 +14,4 @@
|
||||
## Documentation & Resources
|
||||
|
||||
- [API Documentation](https://docs.rs/actix-web-actors)
|
||||
- Minimum supported Rust version: 1.51 or later
|
||||
- Minimum Supported Rust Version (MSRV): 1.52
|
||||
|
@ -3,6 +3,15 @@
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 0.5.0-beta.5 - 2021-10-20
|
||||
* Improve error recovery potential when macro input is invalid. [#2410]
|
||||
* Add `#[actix_web::test]` macro for setting up tests with a runtime. [#2409]
|
||||
* Minimum supported Rust version (MSRV) is now 1.52.
|
||||
|
||||
[#2410]: https://github.com/actix/actix-web/pull/2410
|
||||
[#2409]: https://github.com/actix/actix-web/pull/2409
|
||||
|
||||
|
||||
## 0.5.0-beta.4 - 2021-09-09
|
||||
* In routing macros, paths are now validated at compile time. [#2350]
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
@ -1,12 +1,13 @@
|
||||
[package]
|
||||
name = "actix-web-codegen"
|
||||
version = "0.5.0-beta.4"
|
||||
version = "0.5.0-beta.5"
|
||||
description = "Routing and runtime macros for Actix Web"
|
||||
readme = "README.md"
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web"
|
||||
documentation = "https://docs.rs/actix-web-codegen"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Rob Ede <robjtede@icloud.com>",
|
||||
]
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
@ -21,9 +22,10 @@ actix-router = "0.5.0-beta.2"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-test = "0.1.0-beta.3"
|
||||
actix-macros = "0.2.3"
|
||||
actix-test = "0.1.0-beta.7"
|
||||
actix-utils = "3.0.0"
|
||||
actix-web = "4.0.0-beta.9"
|
||||
actix-web = "4.0.0-beta.11"
|
||||
|
||||
futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] }
|
||||
trybuild = "1"
|
||||
|
@ -3,18 +3,18 @@
|
||||
> Routing and runtime macros for Actix Web.
|
||||
|
||||
[](https://crates.io/crates/actix-web-codegen)
|
||||
[](https://docs.rs/actix-web-codegen/0.5.0-beta.4)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||
[](https://docs.rs/actix-web-codegen/0.5.0-beta.5)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.4)
|
||||
[](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.5)
|
||||
[](https://crates.io/crates/actix-web-codegen)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
## Documentation & Resources
|
||||
|
||||
- [API Documentation](https://docs.rs/actix-web-codegen)
|
||||
- Minimum supported Rust version: 1.51 or later.
|
||||
- Minimum Supported Rust Version (MSRV): 1.52
|
||||
|
||||
## Compile Testing
|
||||
|
||||
|
@ -59,13 +59,14 @@
|
||||
#![recursion_limit = "512"]
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
use quote::quote;
|
||||
|
||||
mod route;
|
||||
|
||||
/// Creates resource handler, allowing multiple HTTP method guards.
|
||||
///
|
||||
/// # Syntax
|
||||
/// ```text
|
||||
/// ```plain
|
||||
/// #[route("path", method="HTTP_METHOD"[, attributes])]
|
||||
/// ```
|
||||
///
|
||||
@ -111,7 +112,7 @@ concat!("
|
||||
Creates route handler with `actix_web::guard::", stringify!($variant), "`.
|
||||
|
||||
# Syntax
|
||||
```text
|
||||
```plain
|
||||
#[", stringify!($method), r#"("path"[, attributes])]
|
||||
```
|
||||
|
||||
@ -157,24 +158,41 @@ method_macro! {
|
||||
}
|
||||
|
||||
/// Marks async main function as the actix system entry-point.
|
||||
///
|
||||
/// # Actix Web Re-export
|
||||
/// This macro can be applied with `#[actix_web::main]` when used in Actix Web applications.
|
||||
///
|
||||
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// #[actix_web_codegen::main]
|
||||
/// #[actix_web::main]
|
||||
/// async fn main() {
|
||||
/// async { println!("Hello world"); }.await
|
||||
/// }
|
||||
/// ```
|
||||
#[proc_macro_attribute]
|
||||
pub fn main(_: TokenStream, item: TokenStream) -> TokenStream {
|
||||
use quote::quote;
|
||||
let input = syn::parse_macro_input!(item as syn::ItemFn);
|
||||
(quote! {
|
||||
#[actix_web::rt::main(system = "::actix_web::rt::System")]
|
||||
#input
|
||||
let mut output: TokenStream = (quote! {
|
||||
#[::actix_web::rt::main(system = "::actix_web::rt::System")]
|
||||
})
|
||||
.into()
|
||||
.into();
|
||||
|
||||
output.extend(item);
|
||||
output
|
||||
}
|
||||
|
||||
/// Marks async test functions to use the actix system entry-point.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// #[actix_web::test]
|
||||
/// async fn test() {
|
||||
/// assert_eq!(async { "Hello world" }.await, "Hello world");
|
||||
/// }
|
||||
/// ```
|
||||
#[proc_macro_attribute]
|
||||
pub fn test(_: TokenStream, item: TokenStream) -> TokenStream {
|
||||
let mut output: TokenStream = (quote! {
|
||||
#[::actix_web::rt::test(system = "::actix_web::rt::System")]
|
||||
})
|
||||
.into();
|
||||
|
||||
output.extend(item);
|
||||
output
|
||||
}
|
||||
|
@ -220,7 +220,7 @@ fn guess_resource_type(typ: &syn::Type) -> ResourceType {
|
||||
impl Route {
|
||||
pub fn new(
|
||||
args: AttributeArgs,
|
||||
input: TokenStream,
|
||||
ast: syn::ItemFn,
|
||||
method: Option<MethodType>,
|
||||
) -> syn::Result<Self> {
|
||||
if args.is_empty() {
|
||||
@ -234,14 +234,11 @@ impl Route {
|
||||
),
|
||||
));
|
||||
}
|
||||
let ast: syn::ItemFn = syn::parse(input)?;
|
||||
|
||||
let name = ast.sig.ident.clone();
|
||||
|
||||
// Try and pull out the doc comments so that we can reapply them to the
|
||||
// generated struct.
|
||||
//
|
||||
// Note that multi line doc comments are converted to multiple doc
|
||||
// attributes.
|
||||
// Try and pull out the doc comments so that we can reapply them to the generated struct.
|
||||
// Note that multi line doc comments are converted to multiple doc attributes.
|
||||
let doc_attributes = ast
|
||||
.attrs
|
||||
.iter()
|
||||
@ -349,8 +346,28 @@ pub(crate) fn with_method(
|
||||
input: TokenStream,
|
||||
) -> TokenStream {
|
||||
let args = parse_macro_input!(args as syn::AttributeArgs);
|
||||
match Route::new(args, input, method) {
|
||||
|
||||
let ast = match syn::parse::<syn::ItemFn>(input.clone()) {
|
||||
Ok(ast) => ast,
|
||||
// on parse error, make IDEs happy; see fn docs
|
||||
Err(err) => return input_and_compile_error(input, err),
|
||||
};
|
||||
|
||||
match Route::new(args, ast, method) {
|
||||
Ok(route) => route.into_token_stream().into(),
|
||||
Err(err) => err.to_compile_error().into(),
|
||||
// on macro related error, make IDEs happy; see fn docs
|
||||
Err(err) => input_and_compile_error(input, err),
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts the error to a token stream and appends it to the original input.
|
||||
///
|
||||
/// Returning the original input in addition to the error is good for IDEs which can gracefully
|
||||
/// recover and show more precise errors within the macro body.
|
||||
///
|
||||
/// See <https://github.com/rust-analyzer/rust-analyzer/issues/10468> for more info.
|
||||
fn input_and_compile_error(mut item: TokenStream, err: syn::Error) -> TokenStream {
|
||||
let compile_err = TokenStream::from(err.to_compile_error());
|
||||
item.extend(compile_err);
|
||||
item
|
||||
}
|
||||
|
@ -256,7 +256,7 @@ async fn test_auto_async() {
|
||||
assert!(response.status().is_success());
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[actix_web::test]
|
||||
async fn test_wrap() {
|
||||
let srv = actix_test::start(|| App::new().service(get_wrap));
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
#[rustversion::stable(1.51)] // MSRV
|
||||
#[rustversion::stable(1.52)] // MSRV
|
||||
#[test]
|
||||
fn compile_macros() {
|
||||
let t = trybuild::TestCases::new();
|
||||
@ -13,4 +13,6 @@ fn compile_macros() {
|
||||
t.compile_fail("tests/trybuild/route-malformed-path-fail.rs");
|
||||
|
||||
t.pass("tests/trybuild/docstring-ok.rs");
|
||||
|
||||
t.pass("tests/trybuild/test-runtime.rs");
|
||||
}
|
||||
|
@ -4,8 +4,8 @@ error: HTTP method defined more than once: `GET`
|
||||
3 | #[route("/", method="GET", method="GET")]
|
||||
| ^^^^^
|
||||
|
||||
error[E0425]: cannot find value `index` in this scope
|
||||
error[E0277]: the trait bound `fn() -> impl std::future::Future {index}: HttpServiceFactory` is not satisfied
|
||||
--> $DIR/route-duplicate-method-fail.rs:12:55
|
||||
|
|
||||
12 | let srv = actix_test::start(|| App::new().service(index));
|
||||
| ^^^^^ not found in this scope
|
||||
| ^^^^^ the trait `HttpServiceFactory` is not implemented for `fn() -> impl std::future::Future {index}`
|
||||
|
@ -6,8 +6,8 @@ error: The #[route(..)] macro requires at least one `method` attribute
|
||||
|
|
||||
= note: this error originates in an attribute macro (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error[E0425]: cannot find value `index` in this scope
|
||||
error[E0277]: the trait bound `fn() -> impl std::future::Future {index}: HttpServiceFactory` is not satisfied
|
||||
--> $DIR/route-missing-method-fail.rs:12:55
|
||||
|
|
||||
12 | let srv = actix_test::start(|| App::new().service(index));
|
||||
| ^^^^^ not found in this scope
|
||||
| ^^^^^ the trait `HttpServiceFactory` is not implemented for `fn() -> impl std::future::Future {index}`
|
||||
|
@ -4,8 +4,8 @@ error: Unexpected HTTP method: `UNEXPECTED`
|
||||
3 | #[route("/", method="UNEXPECTED")]
|
||||
| ^^^^^^^^^^^^
|
||||
|
||||
error[E0425]: cannot find value `index` in this scope
|
||||
error[E0277]: the trait bound `fn() -> impl std::future::Future {index}: HttpServiceFactory` is not satisfied
|
||||
--> $DIR/route-unexpected-method-fail.rs:12:55
|
||||
|
|
||||
12 | let srv = actix_test::start(|| App::new().service(index));
|
||||
| ^^^^^ not found in this scope
|
||||
| ^^^^^ the trait `HttpServiceFactory` is not implemented for `fn() -> impl std::future::Future {index}`
|
||||
|
6
actix-web-codegen/tests/trybuild/test-runtime.rs
Normal file
@ -0,0 +1,6 @@
|
||||
#[actix_web::test]
|
||||
async fn my_test() {
|
||||
assert_eq!(async { 1 }.await, 1);
|
||||
}
|
||||
|
||||
fn main() {}
|
@ -3,6 +3,26 @@
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 3.0.0-beta.12 - 2021-11-30
|
||||
* Update `actix-tls` to `3.0.0-rc.1`. [#2474]
|
||||
|
||||
[#2474]: https://github.com/actix/actix-web/pull/2474
|
||||
|
||||
|
||||
## 3.0.0-beta.11 - 2021-11-22
|
||||
* No significant changes from `3.0.0-beta.10`.
|
||||
|
||||
|
||||
## 3.0.0-beta.10 - 2021-11-15
|
||||
* No significant changes from `3.0.0-beta.9`.
|
||||
|
||||
|
||||
## 3.0.0-beta.9 - 2021-10-20
|
||||
* Updated rustls to v0.20. [#2414]
|
||||
|
||||
[#2414]: https://github.com/actix/actix-web/pull/2414
|
||||
|
||||
|
||||
## 3.0.0-beta.8 - 2021-09-09
|
||||
### Changed
|
||||
* Send headers within the redirect requests. [#2310]
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "awc"
|
||||
version = "3.0.0-beta.8"
|
||||
version = "3.0.0-beta.12"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"fakeshadow <24548779@qq.com>",
|
||||
@ -14,7 +14,7 @@ categories = [
|
||||
"web-programming::websocket",
|
||||
]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
@ -30,10 +30,10 @@ features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-z
|
||||
default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
|
||||
|
||||
# openssl
|
||||
openssl = ["tls-openssl", "actix-http/openssl"]
|
||||
openssl = ["tls-openssl", "actix-tls/openssl"]
|
||||
|
||||
# rustls
|
||||
rustls = ["tls-rustls", "actix-http/rustls"]
|
||||
rustls = ["tls-rustls", "actix-tls/rustls"]
|
||||
|
||||
# Brotli algorithm content-encoding support
|
||||
compress-brotli = ["actix-http/compress-brotli", "__compress"]
|
||||
@ -46,24 +46,34 @@ compress-zstd = ["actix-http/compress-zstd", "__compress"]
|
||||
cookies = ["cookie"]
|
||||
|
||||
# trust-dns as dns resolver
|
||||
trust-dns = ["actix-http/trust-dns"]
|
||||
trust-dns = ["trust-dns-resolver"]
|
||||
|
||||
# Internal (PRIVATE!) features used to aid testing and checking feature status.
# Don't rely on these whatsoever. They may disappear at any time.
|
||||
__compress = []
|
||||
|
||||
[dependencies]
|
||||
actix-codec = "0.4.0"
|
||||
actix-service = "2.0.0"
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-rt = { version = "2.1", default-features = false }
|
||||
# Enable dangerous feature for testing and local network usage:
|
||||
# - HTTP/2 over TCP(No Tls).
|
||||
# DO NOT enable this over any internet use case.
|
||||
dangerous-h2c = []
|
||||
|
||||
[dependencies]
|
||||
actix-codec = "0.4.1"
|
||||
actix-service = "2.0.0"
|
||||
actix-http = "3.0.0-beta.14"
|
||||
actix-rt = { version = "2.1", default-features = false }
|
||||
actix-tls = { version = "3.0.0-rc.1", features = ["connect", "uri"] }
|
||||
actix-utils = "3.0.0"
|
||||
|
||||
ahash = "0.7"
|
||||
base64 = "0.13"
|
||||
bytes = "1"
|
||||
cfg-if = "1"
|
||||
cookie = { version = "0.15", features = ["percent-encode"], optional = true }
|
||||
derive_more = "0.99.5"
|
||||
futures-core = { version = "0.3.7", default-features = false }
|
||||
futures-util = { version = "0.3.7", default-features = false }
|
||||
h2 = "0.3"
|
||||
http = "0.2.5"
|
||||
itoa = "0.4"
|
||||
log =" 0.4"
|
||||
mime = "0.3"
|
||||
@ -73,24 +83,30 @@ rand = "0.8"
|
||||
serde = "1.0"
|
||||
serde_json = "1.0"
|
||||
serde_urlencoded = "0.7"
|
||||
tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
|
||||
tls-rustls = { version = "0.19.0", package = "rustls", optional = true, features = ["dangerous_configuration"] }
|
||||
tokio = { version = "1", features = ["sync"] }
|
||||
|
||||
cookie = { version = "0.15", features = ["percent-encode"], optional = true }
|
||||
|
||||
tls-openssl = { package = "openssl", version = "0.10.9", optional = true }
|
||||
tls-rustls = { package = "rustls", version = "0.20.0", optional = true, features = ["dangerous_configuration"] }
|
||||
|
||||
trust-dns-resolver = { version = "0.20.0", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-web = { version = "4.0.0-beta.9", features = ["openssl"] }
|
||||
actix-http = { version = "3.0.0-beta.10", features = ["openssl"] }
|
||||
actix-http-test = { version = "3.0.0-beta.5", features = ["openssl"] }
|
||||
actix-web = { version = "4.0.0-beta.11", features = ["openssl"] }
|
||||
actix-http = { version = "3.0.0-beta.14", features = ["openssl"] }
|
||||
actix-http-test = { version = "3.0.0-beta.7", features = ["openssl"] }
|
||||
actix-utils = "3.0.0"
|
||||
actix-server = "2.0.0-beta.3"
|
||||
actix-tls = { version = "3.0.0-beta.5", features = ["openssl", "rustls"] }
|
||||
actix-test = { version = "0.1.0-beta.3", features = ["openssl", "rustls"] }
|
||||
actix-server = "2.0.0-beta.9"
|
||||
actix-tls = { version = "3.0.0-rc.1", features = ["openssl", "rustls"] }
|
||||
actix-test = { version = "0.1.0-beta.7", features = ["openssl", "rustls"] }
|
||||
|
||||
brotli2 = "0.3.2"
|
||||
env_logger = "0.8"
|
||||
env_logger = "0.9"
|
||||
flate2 = "1.0.13"
|
||||
futures-util = { version = "0.3.7", default-features = false }
|
||||
rcgen = "0.8"
|
||||
webpki = "0.21"
|
||||
rustls-pemfile = "0.2"
|
||||
|
||||
[[example]]
|
||||
name = "client"
|
||||
|
Some files were not shown because too many files have changed in this diff.