mirror of https://github.com/fafhrd91/actix-web synced 2025-07-04 09:56:22 +02:00

Compare commits


79 Commits

Author SHA1 Message Date
ab5eb7c1aa prepare actix-multipart release 0.4.0-beta.8 2021-11-22 18:48:14 +00:00
18b8ef0765 prepare actix-test release 0.1.0-beta.7 2021-11-22 18:47:43 +00:00
b806b4773c prepare actix-http-test release 3.0.0-beta.7 2021-11-22 18:46:58 +00:00
0062d99b6f prepare actix-files release 0.6.0-beta.9 2021-11-22 18:46:19 +00:00
99e6a9c26d prepare awc release 3.0.0-beta.11 2021-11-22 18:41:43 +00:00
5f5bd2184e prepare actix-web release 4.0.0-beta.12 2021-11-22 18:20:55 +00:00
88e074879d prepare actix-http release 3.0.0-beta.13 2021-11-22 18:19:09 +00:00
e7987e7429 awc: support http2 over plain tcp with feature flag (#2439)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2021-11-22 18:16:56 +00:00
a172f5968d prepare for actix-tls v3 beta 9 (#2456) 2021-11-22 15:37:23 +00:00
a2a42ec152 use anybody in doc test 2021-11-22 01:35:33 +00:00
dd347e0bd0 implement io-uring for actix-files (#2408)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2021-11-22 01:19:09 +00:00
194a691537 files: 304 Not Modified responses omit Content-Length header (#2453) 2021-11-19 14:04:12 +00:00
56ee97f722 add files path traversal tests 2021-11-18 18:14:34 +00:00
66620a1012 simplify handler.rs (#2450) 2021-11-17 20:11:35 +00:00
e33618ed6d ensure content disposition header in multipart (#2451)
Co-authored-by: Craig Pastro <craig.pastro@gmail.com>
2021-11-17 17:44:50 +00:00
1fe309bcc6 increase ci test timeout 2021-11-17 15:32:42 +00:00
168a7284d3 fix actix_http::Error conversion. (#2449) 2021-11-17 13:13:05 +00:00
68a3acb9c2 bump zstd dep 2021-11-16 23:22:29 +00:00
84c6d25fd3 bump env logger dep 2021-11-16 23:07:08 +00:00
0a135c7dc9 bump actix-codec to 0.4.1 2021-11-16 22:41:24 +00:00
668a33c793 remove internal usage of Body 2021-11-16 22:10:30 +00:00
d8cbb879dd make AnyBody generic on Body type (#2448) 2021-11-16 21:41:35 +00:00
13cf5a9e44 remove chunked encoding header for websockets 2021-11-16 16:55:45 +00:00
4df1cd78b7 simplify AnyBody and BodySize (#2446) 2021-11-16 09:21:10 +00:00
e8a0e16863 run tarpaulin on workspace 2021-11-15 18:11:51 +00:00
a2f59c02f7 bump actix-server to beta 9 (#2442) 2021-11-15 04:03:33 +00:00
2754608f3c fix codegen tests 2021-11-08 02:46:43 +00:00
c020cedb63 Log internal server errors (#2387) 2021-11-07 17:02:23 +00:00
5e554dca35 fix awc clippy warning (#2431) 2021-11-04 15:57:55 +00:00
6ec2d7b909 add keep alive to h2 through ping pong (#2433) 2021-11-04 15:15:23 +00:00
ec6d284a8e improve "data no configured" message (#2429) 2021-10-31 13:19:21 +00:00
be9530eb72 avoid building actix-tls with no-default-features (#2426) 2021-10-26 13:16:48 +01:00
855e260fdb Add html_utf8 content type. (#2423) 2021-10-26 09:24:38 +01:00
d13854505f move actix_http::client module to awc (#2425) 2021-10-26 00:37:40 +01:00
d40b6748bc remove dead dep (#2420) 2021-10-22 00:22:58 +01:00
c79b9a0df3 prepare actix-files release 0.6.0-beta.8 2021-10-20 23:32:46 +01:00
4af414064b prepare actix-multipart release 0.4.0-beta.7 2021-10-20 23:31:46 +01:00
9abe166d52 actix-web beta 10 releases (#2417) 2021-10-20 22:32:05 +01:00
c09ec6af4c split off coverage ci job 2021-10-20 02:27:30 +01:00
37f2bf5625 clippy 2021-10-20 02:06:51 +01:00
4f6f0b0137 chore: Bump rustls to 0.20.0 (#2416)
Co-authored-by: Kirill Mironov <vetrokm@gmail.com>
2021-10-20 02:00:11 +01:00
591abc37c3 add test runtime macro (#2409) 2021-10-19 17:30:32 +01:00
ad22cc4e7f bump msrv to 1.52.1 2021-10-19 01:59:28 +01:00
efdf3ab1c3 clippy 2021-10-19 01:32:58 +01:00
6b3ea4fc61 copy original route macro input with compile errors (#2410) 2021-10-14 18:06:31 +01:00
99985fc4ec web: implement into_inner for Data<T: ?Sized> (#2407) 2021-10-12 18:35:33 +01:00
a6707fb7ee Remove checked_expr (#2401) 2021-10-11 18:28:09 +01:00
a3806cde19 fix changelog 2021-09-12 22:41:08 +01:00
efefa0d0ce web: add option to not require content type header for Json (#2362)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2021-09-11 17:27:50 +01:00
450ff5fa1d improve extract docs (#2384) 2021-09-11 16:48:47 +01:00
8ae278cb68 Remove FromRequest::Config (#2233)
Co-authored-by: Jonas Platte <jplatte@users.noreply.github.com>
Co-authored-by: Igor Aleksanov <popzxc@yandex.ru>
Co-authored-by: Rob Ede <robjtede@icloud.com>
2021-09-11 01:11:16 +01:00
46699e3429 remove time dep from actix-http (#2383) 2021-09-11 00:01:01 +01:00
ba88d3b4bf prepare actix-web beta.9 releases (#2381)
* prepare actix-router release 0.5.0-beta.2

* prepare actix-web-codegen release 0.5.0-beta.4

* prepare actix-http release 3.0.0-beta.10

* prepare awc release 3.0.0-beta.8

* prepare actix-web release 4.0.0-beta.9

* prepare actix-http-test release 3.0.0-beta.6

* prepare actix-test release 0.1.0-beta.4

* prepare actix-files release 0.6.0-beta.7

* prepare actix-multipart release 0.4.0-beta.6

* prepare actix-web-actors release 4.0.0-beta.7

* fix http test version

* re-add patch

* update router repo url

* fix http test readme version
2021-09-09 01:35:41 +01:00
8dd30611fa accept owned strings in TestRequest::param (#2172)
* accept owned strings in TestRequest::param

* bump actix-router to 0.4.0

* update changelog

Co-authored-by: Rob Ede <robjtede@icloud.com>
2021-09-09 00:42:40 +01:00
1383c7d701 speed up ci 2021-09-08 17:42:14 +01:00
d8a0f46f26 refactor web module (#2379) 2021-09-03 18:00:43 +01:00
53ec66caf4 Send headers within the redirect requests. (#2310) 2021-09-01 20:16:41 +01:00
93112644d3 non exhaustive content encoding (#2377) 2021-09-01 09:53:26 +01:00
ddc8c16cb3 Fix quality parse error in Accept-Encoding HTTP header (#2344) 2021-09-01 09:08:29 +01:00
373b3f91df rework ResourceMap internals (#2337) 2021-09-01 04:48:43 +01:00
7d01ece355 ResourceDef: support multiple-patterns as prefix (#2356)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2021-08-31 14:15:22 +01:00
c50eef6166 "deprecate" calls to NormalizePath::default 2021-08-31 04:19:02 +01:00
dade818eba add middleware composition tests (#2375) 2021-08-31 04:18:54 +01:00
ae35e69382 use rust 1.51 features 2021-08-31 02:52:29 +01:00
5128b1bdfc bump msrv to 1.51 2021-08-30 23:19:03 +01:00
168b2f227d compile time validation of path (#2350)
* compile time validation of path

* added trybuild err message

* Update Cargo.toml

* add changelog entry

* test more cases of path validation

* fmt

Co-authored-by: Rob Ede <robjtede@icloud.com>
2021-08-30 21:50:40 +01:00
4bb32fb19b [fix] Bump actix-http dependency to 3.0.0-beta.9, up from 3.0.0-beta.8 (#2360)
Fixes https://rustsec.org/advisories/RUSTSEC-2021-0081
2021-08-30 20:07:12 +01:00
f9da6e48e0 ResourceDef: define behavior for prefix with trailing slash (#2355)
* ResourceDef: define behavior

* fix tests

* add scope test

* revert firestorm bump

* update changelog

* fmt

Co-authored-by: Rob Ede <robjtede@icloud.com>
2021-08-30 20:05:49 +01:00
ff07816b65 update httparse for uninit header parsing (#2374) 2021-08-29 01:42:22 +01:00
5f412c67db clippy 2021-08-13 18:49:58 +01:00
a0c0bff944 Don't create a slice to potential uninit data on h1 encoder (#2364)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2021-08-13 18:41:19 +01:00
384164cc14 update graphs 2021-08-12 21:04:26 +01:00
e965d8298f HRS security fixes (#2363) 2021-08-12 20:18:09 +01:00
f6e69919ed update to router 0.5.0 beta (#2339) 2021-08-06 22:42:31 +01:00
293c52c3ef re-export ServiceFactory (#2325) 2021-07-12 16:55:41 +01:00
5a14ffeef2 clippy fixes (#2296) 2021-07-12 16:55:24 +01:00
7ae132cb68 Update MIGRATION.md (#2315)
Minor edit
2021-07-12 02:02:19 +01:00
d8deed0475 fix tests with tokio 1.8.1 (#2317) 2021-07-09 23:57:21 +01:00
2504c2ecb0 Move dev module to separate file, update description (#2293) 2021-06-27 07:44:56 +01:00
178 changed files with 9479 additions and 3302 deletions


@@ -1,9 +1,12 @@
 [alias]
-chk = "check --workspace --all-features --tests --examples --bins"
-lint = "clippy --workspace --tests --examples"
-ci-min = "hack check --workspace --no-default-features"
-ci-min-test = "hack check --workspace --no-default-features --tests --examples"
-ci-default = "check --workspace --bins --tests --examples"
-ci-full = "check --workspace --all-features --bins --tests --examples"
-ci-test = "test --workspace --all-features --lib --tests --no-fail-fast -- --nocapture"
-ci-doctest = "hack test --workspace --all-features --doc --no-fail-fast -- --nocapture"
+lint = "clippy --workspace --tests --examples --bins -- -Dclippy::todo"
+lint-all = "clippy --workspace --all-features --tests --examples --bins -- -Dclippy::todo"
+
+# lib checking
+ci-check-min = "hack --workspace check --no-default-features"
+ci-check-default = "hack --workspace check"
+ci-check-all-feature-powerset="hack --workspace --feature-powerset --skip=__compress,io-uring check"
+ci-check-all-feature-powerset-linux="hack --workspace --feature-powerset --skip=__compress check"
+
+# testing
+ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture"


@@ -8,7 +8,7 @@ PR_TYPE
 ## PR Checklist
-<!-- Check your PR fulfills the following items. ->>
+<!-- Check your PR fulfills the following items. -->
 <!-- For draft PRs check the boxes as you complete them. -->
 - [ ] Tests for the changes have been added / updated.


@ -14,9 +14,9 @@ jobs:
target: target:
- { name: Linux, os: ubuntu-latest, triple: x86_64-unknown-linux-gnu } - { name: Linux, os: ubuntu-latest, triple: x86_64-unknown-linux-gnu }
- { name: macOS, os: macos-latest, triple: x86_64-apple-darwin } - { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
- { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc } - { name: Windows, os: windows-2022, triple: x86_64-pc-windows-msvc }
version: version:
- 1.46.0 # MSRV - 1.52.0 # MSRV
- stable - stable
- nightly - nightly
@ -24,12 +24,16 @@ jobs:
runs-on: ${{ matrix.target.os }} runs-on: ${{ matrix.target.os }}
env: env:
CI: 1
CARGO_INCREMENTAL: 0
VCPKGRS_DYNAMIC: 1 VCPKGRS_DYNAMIC: 1
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
# install OpenSSL on Windows # install OpenSSL on Windows
# TODO: GitHub actions docs state that OpenSSL is
# already installed on these Windows machines somewhere
- name: Set vcpkg root - name: Set vcpkg root
if: matrix.target.triple == 'x86_64-pc-windows-msvc' if: matrix.target.triple == 'x86_64-pc-windows-msvc'
run: echo "VCPKG_ROOT=$env:VCPKG_INSTALLATION_ROOT" | Out-File -FilePath $env:GITHUB_ENV -Append run: echo "VCPKG_ROOT=$env:VCPKG_INSTALLATION_ROOT" | Out-File -FilePath $env:GITHUB_ENV -Append
@ -46,8 +50,7 @@ jobs:
- name: Generate Cargo.lock - name: Generate Cargo.lock
uses: actions-rs/cargo@v1 uses: actions-rs/cargo@v1
with: with: { command: generate-lockfile }
command: generate-lockfile
- name: Cache Dependencies - name: Cache Dependencies
uses: Swatinem/rust-cache@v1.2.0 uses: Swatinem/rust-cache@v1.2.0
@ -59,52 +62,128 @@ jobs:
- name: check minimal - name: check minimal
uses: actions-rs/cargo@v1 uses: actions-rs/cargo@v1
with: { command: ci-min } with: { command: ci-check-min }
- name: check minimal + tests
uses: actions-rs/cargo@v1
with: { command: ci-min-test }
- name: check default - name: check default
uses: actions-rs/cargo@v1 uses: actions-rs/cargo@v1
with: { command: ci-default } with: { command: ci-check-default }
- name: check full
uses: actions-rs/cargo@v1
with: { command: ci-full }
- name: tests - name: tests
uses: actions-rs/cargo@v1 timeout-minutes: 60
timeout-minutes: 40
with:
command: ci-test
args: --skip=test_reading_deflate_encoding_large_random_rustls
- name: doc tests
# due to unknown issue with running doc tests on macOS
if: matrix.target.os == 'ubuntu-latest'
uses: actions-rs/cargo@v1
timeout-minutes: 40
with: { command: ci-doctest }
- name: Generate coverage file
if: >
matrix.target.os == 'ubuntu-latest'
&& matrix.version == 'stable'
&& github.ref == 'refs/heads/master'
run: | run: |
cargo install cargo-tarpaulin --vers "^0.13" cargo test --lib --tests -p=actix-router --all-features
cargo tarpaulin --out Xml --verbose cargo test --lib --tests -p=actix-http --all-features
- name: Upload to Codecov cargo test --lib --tests -p=actix-web --features=rustls,openssl -- --skip=test_reading_deflate_encoding_large_random_rustls
if: > cargo test --lib --tests -p=actix-web-codegen --all-features
matrix.target.os == 'ubuntu-latest' cargo test --lib --tests -p=awc --all-features
&& matrix.version == 'stable' cargo test --lib --tests -p=actix-http-test --all-features
&& github.ref == 'refs/heads/master' cargo test --lib --tests -p=actix-test --all-features
uses: codecov/codecov-action@v1 cargo test --lib --tests -p=actix-files
with: cargo test --lib --tests -p=actix-multipart --all-features
file: cobertura.xml cargo test --lib --tests -p=actix-web-actors --all-features
- name: tests (io-uring)
if: matrix.target.os == 'ubuntu-latest'
timeout-minutes: 60
run: >
sudo bash -c "ulimit -Sl 512
&& ulimit -Hl 512
&& PATH=$PATH:/usr/share/rust/.cargo/bin
&& RUSTUP_TOOLCHAIN=${{ matrix.version }} cargo test --lib --tests -p=actix-files --all-features"
- name: Clear the cargo caches - name: Clear the cargo caches
run: | run: |
cargo install cargo-cache --version 0.6.2 --no-default-features --features ci-autoclean cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
cargo-cache cargo-cache
ci_feature_powerset_check:
name: Verify Feature Combinations
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install stable
uses: actions-rs/toolchain@v1
with:
toolchain: stable-x86_64-unknown-linux-gnu
profile: minimal
override: true
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.2.0
- name: Install cargo-hack
uses: actions-rs/cargo@v1
with:
command: install
args: cargo-hack
- name: check feature combinations
uses: actions-rs/cargo@v1
with: { command: ci-check-all-feature-powerset }
- name: check feature combinations
uses: actions-rs/cargo@v1
with: { command: ci-check-all-feature-powerset-linux }
coverage:
name: coverage
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install stable
uses: actions-rs/toolchain@v1
with:
toolchain: stable-x86_64-unknown-linux-gnu
profile: minimal
override: true
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.2.0
- name: Generate coverage file
if: github.ref == 'refs/heads/master'
run: |
cargo install cargo-tarpaulin --vers "^0.13"
cargo tarpaulin --workspace --features=rustls,openssl --out Xml --verbose
- name: Upload to Codecov
if: github.ref == 'refs/heads/master'
uses: codecov/codecov-action@v1
with: { file: cobertura.xml }
rustdoc:
name: rustdoc
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install Rust (nightly)
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-x86_64-unknown-linux-gnu
profile: minimal
override: true
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.3.0
# - name: Install cargo-hack
# uses: actions-rs/cargo@v1
# with:
# command: install
# args: cargo-hack
- name: doc tests
uses: actions-rs/cargo@v1
timeout-minutes: 60
with: { command: ci-doctest }


@@ -3,6 +3,76 @@
 ## Unreleased - 2021-xx-xx
 
+## 4.0.0-beta.12 - 2021-11-22
+### Changed
+* Compress middleware's response type is now `AnyBody<Encoder<B>>`. [#2448]
+### Fixed
+* Relax `Unpin` bound on `S` (stream) parameter of `HttpResponseBuilder::streaming`. [#2448]
+### Removed
+* `dev::ResponseBody` re-export; its function is replaced by the new `dev::AnyBody` enum. [#2446]
+
+[#2446]: https://github.com/actix/actix-web/pull/2446
+[#2448]: https://github.com/actix/actix-web/pull/2448
+
+## 4.0.0-beta.11 - 2021-11-15
+### Added
+* Re-export `dev::ServerHandle` from `actix-server`. [#2442]
+### Changed
+* `ContentType::html` now produces `text/html; charset=utf-8` instead of `text/html`. [#2423]
+* Update `actix-server` to `2.0.0-beta.9`. [#2442]
+
+[#2423]: https://github.com/actix/actix-web/pull/2423
+[#2442]: https://github.com/actix/actix-web/pull/2442
+
+## 4.0.0-beta.10 - 2021-10-20
+### Added
+* Option to allow `Json` extractor to work without a `Content-Type` header present. [#2362]
+* `#[actix_web::test]` macro for setting up tests with a runtime. [#2409]
+### Changed
+* Associated type `FromRequest::Config` was removed. [#2233]
+* Inner field made private on `web::Payload`. [#2384]
+* `Data::into_inner` and `Data::get_ref` no longer requires `T: Sized`. [#2403]
+* Updated rustls to v0.20. [#2414]
+* Minimum supported Rust version (MSRV) is now 1.52.
+### Removed
+* Useless `ServiceResponse::checked_expr` method. [#2401]
+
+[#2233]: https://github.com/actix/actix-web/pull/2233
+[#2362]: https://github.com/actix/actix-web/pull/2362
+[#2384]: https://github.com/actix/actix-web/pull/2384
+[#2401]: https://github.com/actix/actix-web/pull/2401
+[#2403]: https://github.com/actix/actix-web/pull/2403
+[#2409]: https://github.com/actix/actix-web/pull/2409
+[#2414]: https://github.com/actix/actix-web/pull/2414
+
+## 4.0.0-beta.9 - 2021-09-09
+### Added
+* Re-export actix-service `ServiceFactory` in `dev` module. [#2325]
+### Changed
+* Compress middleware will return 406 Not Acceptable when no content encoding is acceptable to the client. [#2344]
+* Move `BaseHttpResponse` to `dev::Response`. [#2379]
+* Enable `TestRequest::param` to accept more than just static strings. [#2172]
+* Minimum supported Rust version (MSRV) is now 1.51.
+### Fixed
+* Fix quality parse error in Accept-Encoding header. [#2344]
+* Re-export correct type at `web::HttpResponse`. [#2379]
+
+[#2172]: https://github.com/actix/actix-web/pull/2172
+[#2325]: https://github.com/actix/actix-web/pull/2325
+[#2344]: https://github.com/actix/actix-web/pull/2344
+[#2379]: https://github.com/actix/actix-web/pull/2379
+
 ## 4.0.0-beta.8 - 2021-06-26
 ### Added
 * Add `ServiceRequest::parts_mut`. [#2177]
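As a rough illustration of two changelog entries above (the `#[actix_web::test]` macro from #2409 and the optional `Content-Type` requirement for the `Json` extractor from #2362), a minimal test sketch follows. The `JsonConfig::content_type_required` method name and the handler are assumptions for illustration, not text from the diff.

```rust
use actix_web::{test, web, App, HttpResponse};
use serde::Deserialize;

#[derive(Deserialize)]
struct Info {
    name: String,
}

// `#[actix_web::test]` (added in 4.0.0-beta.10) sets up the async runtime for
// the test, replacing the `#[actix_rt::test]` attribute used in older examples.
#[actix_web::test]
async fn json_without_content_type() {
    let app = test::init_service(
        App::new()
            // Assumed config method from PR #2362: accept `web::Json` payloads
            // even when the request has no `Content-Type: application/json` header.
            .app_data(web::JsonConfig::default().content_type_required(false))
            .route(
                "/",
                web::post().to(|info: web::Json<Info>| async move {
                    HttpResponse::Ok().body(info.name.clone())
                }),
            ),
    )
    .await;

    let req = test::TestRequest::post()
        .uri("/")
        .set_payload(r#"{"name":"actix"}"#)
        .to_request();
    let res = test::call_service(&app, req).await;
    assert!(res.status().is_success());
}
```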


@@ -1,6 +1,6 @@
 [package]
 name = "actix-web"
-version = "4.0.0-beta.8"
+version = "4.0.0-beta.12"
 authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
 description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
 keywords = ["actix", "http", "web", "framework", "async"]
@@ -11,19 +11,21 @@ categories = [
   "web-programming::websocket"
 ]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web"
+repository = "https://github.com/actix/actix-web.git"
 license = "MIT OR Apache-2.0"
 edition = "2018"
 
 [package.metadata.docs.rs]
 # features that docs.rs will build with
 features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies", "secure-cookies"]
+rustdoc-args = ["--cfg", "docsrs"]
 
 [lib]
 name = "actix_web"
 path = "src/lib.rs"
 
 [workspace]
+resolver = "2"
 members = [
   ".",
   "awc",
@@ -34,9 +36,8 @@ members = [
   "actix-web-codegen",
   "actix-http-test",
   "actix-test",
+  "actix-router",
 ]
-# enable when MSRV is 1.51+
-# resolver = "2"
 
 [features]
 default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
@@ -60,22 +61,25 @@ openssl = ["actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]
 # rustls
 rustls = ["actix-http/rustls", "actix-tls/accept", "actix-tls/rustls"]
 
-# Internal (PRIVATE!) features used to aid testing and cheking feature status.
+# Internal (PRIVATE!) features used to aid testing and checking feature status.
 # Don't rely on these whatsoever. They may disappear at anytime.
 __compress = []
 
+# io-uring feature only avaiable for Linux OSes.
+experimental-io-uring = ["actix-server/io-uring"]
+
 [dependencies]
-actix-codec = "0.4.0"
-actix-macros = "0.2.1"
-actix-router = "0.2.7"
-actix-rt = "2.2"
-actix-server = "2.0.0-beta.3"
+actix-codec = "0.4.1"
+actix-macros = "0.2.3"
+actix-rt = "2.3"
+actix-server = "2.0.0-beta.9"
 actix-service = "2.0.0"
 actix-utils = "3.0.0"
-actix-tls = { version = "3.0.0-beta.5", default-features = false, optional = true }
-actix-web-codegen = "0.5.0-beta.2"
-actix-http = "3.0.0-beta.8"
+actix-tls = { version = "3.0.0-beta.9", default-features = false, optional = true }
+actix-http = "3.0.0-beta.13"
+actix-router = "0.5.0-beta.2"
+actix-web-codegen = "0.5.0-beta.5"
 
 ahash = "0.7"
 bytes = "1"
@@ -97,24 +101,30 @@ regex = "1.4"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 serde_urlencoded = "0.7"
-smallvec = "1.6"
+smallvec = "1.6.1"
 socket2 = "0.4.0"
-time = { version = "0.2.23", default-features = false, features = ["std"] }
+time = { version = "0.3", default-features = false, features = ["formatting"] }
 url = "2.1"
 
 [dev-dependencies]
-actix-test = { version = "0.1.0-beta.3", features = ["openssl", "rustls"] }
-awc = { version = "3.0.0-beta.7", features = ["openssl"] }
+actix-test = { version = "0.1.0-beta.7", features = ["openssl", "rustls"] }
+awc = { version = "3.0.0-beta.11", features = ["openssl"] }
 brotli2 = "0.3.2"
 criterion = { version = "0.3", features = ["html_reports"] }
-env_logger = "0.8"
+env_logger = "0.9"
 flate2 = "1.0.13"
-zstd = "0.7"
+futures-util = { version = "0.3.7", default-features = false, features = ["std"] }
 rand = "0.8"
 rcgen = "0.8"
+rustls-pemfile = "0.2"
 tls-openssl = { package = "openssl", version = "0.10.9" }
-tls-rustls = { package = "rustls", version = "0.19.0" }
+tls-rustls = { package = "rustls", version = "0.20.0" }
+zstd = "0.9"
+
+[profile.dev]
+# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.
+debug = 0
 
 [profile.release]
 lto = true
@@ -126,6 +136,7 @@ actix-files = { path = "actix-files" }
 actix-http = { path = "actix-http" }
 actix-http-test = { path = "actix-http-test" }
 actix-multipart = { path = "actix-multipart" }
+actix-router = { path = "actix-router" }
 actix-test = { path = "actix-test" }
 actix-web = { path = "." }
 actix-web-actors = { path = "actix-web-actors" }


@@ -3,13 +3,16 @@
 * The default `NormalizePath` behavior now strips trailing slashes by default. This was
   previously documented to be the case in v3 but the behavior now matches. The effect is that
   routes defined with trailing slashes will become inaccessible when
-  using `NormalizePath::default()`.
+  using `NormalizePath::default()`. As such, calling `NormalizePath::default()` will log a warning.
+  It is advised that the `new` method be used instead.
 
-  Before: `#[get("/test/")`
+  Before: `#[get("/test/")]`
 
-  After: `#[get("/test")`
+  After: `#[get("/test")]`
 
   Alternatively, explicitly require trailing slashes: `NormalizePath::new(TrailingSlash::Always)`.
 
+* The `type Config` of `FromRequest` was removed.
+
 * Feature flag `compress` has been split into its supported algorithm (brotli, gzip, zstd).
   By default all compression algorithms are enabled.
   To select algorithm you want to include with `middleware::Compress` use following flags:
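To make the `NormalizePath` migration note above concrete, here is a minimal sketch of replacing `NormalizePath::default()` with an explicit construction; the bind address and route are illustrative only.

```rust
use actix_web::{
    middleware::{NormalizePath, TrailingSlash},
    web, App, HttpResponse, HttpServer,
};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Explicit replacement for `NormalizePath::default()`: trailing slashes
            // are trimmed, so requests to `/test/` reach the `/test` route below.
            .wrap(NormalizePath::new(TrailingSlash::Trim))
            .route("/test", web::get().to(|| async { HttpResponse::Ok().finish() }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```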


@@ -6,10 +6,10 @@
 <p>
 
 [![crates.io](https://img.shields.io/crates/v/actix-web?label=latest)](https://crates.io/crates/actix-web)
-[![Documentation](https://docs.rs/actix-web/badge.svg?version=4.0.0-beta.8)](https://docs.rs/actix-web/4.0.0-beta.8)
-[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
+[![Documentation](https://docs.rs/actix-web/badge.svg?version=4.0.0-beta.12)](https://docs.rs/actix-web/4.0.0-beta.12)
+[![Version](https://img.shields.io/badge/rustc-1.52+-ab6000.svg)](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
 ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-web.svg)
-[![Dependency Status](https://deps.rs/crate/actix-web/4.0.0-beta.8/status.svg)](https://deps.rs/crate/actix-web/4.0.0-beta.8)
+[![Dependency Status](https://deps.rs/crate/actix-web/4.0.0-beta.12/status.svg)](https://deps.rs/crate/actix-web/4.0.0-beta.12)
 <br />
 [![build status](https://github.com/actix/actix-web/workflows/CI%20%28Linux%29/badge.svg?branch=master&event=push)](https://github.com/actix/actix-web/actions)
 [![codecov](https://codecov.io/gh/actix/actix-web/branch/master/graph/badge.svg)](https://codecov.io/gh/actix/actix-web)
@@ -32,7 +32,7 @@
 * SSL support using OpenSSL or Rustls
 * Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/))
 * Includes an async [HTTP client](https://docs.rs/awc/)
-* Runs on stable Rust 1.46+
+* Runs on stable Rust 1.52+
 
 ## Documentation


@@ -3,6 +3,26 @@
 ## Unreleased - 2021-xx-xx
 
+## 0.6.0-beta.9 - 2021-11-22
+* Add crate feature `experimental-io-uring`, enabling async file I/O to be utilized. This feature is only available on Linux OSes with recent kernel versions. This feature is semver-exempt. [#2408]
+* Add `NamedFile::open_async`. [#2408]
+* Fix 304 Not Modified responses to omit the Content-Length header, as per the spec. [#2453]
+* The `Responder` impl for `NamedFile` now has a boxed future associated type. [#2408]
+* The `Service` impl for `NamedFileService` now has a boxed future associated type. [#2408]
+* Add `impl Clone` for `FilesService`. [#2408]
+
+[#2408]: https://github.com/actix/actix-web/pull/2408
+[#2453]: https://github.com/actix/actix-web/pull/2453
+
+## 0.6.0-beta.8 - 2021-10-20
+* Minimum supported Rust version (MSRV) is now 1.52.
+
+## 0.6.0-beta.7 - 2021-09-09
+* Minimum supported Rust version (MSRV) is now 1.51.
+
 ## 0.6.0-beta.6 - 2021-06-26
 * Added `Files::path_filter()`. [#2274]
 * `Files::show_files_listing()` can now be used with `Files::index_file()` to show files listing as a fallback when the index file is not found. [#2228]
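A minimal handler sketch for the `NamedFile::open_async` API added in 0.6.0-beta.9; the `./static/index.html` path and bind address are illustrative.

```rust
use actix_files::NamedFile;
use actix_web::{get, App, Error, HttpServer};

// With the `experimental-io-uring` crate feature enabled (Linux only), the same
// call reads the file via io-uring instead of blocking a worker thread.
#[get("/")]
async fn index() -> Result<NamedFile, Error> {
    Ok(NamedFile::open_async("./static/index.html").await?)
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(index))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```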


@@ -1,7 +1,11 @@
 [package]
 name = "actix-files"
-version = "0.6.0-beta.6"
-authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
+version = "0.6.0-beta.9"
+authors = [
+    "Nikolay Kim <fafhrd91@gmail.com>",
+    "fakeshadow <24548779@qq.com>",
+    "Rob Ede <robjtede@icloud.com>",
+]
 description = "Static file serving for Actix Web"
 keywords = ["actix", "http", "async", "futures"]
 homepage = "https://actix.rs"
@@ -14,11 +18,13 @@ edition = "2018"
 name = "actix_files"
 path = "src/lib.rs"
 
+[features]
+experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]
+
 [dependencies]
-actix-web = { version = "4.0.0-beta.8", default-features = false }
-actix-http = "3.0.0-beta.8"
+actix-web = { version = "4.0.0-beta.11", default-features = false }
+actix-http = "3.0.0-beta.13"
 actix-service = "2.0.0"
-actix-utils = "3.0.0"
 
 askama_escape = "0.10"
 bitflags = "1"
@@ -30,8 +36,11 @@ log = "0.4"
 mime = "0.3"
 mime_guess = "2.0.1"
 percent-encoding = "2.1"
+pin-project-lite = "0.2.7"
+tokio-uring = { version = "0.1", optional = true }
 
 [dev-dependencies]
 actix-rt = "2.2"
-actix-web = "4.0.0-beta.8"
-actix-test = "0.1.0-beta.3"
+actix-web = "4.0.0-beta.11"
+actix-test = "0.1.0-beta.7"


@@ -3,11 +3,11 @@
 > Static file serving for Actix Web
 
 [![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files)
-[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.0-beta.6)](https://docs.rs/actix-files/0.6.0-beta.6)
-[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
+[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.0-beta.9)](https://docs.rs/actix-files/0.6.0-beta.9)
+[![Version](https://img.shields.io/badge/rustc-1.52+-ab6000.svg)](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
 ![License](https://img.shields.io/crates/l/actix-files.svg)
 <br />
-[![dependency status](https://deps.rs/crate/actix-files/0.6.0-beta.6/status.svg)](https://deps.rs/crate/actix-files/0.6.0-beta.6)
+[![dependency status](https://deps.rs/crate/actix-files/0.6.0-beta.9/status.svg)](https://deps.rs/crate/actix-files/0.6.0-beta.9)
 [![Download](https://img.shields.io/crates/d/actix-files.svg)](https://crates.io/crates/actix-files)
 [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
@@ -15,4 +15,4 @@
 - [API Documentation](https://docs.rs/actix-files/)
 - [Example Project](https://github.com/actix/examples/tree/master/basics/static_index)
 
-- Minimum supported Rust version: 1.46 or later
+- Minimum Supported Rust Version (MSRV): 1.52


@ -1,98 +1,278 @@
use std::{ use std::{
cmp, fmt, cmp, fmt,
fs::File,
future::Future, future::Future,
io::{self, Read, Seek}, io,
pin::Pin, pin::Pin,
task::{Context, Poll}, task::{Context, Poll},
}; };
use actix_web::{ use actix_web::error::Error;
error::{BlockingError, Error},
rt::task::{spawn_blocking, JoinHandle},
};
use bytes::Bytes; use bytes::Bytes;
use futures_core::{ready, Stream}; use futures_core::{ready, Stream};
use pin_project_lite::pin_project;
#[doc(hidden)] use super::named::File;
/// A helper created from a `std::fs::File` which reads the file
/// chunk-by-chunk on a `ThreadPool`.
pub struct ChunkedReadFile {
size: u64,
offset: u64,
state: ChunkedReadFileState,
counter: u64,
}
enum ChunkedReadFileState { pin_project! {
File(Option<File>), /// Adapter to read a `std::file::File` in chunks.
Future(JoinHandle<Result<(File, Bytes), io::Error>>), #[doc(hidden)]
} pub struct ChunkedReadFile<F, Fut> {
size: u64,
impl ChunkedReadFile { offset: u64,
pub(crate) fn new(size: u64, offset: u64, file: File) -> Self { #[pin]
Self { state: ChunkedReadFileState<Fut>,
size, counter: u64,
offset, callback: F,
state: ChunkedReadFileState::File(Some(file)),
counter: 0,
}
} }
} }
impl fmt::Debug for ChunkedReadFile { #[cfg(not(feature = "experimental-io-uring"))]
pin_project! {
#[project = ChunkedReadFileStateProj]
#[project_replace = ChunkedReadFileStateProjReplace]
enum ChunkedReadFileState<Fut> {
File { file: Option<File>, },
Future { #[pin] fut: Fut },
}
}
#[cfg(feature = "experimental-io-uring")]
pin_project! {
#[project = ChunkedReadFileStateProj]
#[project_replace = ChunkedReadFileStateProjReplace]
enum ChunkedReadFileState<Fut> {
File { file: Option<(File, BytesMut)> },
Future { #[pin] fut: Fut },
}
}
impl<F, Fut> fmt::Debug for ChunkedReadFile<F, Fut> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("ChunkedReadFile") f.write_str("ChunkedReadFile")
} }
} }
impl Stream for ChunkedReadFile { pub(crate) fn new_chunked_read(
size: u64,
offset: u64,
file: File,
) -> impl Stream<Item = Result<Bytes, Error>> {
ChunkedReadFile {
size,
offset,
#[cfg(not(feature = "experimental-io-uring"))]
state: ChunkedReadFileState::File { file: Some(file) },
#[cfg(feature = "experimental-io-uring")]
state: ChunkedReadFileState::File {
file: Some((file, BytesMut::new())),
},
counter: 0,
callback: chunked_read_file_callback,
}
}
#[cfg(not(feature = "experimental-io-uring"))]
async fn chunked_read_file_callback(
mut file: File,
offset: u64,
max_bytes: usize,
) -> Result<(File, Bytes), Error> {
use io::{Read as _, Seek as _};
let res = actix_web::rt::task::spawn_blocking(move || {
let mut buf = Vec::with_capacity(max_bytes);
file.seek(io::SeekFrom::Start(offset))?;
let n_bytes = file.by_ref().take(max_bytes as u64).read_to_end(&mut buf)?;
if n_bytes == 0 {
Err(io::Error::from(io::ErrorKind::UnexpectedEof))
} else {
Ok((file, Bytes::from(buf)))
}
})
.await
.map_err(|_| actix_web::error::BlockingError)??;
Ok(res)
}
#[cfg(feature = "experimental-io-uring")]
async fn chunked_read_file_callback(
file: File,
offset: u64,
max_bytes: usize,
mut bytes_mut: BytesMut,
) -> io::Result<(File, Bytes, BytesMut)> {
bytes_mut.reserve(max_bytes);
let (res, mut bytes_mut) = file.read_at(bytes_mut, offset).await;
let n_bytes = res?;
if n_bytes == 0 {
return Err(io::ErrorKind::UnexpectedEof.into());
}
let bytes = bytes_mut.split_to(n_bytes).freeze();
Ok((file, bytes, bytes_mut))
}
#[cfg(feature = "experimental-io-uring")]
impl<F, Fut> Stream for ChunkedReadFile<F, Fut>
where
F: Fn(File, u64, usize, BytesMut) -> Fut,
Fut: Future<Output = io::Result<(File, Bytes, BytesMut)>>,
{
type Item = Result<Bytes, Error>; type Item = Result<Bytes, Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let this = self.as_mut().get_mut(); let mut this = self.as_mut().project();
match this.state { match this.state.as_mut().project() {
ChunkedReadFileState::File(ref mut file) => { ChunkedReadFileStateProj::File { file } => {
let size = this.size; let size = *this.size;
let offset = this.offset; let offset = *this.offset;
let counter = this.counter; let counter = *this.counter;
if size == counter { if size == counter {
Poll::Ready(None) Poll::Ready(None)
} else { } else {
let mut file = file let max_bytes = cmp::min(size.saturating_sub(counter), 65_536) as usize;
let (file, bytes_mut) = file
.take() .take()
.expect("ChunkedReadFile polled after completion"); .expect("ChunkedReadFile polled after completion");
let fut = spawn_blocking(move || { let fut = (this.callback)(file, offset, max_bytes, bytes_mut);
let max_bytes = cmp::min(size.saturating_sub(counter), 65_536) as usize;
let mut buf = Vec::with_capacity(max_bytes); this.state
file.seek(io::SeekFrom::Start(offset))?; .project_replace(ChunkedReadFileState::Future { fut });
let n_bytes =
file.by_ref().take(max_bytes as u64).read_to_end(&mut buf)?;
if n_bytes == 0 {
return Err(io::ErrorKind::UnexpectedEof.into());
}
Ok((file, Bytes::from(buf)))
});
this.state = ChunkedReadFileState::Future(fut);
self.poll_next(cx) self.poll_next(cx)
} }
} }
ChunkedReadFileState::Future(ref mut fut) => { ChunkedReadFileStateProj::Future { fut } => {
let (file, bytes) = let (file, bytes, bytes_mut) = ready!(fut.poll(cx))?;
ready!(Pin::new(fut).poll(cx)).map_err(|_| BlockingError)??;
this.state = ChunkedReadFileState::File(Some(file));
this.offset += bytes.len() as u64; this.state.project_replace(ChunkedReadFileState::File {
this.counter += bytes.len() as u64; file: Some((file, bytes_mut)),
});
*this.offset += bytes.len() as u64;
*this.counter += bytes.len() as u64;
Poll::Ready(Some(Ok(bytes))) Poll::Ready(Some(Ok(bytes)))
} }
} }
} }
} }
#[cfg(not(feature = "experimental-io-uring"))]
impl<F, Fut> Stream for ChunkedReadFile<F, Fut>
where
F: Fn(File, u64, usize) -> Fut,
Fut: Future<Output = Result<(File, Bytes), Error>>,
{
type Item = Result<Bytes, Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let mut this = self.as_mut().project();
match this.state.as_mut().project() {
ChunkedReadFileStateProj::File { file } => {
let size = *this.size;
let offset = *this.offset;
let counter = *this.counter;
if size == counter {
Poll::Ready(None)
} else {
let max_bytes = cmp::min(size.saturating_sub(counter), 65_536) as usize;
let file = file
.take()
.expect("ChunkedReadFile polled after completion");
let fut = (this.callback)(file, offset, max_bytes);
this.state
.project_replace(ChunkedReadFileState::Future { fut });
self.poll_next(cx)
}
}
ChunkedReadFileStateProj::Future { fut } => {
let (file, bytes) = ready!(fut.poll(cx))?;
this.state
.project_replace(ChunkedReadFileState::File { file: Some(file) });
*this.offset += bytes.len() as u64;
*this.counter += bytes.len() as u64;
Poll::Ready(Some(Ok(bytes)))
}
}
}
}
#[cfg(feature = "experimental-io-uring")]
use bytes_mut::BytesMut;
// TODO: remove new type and use bytes::BytesMut directly
#[doc(hidden)]
#[cfg(feature = "experimental-io-uring")]
mod bytes_mut {
use std::ops::{Deref, DerefMut};
use tokio_uring::buf::{IoBuf, IoBufMut};
#[derive(Debug)]
pub struct BytesMut(bytes::BytesMut);
impl BytesMut {
pub(super) fn new() -> Self {
Self(bytes::BytesMut::new())
}
}
impl Deref for BytesMut {
type Target = bytes::BytesMut;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for BytesMut {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
unsafe impl IoBuf for BytesMut {
fn stable_ptr(&self) -> *const u8 {
self.0.as_ptr()
}
fn bytes_init(&self) -> usize {
self.0.len()
}
fn bytes_total(&self) -> usize {
self.0.capacity()
}
}
unsafe impl IoBufMut for BytesMut {
fn stable_mut_ptr(&mut self) -> *mut u8 {
self.0.as_mut_ptr()
}
unsafe fn set_init(&mut self, init_len: usize) {
if self.len() < init_len {
self.0.set_len(init_len);
}
}
}
}
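The rewritten reader above is ultimately just a `Stream<Item = Result<Bytes, _>>` that actix-web turns into a response body. As a rough sketch of that pattern (using a hand-built stream rather than the crate-private `new_chunked_read` constructor shown in the diff), assuming a hypothetical `/stream` route:

```rust
use actix_web::{get, HttpResponse};
use bytes::Bytes;
use futures_util::stream;

// Any `Stream<Item = Result<Bytes, E>>` can back a streaming response body,
// which is how the chunked file reader above is consumed by actix-files.
#[get("/stream")]
async fn stream_handler() -> HttpResponse {
    let body = stream::iter(vec![
        Ok::<_, std::io::Error>(Bytes::from_static(b"chunk one\n")),
        Ok(Bytes::from_static(b"chunk two\n")),
    ]);
    HttpResponse::Ok().streaming(body)
}
```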


@@ -21,6 +21,7 @@ impl ResponseError for FilesError {
     }
 }
 
+#[allow(clippy::enum_variant_names)]
 #[derive(Display, Debug, PartialEq)]
 pub enum UriSegmentError {
     /// The segment started with the wrapped invalid character.


@ -6,7 +6,6 @@ use std::{
}; };
use actix_service::{boxed, IntoServiceFactory, ServiceFactory, ServiceFactoryExt}; use actix_service::{boxed, IntoServiceFactory, ServiceFactory, ServiceFactoryExt};
use actix_utils::future::ok;
use actix_web::{ use actix_web::{
dev::{ dev::{
AppService, HttpServiceFactory, RequestHead, ResourceDef, ServiceRequest, AppService, HttpServiceFactory, RequestHead, ResourceDef, ServiceRequest,
@ -20,8 +19,9 @@ use actix_web::{
use futures_core::future::LocalBoxFuture; use futures_core::future::LocalBoxFuture;
use crate::{ use crate::{
directory_listing, named, Directory, DirectoryRenderer, FilesService, HttpNewService, directory_listing, named,
MimeOverride, PathFilter, service::{FilesService, FilesServiceInner},
Directory, DirectoryRenderer, HttpNewService, MimeOverride, PathFilter,
}; };
/// Static files handling service. /// Static files handling service.
@ -106,7 +106,7 @@ impl Files {
}; };
Files { Files {
path: mount_path.to_owned(), path: mount_path.trim_end_matches('/').to_owned(),
directory: dir, directory: dir,
index: None, index: None,
show_index: false, show_index: false,
@ -283,11 +283,17 @@ impl Files {
/// Setting a fallback static file handler: /// Setting a fallback static file handler:
/// ``` /// ```
/// use actix_files::{Files, NamedFile}; /// use actix_files::{Files, NamedFile};
/// use actix_web::dev::{ServiceRequest, ServiceResponse, fn_service};
/// ///
/// # fn run() -> Result<(), actix_web::Error> { /// # fn run() -> Result<(), actix_web::Error> {
/// let files = Files::new("/", "./static") /// let files = Files::new("/", "./static")
/// .index_file("index.html") /// .index_file("index.html")
/// .default_handler(NamedFile::open("./static/404.html")?); /// .default_handler(fn_service(|req: ServiceRequest| async {
/// let (req, _) = req.into_parts();
/// let file = NamedFile::open_async("./static/404.html").await?;
/// let res = file.into_response(&req);
/// Ok(ServiceResponse::new(req, res))
/// }));
/// # Ok(()) /// # Ok(())
/// # } /// # }
/// ``` /// ```
@ -353,7 +359,7 @@ impl ServiceFactory<ServiceRequest> for Files {
type Future = LocalBoxFuture<'static, Result<Self::Service, Self::InitError>>; type Future = LocalBoxFuture<'static, Result<Self::Service, Self::InitError>>;
fn new_service(&self, _: ()) -> Self::Future { fn new_service(&self, _: ()) -> Self::Future {
let mut srv = FilesService { let mut inner = FilesServiceInner {
directory: self.directory.clone(), directory: self.directory.clone(),
index: self.index.clone(), index: self.index.clone(),
show_index: self.show_index, show_index: self.show_index,
@ -372,14 +378,14 @@ impl ServiceFactory<ServiceRequest> for Files {
Box::pin(async { Box::pin(async {
match fut.await { match fut.await {
Ok(default) => { Ok(default) => {
srv.default = Some(default); inner.default = Some(default);
Ok(srv) Ok(FilesService(Rc::new(inner)))
} }
Err(_) => Err(()), Err(_) => Err(()),
} }
}) })
} else { } else {
Box::pin(ok(srv)) Box::pin(async move { Ok(FilesService(Rc::new(inner))) })
} }
} }
} }
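For context on the service construction changes above, a minimal sketch of how the reworked `Files` service is typically mounted; the mount path, directory, and bind address are illustrative.

```rust
use actix_files::Files;
use actix_web::{App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new().service(
            // The trailing slash on the mount path is trimmed by `Files::new`,
            // per the `trim_end_matches('/')` change in the diff above.
            Files::new("/static/", "./assets")
                .index_file("index.html")
                .show_files_listing(),
        )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```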


@ -33,12 +33,12 @@ mod path_buf;
mod range; mod range;
mod service; mod service;
pub use crate::chunked::ChunkedReadFile; pub use self::chunked::ChunkedReadFile;
pub use crate::directory::Directory; pub use self::directory::Directory;
pub use crate::files::Files; pub use self::files::Files;
pub use crate::named::NamedFile; pub use self::named::NamedFile;
pub use crate::range::HttpRange; pub use self::range::HttpRange;
pub use crate::service::FilesService; pub use self::service::FilesService;
use self::directory::{directory_listing, DirectoryRenderer}; use self::directory::{directory_listing, DirectoryRenderer};
use self::error::FilesError; use self::error::FilesError;
@ -62,13 +62,12 @@ type PathFilter = dyn Fn(&Path, &RequestHead) -> bool;
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::{ use std::{
fs::{self, File}, fs::{self},
ops::Add, ops::Add,
time::{Duration, SystemTime}, time::{Duration, SystemTime},
}; };
use actix_service::ServiceFactory; use actix_service::ServiceFactory;
use actix_utils::future::ok;
use actix_web::{ use actix_web::{
guard, guard,
http::{ http::{
@ -82,8 +81,9 @@ mod tests {
}; };
use super::*; use super::*;
use crate::named::File;
#[actix_rt::test] #[actix_web::test]
async fn test_file_extension_to_mime() { async fn test_file_extension_to_mime() {
let m = file_extension_to_mime(""); let m = file_extension_to_mime("");
assert_eq!(m, mime::APPLICATION_OCTET_STREAM); assert_eq!(m, mime::APPLICATION_OCTET_STREAM);
@ -100,7 +100,7 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_if_modified_since_without_if_none_match() { async fn test_if_modified_since_without_if_none_match() {
let file = NamedFile::open("Cargo.toml").unwrap(); let file = NamedFile::open_async("Cargo.toml").await.unwrap();
let since = header::HttpDate::from(SystemTime::now().add(Duration::from_secs(60))); let since = header::HttpDate::from(SystemTime::now().add(Duration::from_secs(60)));
let req = TestRequest::default() let req = TestRequest::default()
@ -112,7 +112,7 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_if_modified_since_without_if_none_match_same() { async fn test_if_modified_since_without_if_none_match_same() {
let file = NamedFile::open("Cargo.toml").unwrap(); let file = NamedFile::open_async("Cargo.toml").await.unwrap();
let since = file.last_modified().unwrap(); let since = file.last_modified().unwrap();
let req = TestRequest::default() let req = TestRequest::default()
@ -124,7 +124,7 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_if_modified_since_with_if_none_match() { async fn test_if_modified_since_with_if_none_match() {
let file = NamedFile::open("Cargo.toml").unwrap(); let file = NamedFile::open_async("Cargo.toml").await.unwrap();
let since = header::HttpDate::from(SystemTime::now().add(Duration::from_secs(60))); let since = header::HttpDate::from(SystemTime::now().add(Duration::from_secs(60)));
let req = TestRequest::default() let req = TestRequest::default()
@ -137,7 +137,7 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_if_unmodified_since() { async fn test_if_unmodified_since() {
let file = NamedFile::open("Cargo.toml").unwrap(); let file = NamedFile::open_async("Cargo.toml").await.unwrap();
let since = file.last_modified().unwrap(); let since = file.last_modified().unwrap();
let req = TestRequest::default() let req = TestRequest::default()
@ -149,7 +149,7 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_if_unmodified_since_failed() { async fn test_if_unmodified_since_failed() {
let file = NamedFile::open("Cargo.toml").unwrap(); let file = NamedFile::open_async("Cargo.toml").await.unwrap();
let since = header::HttpDate::from(SystemTime::UNIX_EPOCH); let since = header::HttpDate::from(SystemTime::UNIX_EPOCH);
let req = TestRequest::default() let req = TestRequest::default()
@ -161,8 +161,8 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_named_file_text() { async fn test_named_file_text() {
assert!(NamedFile::open("test--").is_err()); assert!(NamedFile::open_async("test--").await.is_err());
let mut file = NamedFile::open("Cargo.toml").unwrap(); let mut file = NamedFile::open_async("Cargo.toml").await.unwrap();
{ {
file.file(); file.file();
let _f: &File = &file; let _f: &File = &file;
@ -185,8 +185,8 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_named_file_content_disposition() { async fn test_named_file_content_disposition() {
assert!(NamedFile::open("test--").is_err()); assert!(NamedFile::open_async("test--").await.is_err());
let mut file = NamedFile::open("Cargo.toml").unwrap(); let mut file = NamedFile::open_async("Cargo.toml").await.unwrap();
{ {
file.file(); file.file();
let _f: &File = &file; let _f: &File = &file;
@ -202,7 +202,8 @@ mod tests {
"inline; filename=\"Cargo.toml\"" "inline; filename=\"Cargo.toml\""
); );
let file = NamedFile::open("Cargo.toml") let file = NamedFile::open_async("Cargo.toml")
.await
.unwrap() .unwrap()
.disable_content_disposition(); .disable_content_disposition();
let req = TestRequest::default().to_http_request(); let req = TestRequest::default().to_http_request();
@ -212,8 +213,19 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_named_file_non_ascii_file_name() { async fn test_named_file_non_ascii_file_name() {
let mut file = let file = {
NamedFile::from_file(File::open("Cargo.toml").unwrap(), "貨物.toml").unwrap(); #[cfg(feature = "experimental-io-uring")]
{
crate::named::File::open("Cargo.toml").await.unwrap()
}
#[cfg(not(feature = "experimental-io-uring"))]
{
crate::named::File::open("Cargo.toml").unwrap()
}
};
let mut file = NamedFile::from_file(file, "貨物.toml").unwrap();
{ {
file.file(); file.file();
let _f: &File = &file; let _f: &File = &file;
@ -236,7 +248,8 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_named_file_set_content_type() { async fn test_named_file_set_content_type() {
let mut file = NamedFile::open("Cargo.toml") let mut file = NamedFile::open_async("Cargo.toml")
.await
.unwrap() .unwrap()
.set_content_type(mime::TEXT_XML); .set_content_type(mime::TEXT_XML);
{ {
@ -261,7 +274,7 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_named_file_image() { async fn test_named_file_image() {
let mut file = NamedFile::open("tests/test.png").unwrap(); let mut file = NamedFile::open_async("tests/test.png").await.unwrap();
{ {
file.file(); file.file();
let _f: &File = &file; let _f: &File = &file;
@ -284,7 +297,7 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_named_file_javascript() { async fn test_named_file_javascript() {
let file = NamedFile::open("tests/test.js").unwrap(); let file = NamedFile::open_async("tests/test.js").await.unwrap();
let req = TestRequest::default().to_http_request(); let req = TestRequest::default().to_http_request();
let resp = file.respond_to(&req).await.unwrap(); let resp = file.respond_to(&req).await.unwrap();
@ -304,7 +317,8 @@ mod tests {
disposition: DispositionType::Attachment, disposition: DispositionType::Attachment,
parameters: vec![DispositionParam::Filename(String::from("test.png"))], parameters: vec![DispositionParam::Filename(String::from("test.png"))],
}; };
let mut file = NamedFile::open("tests/test.png") let mut file = NamedFile::open_async("tests/test.png")
.await
.unwrap() .unwrap()
.set_content_disposition(cd); .set_content_disposition(cd);
{ {
@ -329,7 +343,7 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_named_file_binary() { async fn test_named_file_binary() {
let mut file = NamedFile::open("tests/test.binary").unwrap(); let mut file = NamedFile::open_async("tests/test.binary").await.unwrap();
{ {
file.file(); file.file();
let _f: &File = &file; let _f: &File = &file;
@ -352,7 +366,8 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_named_file_status_code_text() { async fn test_named_file_status_code_text() {
let mut file = NamedFile::open("Cargo.toml") let mut file = NamedFile::open_async("Cargo.toml")
.await
.unwrap() .unwrap()
.set_status_code(StatusCode::NOT_FOUND); .set_status_code(StatusCode::NOT_FOUND);
{ {
@ -568,7 +583,8 @@ mod tests {
async fn test_named_file_content_encoding() { async fn test_named_file_content_encoding() {
let srv = test::init_service(App::new().wrap(Compress::default()).service( let srv = test::init_service(App::new().wrap(Compress::default()).service(
web::resource("/").to(|| async { web::resource("/").to(|| async {
NamedFile::open("Cargo.toml") NamedFile::open_async("Cargo.toml")
.await
.unwrap() .unwrap()
.set_content_encoding(header::ContentEncoding::Identity) .set_content_encoding(header::ContentEncoding::Identity)
}), }),
@ -588,7 +604,8 @@ mod tests {
async fn test_named_file_content_encoding_gzip() { async fn test_named_file_content_encoding_gzip() {
let srv = test::init_service(App::new().wrap(Compress::default()).service( let srv = test::init_service(App::new().wrap(Compress::default()).service(
web::resource("/").to(|| async { web::resource("/").to(|| async {
NamedFile::open("Cargo.toml") NamedFile::open_async("Cargo.toml")
.await
.unwrap() .unwrap()
.set_content_encoding(header::ContentEncoding::Gzip) .set_content_encoding(header::ContentEncoding::Gzip)
}), }),
@ -614,7 +631,7 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_named_file_allowed_method() { async fn test_named_file_allowed_method() {
let req = TestRequest::default().method(Method::GET).to_http_request(); let req = TestRequest::default().method(Method::GET).to_http_request();
let file = NamedFile::open("Cargo.toml").unwrap(); let file = NamedFile::open_async("Cargo.toml").await.unwrap();
let resp = file.respond_to(&req).await.unwrap(); let resp = file.respond_to(&req).await.unwrap();
assert_eq!(resp.status(), StatusCode::OK); assert_eq!(resp.status(), StatusCode::OK);
} }
@ -705,8 +722,8 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_default_handler_file_missing() { async fn test_default_handler_file_missing() {
let st = Files::new("/", ".") let st = Files::new("/", ".")
.default_handler(|req: ServiceRequest| { .default_handler(|req: ServiceRequest| async {
ok(req.into_response(HttpResponse::Ok().body("default content"))) Ok(req.into_response(HttpResponse::Ok().body("default content")))
}) })
.new_service(()) .new_service(())
.await .await
@ -789,9 +806,8 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_serve_named_file() { async fn test_serve_named_file() {
let srv = let factory = NamedFile::open_async("Cargo.toml").await.unwrap();
test::init_service(App::new().service(NamedFile::open("Cargo.toml").unwrap())) let srv = test::init_service(App::new().service(factory)).await;
.await;
let req = TestRequest::get().uri("/Cargo.toml").to_request(); let req = TestRequest::get().uri("/Cargo.toml").to_request();
let res = test::call_service(&srv, req).await; let res = test::call_service(&srv, req).await;
@ -808,11 +824,9 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_serve_named_file_prefix() { async fn test_serve_named_file_prefix() {
let srv = test::init_service( let factory = NamedFile::open_async("Cargo.toml").await.unwrap();
App::new() let srv =
.service(web::scope("/test").service(NamedFile::open("Cargo.toml").unwrap())), test::init_service(App::new().service(web::scope("/test").service(factory))).await;
)
.await;
let req = TestRequest::get().uri("/test/Cargo.toml").to_request(); let req = TestRequest::get().uri("/test/Cargo.toml").to_request();
let res = test::call_service(&srv, req).await; let res = test::call_service(&srv, req).await;
@ -829,10 +843,8 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_named_file_default_service() { async fn test_named_file_default_service() {
let srv = test::init_service( let factory = NamedFile::open_async("Cargo.toml").await.unwrap();
App::new().default_service(NamedFile::open("Cargo.toml").unwrap()), let srv = test::init_service(App::new().default_service(factory)).await;
)
.await;
for route in ["/foobar", "/baz", "/"].iter() { for route in ["/foobar", "/baz", "/"].iter() {
let req = TestRequest::get().uri(route).to_request(); let req = TestRequest::get().uri(route).to_request();
@ -847,8 +859,9 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_default_handler_named_file() { async fn test_default_handler_named_file() {
let factory = NamedFile::open_async("Cargo.toml").await.unwrap();
let st = Files::new("/", ".") let st = Files::new("/", ".")
.default_handler(NamedFile::open("Cargo.toml").unwrap()) .default_handler(factory)
.new_service(()) .new_service(())
.await .await
.unwrap(); .unwrap();
@ -926,8 +939,8 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_default_handler_filter() { async fn test_default_handler_filter() {
let st = Files::new("/", ".") let st = Files::new("/", ".")
.default_handler(|req: ServiceRequest| { .default_handler(|req: ServiceRequest| async {
ok(req.into_response(HttpResponse::Ok().body("default content"))) Ok(req.into_response(HttpResponse::Ok().body("default content")))
}) })
.path_filter(|path, _| path.extension() == Some("png".as_ref())) .path_filter(|path, _| path.extension() == Some("png".as_ref()))
.new_service(()) .new_service(())


@ -1,17 +1,22 @@
use actix_service::{Service, ServiceFactory}; use std::{
use actix_utils::future::{ok, ready, Ready}; fmt,
use actix_web::dev::{AppService, HttpServiceFactory, ResourceDef}; fs::Metadata,
use std::fs::{File, Metadata}; io,
use std::io; ops::{Deref, DerefMut},
use std::ops::{Deref, DerefMut}; path::{Path, PathBuf},
use std::path::{Path, PathBuf}; time::{SystemTime, UNIX_EPOCH},
use std::time::{SystemTime, UNIX_EPOCH}; };
#[cfg(unix)] #[cfg(unix)]
use std::os::unix::fs::MetadataExt; use std::os::unix::fs::MetadataExt;
use actix_http::body::AnyBody;
use actix_service::{Service, ServiceFactory};
use actix_web::{ use actix_web::{
dev::{BodyEncoding, ServiceRequest, ServiceResponse, SizedStream}, dev::{
AppService, BodyEncoding, HttpServiceFactory, ResourceDef, ServiceRequest,
ServiceResponse, SizedStream,
},
http::{ http::{
header::{ header::{
self, Charset, ContentDisposition, DispositionParam, DispositionType, ExtendedValue, self, Charset, ContentDisposition, DispositionParam, DispositionType, ExtendedValue,
@ -21,9 +26,9 @@ use actix_web::{
Error, HttpMessage, HttpRequest, HttpResponse, Responder, Error, HttpMessage, HttpRequest, HttpResponse, Responder,
}; };
use bitflags::bitflags; use bitflags::bitflags;
use futures_core::future::LocalBoxFuture;
use mime_guess::from_path; use mime_guess::from_path;
use crate::ChunkedReadFile;
use crate::{encoding::equiv_utf8_text, range::HttpRange}; use crate::{encoding::equiv_utf8_text, range::HttpRange};
bitflags! { bitflags! {
@ -48,9 +53,9 @@ impl Default for Flags {
/// use actix_web::App; /// use actix_web::App;
/// use actix_files::NamedFile; /// use actix_files::NamedFile;
/// ///
/// # fn run() -> Result<(), Box<dyn std::error::Error>> { /// # async fn run() -> Result<(), Box<dyn std::error::Error>> {
/// let app = App::new() /// let file = NamedFile::open_async("./static/index.html").await?;
/// .service(NamedFile::open("./static/index.html")?); /// let app = App::new().service(file);
/// # Ok(()) /// # Ok(())
/// # } /// # }
/// ``` /// ```
@ -62,10 +67,9 @@ impl Default for Flags {
/// ///
/// #[get("/")] /// #[get("/")]
/// async fn index() -> impl Responder { /// async fn index() -> impl Responder {
/// NamedFile::open("./static/index.html") /// NamedFile::open_async("./static/index.html").await
/// } /// }
/// ``` /// ```
#[derive(Debug)]
pub struct NamedFile { pub struct NamedFile {
path: PathBuf, path: PathBuf,
file: File, file: File,
@ -78,6 +82,37 @@ pub struct NamedFile {
pub(crate) encoding: Option<ContentEncoding>, pub(crate) encoding: Option<ContentEncoding>,
} }
impl fmt::Debug for NamedFile {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("NamedFile")
.field("path", &self.path)
.field(
"file",
#[cfg(feature = "experimental-io-uring")]
{
&"tokio_uring::File"
},
#[cfg(not(feature = "experimental-io-uring"))]
{
&self.file
},
)
.field("modified", &self.modified)
.field("md", &self.md)
.field("flags", &self.flags)
.field("status_code", &self.status_code)
.field("content_type", &self.content_type)
.field("content_disposition", &self.content_disposition)
.field("encoding", &self.encoding)
.finish()
}
}
#[cfg(not(feature = "experimental-io-uring"))]
pub(crate) use std::fs::File;
#[cfg(feature = "experimental-io-uring")]
pub(crate) use tokio_uring::fs::File;
impl NamedFile { impl NamedFile {
/// Creates an instance from a previously opened file. /// Creates an instance from a previously opened file.
/// ///
@ -85,8 +120,7 @@ impl NamedFile {
/// `ContentDisposition` headers. /// `ContentDisposition` headers.
/// ///
/// # Examples /// # Examples
/// /// ```ignore
/// ```
/// use actix_files::NamedFile; /// use actix_files::NamedFile;
/// use std::io::{self, Write}; /// use std::io::{self, Write};
/// use std::env; /// use std::env;
@ -147,7 +181,30 @@ impl NamedFile {
(ct, cd) (ct, cd)
}; };
let md = file.metadata()?; let md = {
#[cfg(not(feature = "experimental-io-uring"))]
{
file.metadata()?
}
#[cfg(feature = "experimental-io-uring")]
{
use std::os::unix::prelude::{AsRawFd, FromRawFd};
let fd = file.as_raw_fd();
// SAFETY: fd is borrowed and lives longer than the unsafe block
unsafe {
let file = std::fs::File::from_raw_fd(fd);
let md = file.metadata();
// SAFETY: forget the temporary std File (in both the success and error case) so its
// destructor doesn't run and close the borrowed file handle
std::mem::forget(file);
md?
}
}
};
let modified = md.modified().ok(); let modified = md.modified().ok();
let encoding = None; let encoding = None;
@ -164,17 +221,45 @@ impl NamedFile {
}) })
} }
#[cfg(not(feature = "experimental-io-uring"))]
/// Attempts to open a file in read-only mode. /// Attempts to open a file in read-only mode.
/// ///
/// # Examples /// # Examples
///
/// ``` /// ```
/// use actix_files::NamedFile; /// use actix_files::NamedFile;
///
/// let file = NamedFile::open("foo.txt"); /// let file = NamedFile::open("foo.txt");
/// ``` /// ```
pub fn open<P: AsRef<Path>>(path: P) -> io::Result<NamedFile> { pub fn open<P: AsRef<Path>>(path: P) -> io::Result<NamedFile> {
Self::from_file(File::open(&path)?, path) let file = File::open(&path)?;
Self::from_file(file, path)
}
/// Attempts to open a file asynchronously in read-only mode.
///
/// When the `experimental-io-uring` crate feature is enabled, this will be async.
/// Otherwise, it will be just like [`open`][Self::open].
///
/// # Examples
/// ```
/// use actix_files::NamedFile;
/// # async fn open() {
/// let file = NamedFile::open_async("foo.txt").await.unwrap();
/// # }
/// ```
pub async fn open_async<P: AsRef<Path>>(path: P) -> io::Result<NamedFile> {
let file = {
#[cfg(not(feature = "experimental-io-uring"))]
{
File::open(&path)?
}
#[cfg(feature = "experimental-io-uring")]
{
File::open(&path).await?
}
};
Self::from_file(file, path)
} }
/// Returns reference to the underlying `File` object. /// Returns reference to the underlying `File` object.
@ -186,13 +271,12 @@ impl NamedFile {
/// Retrieve the path of this file. /// Retrieve the path of this file.
/// ///
/// # Examples /// # Examples
///
/// ``` /// ```
/// # use std::io; /// # use std::io;
/// use actix_files::NamedFile; /// use actix_files::NamedFile;
/// ///
/// # fn path() -> io::Result<()> { /// # async fn path() -> io::Result<()> {
/// let file = NamedFile::open("test.txt")?; /// let file = NamedFile::open_async("test.txt").await?;
/// assert_eq!(file.path().as_os_str(), "test.txt"); /// assert_eq!(file.path().as_os_str(), "test.txt");
/// # Ok(()) /// # Ok(())
/// # } /// # }
@ -332,7 +416,7 @@ impl NamedFile {
res.encoding(current_encoding); res.encoding(current_encoding);
} }
let reader = ChunkedReadFile::new(self.md.len(), 0, self.file); let reader = super::chunked::new_chunked_read(self.md.len(), 0, self.file);
return res.streaming(reader); return res.streaming(reader);
} }
@ -443,10 +527,10 @@ impl NamedFile {
if precondition_failed { if precondition_failed {
return resp.status(StatusCode::PRECONDITION_FAILED).finish(); return resp.status(StatusCode::PRECONDITION_FAILED).finish();
} else if not_modified { } else if not_modified {
return resp.status(StatusCode::NOT_MODIFIED).finish(); return resp.status(StatusCode::NOT_MODIFIED).body(AnyBody::None);
} }
let reader = ChunkedReadFile::new(length, offset, self.file); let reader = super::chunked::new_chunked_read(length, offset, self.file);
if offset != 0 || length != self.md.len() { if offset != 0 || length != self.md.len() {
resp.status(StatusCode::PARTIAL_CONTENT); resp.status(StatusCode::PARTIAL_CONTENT);
@ -456,20 +540,6 @@ impl NamedFile {
} }
} }
impl Deref for NamedFile {
type Target = File;
fn deref(&self) -> &File {
&self.file
}
}
impl DerefMut for NamedFile {
fn deref_mut(&mut self) -> &mut File {
&mut self.file
}
}
/// Returns true if `req` has no `If-Match` header or one which matches `etag`. /// Returns true if `req` has no `If-Match` header or one which matches `etag`.
fn any_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool { fn any_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {
match req.get_header::<header::IfMatch>() { match req.get_header::<header::IfMatch>() {
@ -510,6 +580,20 @@ fn none_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {
} }
} }
impl Deref for NamedFile {
type Target = File;
fn deref(&self) -> &Self::Target {
&self.file
}
}
impl DerefMut for NamedFile {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.file
}
}
impl Responder for NamedFile { impl Responder for NamedFile {
fn respond_to(self, req: &HttpRequest) -> HttpResponse { fn respond_to(self, req: &HttpRequest) -> HttpResponse {
self.into_response(req) self.into_response(req)
@ -520,14 +604,16 @@ impl ServiceFactory<ServiceRequest> for NamedFile {
type Response = ServiceResponse; type Response = ServiceResponse;
type Error = Error; type Error = Error;
type Config = (); type Config = ();
type InitError = ();
type Service = NamedFileService; type Service = NamedFileService;
type Future = Ready<Result<Self::Service, ()>>; type InitError = ();
type Future = LocalBoxFuture<'static, Result<Self::Service, Self::InitError>>;
fn new_service(&self, _: ()) -> Self::Future { fn new_service(&self, _: ()) -> Self::Future {
ok(NamedFileService { let service = NamedFileService {
path: self.path.clone(), path: self.path.clone(),
}) };
Box::pin(async move { Ok(service) })
} }
} }
@ -540,18 +626,19 @@ pub struct NamedFileService {
impl Service<ServiceRequest> for NamedFileService { impl Service<ServiceRequest> for NamedFileService {
type Response = ServiceResponse; type Response = ServiceResponse;
type Error = Error; type Error = Error;
type Future = Ready<Result<Self::Response, Self::Error>>; type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;
actix_service::always_ready!(); actix_service::always_ready!();
fn call(&self, req: ServiceRequest) -> Self::Future { fn call(&self, req: ServiceRequest) -> Self::Future {
let (req, _) = req.into_parts(); let (req, _) = req.into_parts();
ready(
NamedFile::open(&self.path) let path = self.path.clone();
.map_err(|e| e.into()) Box::pin(async move {
.map(|f| f.into_response(&req)) let file = NamedFile::open_async(path).await?;
.map(|res| ServiceResponse::new(req, res)), let res = file.into_response(&req);
) Ok(ServiceResponse::new(req, res))
})
} }
} }

View File

@ -1,14 +1,14 @@
use std::{ use std::{
future::{ready, Ready},
path::{Path, PathBuf}, path::{Path, PathBuf},
str::FromStr, str::FromStr,
}; };
use actix_utils::future::{ready, Ready};
use actix_web::{dev::Payload, FromRequest, HttpRequest}; use actix_web::{dev::Payload, FromRequest, HttpRequest};
use crate::error::UriSegmentError; use crate::error::UriSegmentError;
#[derive(Debug)] #[derive(Debug, PartialEq, Eq)]
pub(crate) struct PathBufWrap(PathBuf); pub(crate) struct PathBufWrap(PathBuf);
impl FromStr for PathBufWrap { impl FromStr for PathBufWrap {
@ -21,6 +21,8 @@ impl FromStr for PathBufWrap {
impl PathBufWrap { impl PathBufWrap {
/// Parse a path, giving the choice of allowing hidden files to be considered valid segments. /// Parse a path, giving the choice of allowing hidden files to be considered valid segments.
///
/// Path traversal is guarded by this method.
pub fn parse_path(path: &str, hidden_files: bool) -> Result<Self, UriSegmentError> { pub fn parse_path(path: &str, hidden_files: bool) -> Result<Self, UriSegmentError> {
let mut buf = PathBuf::new(); let mut buf = PathBuf::new();
@ -59,7 +61,6 @@ impl AsRef<Path> for PathBufWrap {
impl FromRequest for PathBufWrap { impl FromRequest for PathBufWrap {
type Error = UriSegmentError; type Error = UriSegmentError;
type Future = Ready<Result<Self, Self::Error>>; type Future = Ready<Result<Self, Self::Error>>;
type Config = ();
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
ready(req.match_info().path().parse()) ready(req.match_info().path().parse())
@ -116,4 +117,24 @@ mod tests {
PathBuf::from_iter(vec!["test", ".tt"]) PathBuf::from_iter(vec!["test", ".tt"])
); );
} }
#[test]
fn path_traversal() {
assert_eq!(
PathBufWrap::parse_path("/../README.md", false).unwrap().0,
PathBuf::from_iter(vec!["README.md"])
);
assert_eq!(
PathBufWrap::parse_path("/../README.md", true).unwrap().0,
PathBuf::from_iter(vec!["README.md"])
);
assert_eq!(
PathBufWrap::parse_path("/../../../../../../../../../../etc/passwd", false)
.unwrap()
.0,
PathBuf::from_iter(vec!["etc/passwd"])
);
}
} }
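The traversal cases exercised in the tests above work because `parse_path` drops `..` (and empty) segments while it rebuilds the path, so a request can never climb out of the served directory. A simplified standalone sketch of that idea, ignoring the hidden-files option and the `UriSegmentError` cases for brevity (`sanitize_path` is an illustrative name, not the crate's API):

use std::path::PathBuf;

// Build a relative path from URI segments, silently dropping empty, `.` and
// `..` segments so the result cannot escape the base directory.
fn sanitize_path(uri_path: &str) -> PathBuf {
    let mut buf = PathBuf::new();

    for segment in uri_path.split('/') {
        match segment {
            "" | "." | ".." => continue, // never walk up the tree
            seg => buf.push(seg),
        }
    }

    buf
}

fn main() {
    assert_eq!(sanitize_path("/../README.md"), PathBuf::from("README.md"));
    assert_eq!(
        sanitize_path("/../../../../../../../../../../etc/passwd"),
        PathBuf::from("etc/passwd")
    );
}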

View File

@ -1,7 +1,6 @@
use std::{fmt, io, path::PathBuf, rc::Rc}; use std::{fmt, io, ops::Deref, path::PathBuf, rc::Rc};
use actix_service::Service; use actix_service::Service;
use actix_utils::future::ok;
use actix_web::{ use actix_web::{
dev::{ServiceRequest, ServiceResponse}, dev::{ServiceRequest, ServiceResponse},
error::Error, error::Error,
@ -17,7 +16,18 @@ use crate::{
}; };
/// Assembled file serving service. /// Assembled file serving service.
pub struct FilesService { #[derive(Clone)]
pub struct FilesService(pub(crate) Rc<FilesServiceInner>);
impl Deref for FilesService {
type Target = FilesServiceInner;
fn deref(&self) -> &Self::Target {
&*self.0
}
}
pub struct FilesServiceInner {
pub(crate) directory: PathBuf, pub(crate) directory: PathBuf,
pub(crate) index: Option<String>, pub(crate) index: Option<String>,
pub(crate) show_index: bool, pub(crate) show_index: bool,
@ -31,20 +41,50 @@ pub struct FilesService {
pub(crate) hidden_files: bool, pub(crate) hidden_files: bool,
} }
impl fmt::Debug for FilesServiceInner {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("FilesServiceInner")
}
}
impl FilesService { impl FilesService {
fn handle_err( async fn handle_err(
&self, &self,
err: io::Error, err: io::Error,
req: ServiceRequest, req: ServiceRequest,
) -> LocalBoxFuture<'static, Result<ServiceResponse, Error>> { ) -> Result<ServiceResponse, Error> {
log::debug!("error handling {}: {}", req.path(), err); log::debug!("error handling {}: {}", req.path(), err);
if let Some(ref default) = self.default { if let Some(ref default) = self.default {
Box::pin(default.call(req)) default.call(req).await
} else { } else {
Box::pin(ok(req.error_response(err))) Ok(req.error_response(err))
} }
} }
fn serve_named_file(
&self,
req: ServiceRequest,
mut named_file: NamedFile,
) -> ServiceResponse {
if let Some(ref mime_override) = self.mime_override {
let new_disposition = mime_override(&named_file.content_type.type_());
named_file.content_disposition.disposition = new_disposition;
}
named_file.flags = self.file_flags;
let (req, _) = req.into_parts();
let res = named_file.into_response(&req);
ServiceResponse::new(req, res)
}
fn show_index(&self, req: ServiceRequest, path: PathBuf) -> ServiceResponse {
let dir = Directory::new(self.directory.clone(), path);
let (req, _) = req.into_parts();
(self.renderer)(&dir, &req).unwrap_or_else(|e| ServiceResponse::from_err(e, req))
}
} }
impl fmt::Debug for FilesService { impl fmt::Debug for FilesService {
@ -56,7 +96,7 @@ impl fmt::Debug for FilesService {
impl Service<ServiceRequest> for FilesService { impl Service<ServiceRequest> for FilesService {
type Response = ServiceResponse; type Response = ServiceResponse;
type Error = Error; type Error = Error;
type Future = LocalBoxFuture<'static, Result<ServiceResponse, Error>>; type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;
actix_service::always_ready!(); actix_service::always_ready!();
@ -69,103 +109,87 @@ impl Service<ServiceRequest> for FilesService {
matches!(*req.method(), Method::HEAD | Method::GET) matches!(*req.method(), Method::HEAD | Method::GET)
}; };
if !is_method_valid { let this = self.clone();
return Box::pin(ok(req.into_response(
actix_web::HttpResponse::MethodNotAllowed()
.insert_header(header::ContentType(mime::TEXT_PLAIN_UTF_8))
.body("Request did not meet this resource's requirements."),
)));
}
let real_path = Box::pin(async move {
match PathBufWrap::parse_path(req.match_info().path(), self.hidden_files) { if !is_method_valid {
Ok(item) => item, return Ok(req.into_response(
Err(e) => return Box::pin(ok(req.error_response(e))), actix_web::HttpResponse::MethodNotAllowed()
}; .insert_header(header::ContentType(mime::TEXT_PLAIN_UTF_8))
.body("Request did not meet this resource's requirements."),
));
}
if let Some(filter) = &self.path_filter { let real_path =
if !filter(real_path.as_ref(), req.head()) { match PathBufWrap::parse_path(req.match_info().path(), this.hidden_files) {
if let Some(ref default) = self.default { Ok(item) => item,
return Box::pin(default.call(req)); Err(e) => return Ok(req.error_response(e)),
} else { };
return Box::pin(ok(
req.into_response(actix_web::HttpResponse::NotFound().finish()) if let Some(filter) = &this.path_filter {
if !filter(real_path.as_ref(), req.head()) {
if let Some(ref default) = this.default {
return default.call(req).await;
} else {
return Ok(
req.into_response(actix_web::HttpResponse::NotFound().finish())
);
}
}
}
// full file path
let path = this.directory.join(&real_path);
if let Err(err) = path.canonicalize() {
return this.handle_err(err, req).await;
}
if path.is_dir() {
if this.redirect_to_slash
&& !req.path().ends_with('/')
&& (this.index.is_some() || this.show_index)
{
let redirect_to = format!("{}/", req.path());
return Ok(req.into_response(
HttpResponse::Found()
.insert_header((header::LOCATION, redirect_to))
.finish(),
)); ));
} }
}
}
// full file path match this.index {
let path = self.directory.join(&real_path); Some(ref index) => {
if let Err(err) = path.canonicalize() { let named_path = path.join(index);
return Box::pin(self.handle_err(err, req)); match NamedFile::open_async(named_path).await {
} Ok(named_file) => Ok(this.serve_named_file(req, named_file)),
Err(_) if this.show_index => Ok(this.show_index(req, path)),
if path.is_dir() { Err(err) => this.handle_err(err, req).await,
if self.redirect_to_slash }
&& !req.path().ends_with('/')
&& (self.index.is_some() || self.show_index)
{
let redirect_to = format!("{}/", req.path());
return Box::pin(ok(req.into_response(
HttpResponse::Found()
.insert_header((header::LOCATION, redirect_to))
.finish(),
)));
}
let serve_named_file = |req: ServiceRequest, mut named_file: NamedFile| {
if let Some(ref mime_override) = self.mime_override {
let new_disposition = mime_override(&named_file.content_type.type_());
named_file.content_disposition.disposition = new_disposition;
}
named_file.flags = self.file_flags;
let (req, _) = req.into_parts();
let res = named_file.into_response(&req);
Box::pin(ok(ServiceResponse::new(req, res)))
};
let show_index = |req: ServiceRequest| {
let dir = Directory::new(self.directory.clone(), path.clone());
let (req, _) = req.into_parts();
let x = (self.renderer)(&dir, &req);
Box::pin(match x {
Ok(resp) => ok(resp),
Err(err) => ok(ServiceResponse::from_err(err, req)),
})
};
match self.index {
Some(ref index) => match NamedFile::open(path.join(index)) {
Ok(named_file) => serve_named_file(req, named_file),
Err(_) if self.show_index => show_index(req),
Err(err) => self.handle_err(err, req),
},
None if self.show_index => show_index(req),
_ => Box::pin(ok(ServiceResponse::from_err(
FilesError::IsDirectory,
req.into_parts().0,
))),
}
} else {
match NamedFile::open(path) {
Ok(mut named_file) => {
if let Some(ref mime_override) = self.mime_override {
let new_disposition = mime_override(&named_file.content_type.type_());
named_file.content_disposition.disposition = new_disposition;
} }
named_file.flags = self.file_flags; None if this.show_index => Ok(this.show_index(req, path)),
_ => Ok(ServiceResponse::from_err(
let (req, _) = req.into_parts(); FilesError::IsDirectory,
let res = named_file.into_response(&req); req.into_parts().0,
Box::pin(ok(ServiceResponse::new(req, res))) )),
}
} else {
match NamedFile::open_async(&path).await {
Ok(mut named_file) => {
if let Some(ref mime_override) = this.mime_override {
let new_disposition =
mime_override(&named_file.content_type.type_());
named_file.content_disposition.disposition = new_disposition;
}
named_file.flags = this.file_flags;
let (req, _) = req.into_parts();
let res = named_file.into_response(&req);
Ok(ServiceResponse::new(req, res))
}
Err(err) => this.handle_err(err, req).await,
} }
Err(err) => self.handle_err(err, req),
} }
} })
} }
} }
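The `call` body above works because `FilesService` is now a cheap `Rc` handle (see the `FilesService(Rc<FilesServiceInner>)` wrapper earlier in this diff): cloning it into `this` lets the whole request flow move into one `'static` boxed `async` block instead of chaining `Box::pin(ok(..))` futures. A minimal sketch of that clone-into-async pattern in isolation, assuming hypothetical `Inner`/`MyService` types and the `futures` crate's executor to drive the result:

use std::{future::Future, pin::Pin, rc::Rc};

// Same shape as the `futures_core::future::LocalBoxFuture` alias used above.
type LocalBoxFuture<'a, T> = Pin<Box<dyn Future<Output = T> + 'a>>;

struct Inner {
    greeting: String,
}

// Cloning only bumps the `Rc` count; the shared state itself is not copied.
#[derive(Clone)]
struct MyService(Rc<Inner>);

impl MyService {
    // `&self` cannot be borrowed inside a `'static` future, so clone the cheap
    // handle and move the clone into the async block instead.
    fn call(&self, name: &str) -> LocalBoxFuture<'static, String> {
        let this = self.clone();
        let name = name.to_owned();
        Box::pin(async move { format!("{}, {}!", this.0.greeting, name) })
    }
}

fn main() {
    let svc = MyService(Rc::new(Inner { greeting: "hello".into() }));
    // Any single-threaded executor can drive the (non-Send) boxed future.
    let reply = futures::executor::block_on(svc.call("world"));
    assert_eq!(reply, "hello, world!");
}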

View File

@ -8,7 +8,7 @@ use actix_web::{
App, App,
}; };
#[actix_rt::test] #[actix_web::test]
async fn test_utf8_file_contents() { async fn test_utf8_file_contents() {
// use default ISO-8859-1 encoding // use default ISO-8859-1 encoding
let srv = test::init_service(App::new().service(Files::new("/", "./tests"))).await; let srv = test::init_service(App::new().service(Files::new("/", "./tests"))).await;

View File

@ -7,7 +7,7 @@ use actix_web::{
}; };
use bytes::Bytes; use bytes::Bytes;
#[actix_rt::test] #[actix_web::test]
async fn test_guard_filter() { async fn test_guard_filter() {
let srv = test::init_service( let srv = test::init_service(
App::new() App::new()

View File

@ -0,0 +1,27 @@
use actix_files::Files;
use actix_web::{
http::StatusCode,
test::{self, TestRequest},
App,
};
#[actix_rt::test]
async fn test_directory_traversal_prevention() {
let srv = test::init_service(App::new().service(Files::new("/", "./tests"))).await;
let req =
TestRequest::with_uri("/../../../../../../../../../../../etc/passwd").to_request();
let res = test::call_service(&srv, req).await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
let req = TestRequest::with_uri(
"/%2e%2e/%2e%2e/%2e%2e/%2e%2e/%2e%2e/%2e%2e/%2e%2e/%2e%2e/%2e%2e/%2e%2e/etc/passwd",
)
.to_request();
let res = test::call_service(&srv, req).await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
let req = TestRequest::with_uri("/%00/etc/passwd%00").to_request();
let res = test::call_service(&srv, req).await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
}

View File

@ -3,6 +3,24 @@
## Unreleased - 2021-xx-xx ## Unreleased - 2021-xx-xx
## 3.0.0-beta.7 - 2021-11-22
* Fix compatibility with experimental `io-uring` feature of `actix-rt`. [#2408]
[#2408]: https://github.com/actix/actix-web/pull/2408
## 3.0.0-beta.6 - 2021-11-15
* `TestServer::stop` is now async and will wait for the server and system to shutdown. [#2442]
* Update `actix-server` to `2.0.0-beta.9`. [#2442]
* Minimum supported Rust version (MSRV) is now 1.52.
[#2442]: https://github.com/actix/actix-web/pull/2442
## 3.0.0-beta.5 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.
## 3.0.0-beta.4 - 2021-04-02 ## 3.0.0-beta.4 - 2021-04-02
* Added `TestServer::client_headers` method. [#2097] * Added `TestServer::client_headers` method. [#2097]

View File

@ -1,18 +1,18 @@
[package] [package]
name = "actix-http-test" name = "actix-http-test"
version = "3.0.0-beta.4" version = "3.0.0-beta.7"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Various helpers for Actix applications to use during testing" description = "Various helpers for Actix applications to use during testing"
readme = "README.md"
keywords = ["http", "web", "framework", "async", "futures"] keywords = ["http", "web", "framework", "async", "futures"]
homepage = "https://actix.rs" homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git" repository = "https://github.com/actix/actix-web.git"
documentation = "https://docs.rs/actix-http-test/" categories = [
categories = ["network-programming", "asynchronous", "network-programming",
"web-programming::http-server", "asynchronous",
"web-programming::websocket"] "web-programming::http-server",
"web-programming::websocket",
]
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
exclude = [".gitignore", ".cargo/config"]
edition = "2018" edition = "2018"
[package.metadata.docs.rs] [package.metadata.docs.rs]
@ -30,26 +30,26 @@ openssl = ["tls-openssl", "awc/openssl"]
[dependencies] [dependencies]
actix-service = "2.0.0" actix-service = "2.0.0"
actix-codec = "0.4.0" actix-codec = "0.4.1"
actix-tls = "3.0.0-beta.5" actix-tls = "3.0.0-beta.9"
actix-utils = "3.0.0" actix-utils = "3.0.0"
actix-rt = "2.2" actix-rt = "2.2"
actix-server = "2.0.0-beta.3" actix-server = "2.0.0-beta.9"
awc = { version = "3.0.0-beta.7", default-features = false } awc = { version = "3.0.0-beta.11", default-features = false }
base64 = "0.13" base64 = "0.13"
bytes = "1" bytes = "1"
futures-core = { version = "0.3.7", default-features = false } futures-core = { version = "0.3.7", default-features = false }
http = "0.2.2" http = "0.2.5"
log = "0.4" log = "0.4"
socket2 = "0.4" socket2 = "0.4"
serde = "1.0" serde = "1.0"
serde_json = "1.0" serde_json = "1.0"
slab = "0.4" slab = "0.4"
serde_urlencoded = "0.7" serde_urlencoded = "0.7"
time = { version = "0.2.23", default-features = false, features = ["std"] }
tls-openssl = { version = "0.10.9", package = "openssl", optional = true } tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
tokio = { version = "1.2", features = ["sync"] }
[dev-dependencies] [dev-dependencies]
actix-web = { version = "4.0.0-beta.8", default-features = false, features = ["cookies"] } actix-web = { version = "4.0.0-beta.11", default-features = false, features = ["cookies"] }
actix-http = "3.0.0-beta.8" actix-http = "3.0.0-beta.13"

View File

@ -3,15 +3,15 @@
> Various helpers for Actix applications to use during testing. > Various helpers for Actix applications to use during testing.
[![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test) [![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test)
[![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.0.0-beta.4)](https://docs.rs/actix-http-test/3.0.0-beta.4) [![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.0.0-beta.7)](https://docs.rs/actix-http-test/3.0.0-beta.7)
[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) [![Version](https://img.shields.io/badge/rustc-1.52+-ab6000.svg)](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http-test) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http-test)
<br> <br>
[![Dependency Status](https://deps.rs/crate/actix-http-test/3.0.0-beta.4/status.svg)](https://deps.rs/crate/actix-http-test/3.0.0-beta.4) [![Dependency Status](https://deps.rs/crate/actix-http-test/3.0.0-beta.7/status.svg)](https://deps.rs/crate/actix-http-test/3.0.0-beta.7)
[![Download](https://img.shields.io/crates/d/actix-http-test.svg)](https://crates.io/crates/actix-http-test) [![Download](https://img.shields.io/crates/d/actix-http-test.svg)](https://crates.io/crates/actix-http-test)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources ## Documentation & Resources
- [API Documentation](https://docs.rs/actix-http-test) - [API Documentation](https://docs.rs/actix-http-test)
- Minimum Supported Rust Version (MSRV): 1.46.0 - Minimum Supported Rust Version (MSRV): 1.52

View File

@ -7,8 +7,7 @@
#[cfg(feature = "openssl")] #[cfg(feature = "openssl")]
extern crate tls_openssl as openssl; extern crate tls_openssl as openssl;
use std::sync::mpsc; use std::{net, thread, time::Duration};
use std::{net, thread, time};
use actix_codec::{AsyncRead, AsyncWrite, Framed}; use actix_codec::{AsyncRead, AsyncWrite, Framed};
use actix_rt::{net::TcpStream, System}; use actix_rt::{net::TcpStream, System};
@ -20,29 +19,28 @@ use bytes::Bytes;
use futures_core::stream::Stream; use futures_core::stream::Stream;
use http::Method; use http::Method;
use socket2::{Domain, Protocol, Socket, Type}; use socket2::{Domain, Protocol, Socket, Type};
use tokio::sync::mpsc;
/// Start test server /// Start test server.
/// ///
/// `TestServer` is very simple test server that simplify process of writing /// `TestServer` is a very simple test server that simplifies the process of writing
/// integration tests cases for actix web applications. /// integration test cases for HTTP applications.
/// ///
/// # Examples /// # Examples
/// /// ```no_run
/// ```
/// use actix_http::HttpService; /// use actix_http::HttpService;
/// use actix_http_test::TestServer; /// use actix_http_test::test_server;
/// use actix_web::{web, App, HttpResponse, Error}; /// use actix_web::{web, App, HttpResponse, Error};
/// ///
/// async fn my_handler() -> Result<HttpResponse, Error> { /// async fn my_handler() -> Result<HttpResponse, Error> {
/// Ok(HttpResponse::Ok().into()) /// Ok(HttpResponse::Ok().into())
/// } /// }
/// ///
/// #[actix_rt::test] /// #[actix_web::test]
/// async fn test_example() { /// async fn test_example() {
/// let mut srv = TestServer::start( /// let mut srv = TestServer::start(||
/// || HttpService::new( /// HttpService::new(
/// App::new().service( /// App::new().service(web::resource("/").to(my_handler))
/// web::resource("/").to(my_handler))
/// ) /// )
/// ); /// );
/// ///
@ -56,72 +54,86 @@ pub async fn test_server<F: ServiceFactory<TcpStream>>(factory: F) -> TestServer
test_server_with_addr(tcp, factory).await test_server_with_addr(tcp, factory).await
} }
/// Start [`test server`](test_server()) on a concrete Address /// Start [`test server`](test_server()) on an existing address binding.
pub async fn test_server_with_addr<F: ServiceFactory<TcpStream>>( pub async fn test_server_with_addr<F: ServiceFactory<TcpStream>>(
tcp: net::TcpListener, tcp: net::TcpListener,
factory: F, factory: F,
) -> TestServer { ) -> TestServer {
let (tx, rx) = mpsc::channel(); let (started_tx, started_rx) = std::sync::mpsc::channel();
let (thread_stop_tx, thread_stop_rx) = mpsc::channel(1);
// run server in separate thread // run server in separate thread
thread::spawn(move || { thread::spawn(move || {
let sys = System::new(); System::new().block_on(async move {
let local_addr = tcp.local_addr().unwrap(); let local_addr = tcp.local_addr().unwrap();
let srv = Server::build() let srv = Server::build()
.listen("test", tcp, factory)? .workers(1)
.workers(1) .disable_signals()
.disable_signals(); .system_exit()
.listen("test", tcp, factory)
.expect("test server could not be created");
sys.block_on(async { let srv = srv.run();
srv.run(); started_tx
tx.send((System::current(), local_addr)).unwrap(); .send((System::current(), srv.handle(), local_addr))
.unwrap();
// drive server loop
srv.await.unwrap();
}); });
sys.run() // notify TestServer that server and system have shut down
// all thread managed resources should be dropped at this point
let _ = thread_stop_tx.send(());
}); });
let (system, addr) = rx.recv().unwrap(); let (system, server, addr) = started_rx.recv().unwrap();
let client = { let client = {
#[cfg(feature = "openssl")]
let connector = { let connector = {
#[cfg(feature = "openssl")] use openssl::ssl::{SslConnector, SslMethod, SslVerifyMode};
{
use openssl::ssl::{SslConnector, SslMethod, SslVerifyMode};
let mut builder = SslConnector::builder(SslMethod::tls()).unwrap(); let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
builder.set_verify(SslVerifyMode::NONE);
let _ = builder builder.set_verify(SslVerifyMode::NONE);
.set_alpn_protos(b"\x02h2\x08http/1.1") let _ = builder
.map_err(|e| log::error!("Can not set alpn protocol: {:?}", e)); .set_alpn_protos(b"\x02h2\x08http/1.1")
Connector::new() .map_err(|e| log::error!("Can not set alpn protocol: {:?}", e));
.conn_lifetime(time::Duration::from_secs(0))
.timeout(time::Duration::from_millis(30000)) Connector::new()
.ssl(builder.build()) .conn_lifetime(Duration::from_secs(0))
} .timeout(Duration::from_millis(30000))
#[cfg(not(feature = "openssl"))] .ssl(builder.build())
{ };
Connector::new()
.conn_lifetime(time::Duration::from_secs(0)) #[cfg(not(feature = "openssl"))]
.timeout(time::Duration::from_millis(30000)) let connector = {
} Connector::new()
.conn_lifetime(Duration::from_secs(0))
.timeout(Duration::from_millis(30000))
}; };
Client::builder().connector(connector).finish() Client::builder().connector(connector).finish()
}; };
TestServer { TestServer {
addr, server,
client, client,
system, system,
addr,
thread_stop_rx,
} }
} }
/// Test server controller /// Test server controller
pub struct TestServer { pub struct TestServer {
server: actix_server::ServerHandle,
client: awc::Client,
system: actix_rt::System,
addr: net::SocketAddr, addr: net::SocketAddr,
client: Client, thread_stop_rx: mpsc::Receiver<()>,
system: System,
} }
impl TestServer { impl TestServer {
@ -258,15 +270,32 @@ impl TestServer {
self.client.headers() self.client.headers()
} }
/// Stop HTTP server /// Stop HTTP server.
fn stop(&mut self) { ///
/// Waits for the spawned `Server` and `System` to shut down (forced, not graceful).
pub async fn stop(&mut self) {
// signal server to stop
self.server.stop(false).await;
// also signal system to stop
// though this is handled by `ServerBuilder::exit_system` too
self.system.stop(); self.system.stop();
// wait for thread to be stopped but don't care about result
let _ = self.thread_stop_rx.recv().await;
} }
} }
impl Drop for TestServer { impl Drop for TestServer {
fn drop(&mut self) { fn drop(&mut self) {
self.stop() // calls in this Drop impl should be enough to shut down the server, system, and thread
// without needing to await anything
// signal server to stop
let _ = self.server.stop(true);
// signal system to stop
self.system.stop();
} }
} }
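With `stop` now async and `Drop` only signalling shutdown, a test that wants a clean shutdown point can await it explicitly. A hedged sketch of such a test, modelled on the doc example above; the route, handler name, and the `.tcp()` finaliser are assumptions of this sketch, not taken from the diff:

use actix_http::HttpService;
use actix_http_test::test_server;
use actix_web::{web, App, Error, HttpResponse};

async fn my_handler() -> Result<HttpResponse, Error> {
    Ok(HttpResponse::Ok().finish())
}

#[actix_rt::test]
async fn server_stops_cleanly() {
    let mut srv = test_server(|| {
        HttpService::new(App::new().service(web::resource("/").to(my_handler))).tcp()
    })
    .await;

    let res = srv.get("/").send().await.unwrap();
    assert!(res.status().is_success());

    // `stop` is async now: wait for the server thread and its `System` to shut down.
    srv.stop().await;
}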

View File

@ -3,6 +3,73 @@
## Unreleased - 2021-xx-xx ## Unreleased - 2021-xx-xx
## 3.0.0-beta.13 - 2021-11-22
### Added
* `body::AnyBody::empty` for quickly creating an empty body. [#2446]
* `body::AnyBody::none` for quickly creating a "none" body. [#2456]
* `impl Clone` for `body::AnyBody<S> where S: Clone`. [#2448]
* `body::AnyBody::into_boxed` for quickly converting to a type-erased, boxed body type. [#2448]
### Changed
* Rename `body::AnyBody::{Message => Body}`. [#2446]
* Rename `body::AnyBody::{from_message => new_boxed}`. [#2448]
* Rename `body::AnyBody::{from_slice => copy_from_slice}`. [#2448]
* Rename `body::{BoxAnyBody => BoxBody}`. [#2448]
* Change representation of `AnyBody` to include a type parameter in `Body` variant. Defaults to `BoxBody`. [#2448]
* `Encoder::response` now returns `AnyBody<Encoder<B>>`. [#2448]
### Removed
* `body::AnyBody::Empty`; an empty body can now only be represented as a zero-length `Bytes` variant. [#2446]
* `body::BodySize::Empty`; an empty body can now only be represented as a `Sized(0)` variant. [#2446]
* `EncoderError::Boxed`; it is no longer required. [#2446]
* `body::ResponseBody`; its function is replaced by the new `body::AnyBody` enum. [#2446]
[#2446]: https://github.com/actix/actix-web/pull/2446
[#2448]: https://github.com/actix/actix-web/pull/2448
[#2456]: https://github.com/actix/actix-web/pull/2456
## 3.0.0-beta.12 - 2021-11-15
### Changed
* Update `actix-server` to `2.0.0-beta.9`. [#2442]
### Removed
* `client` module. [#2425]
* `trust-dns` feature. [#2425]
[#2425]: https://github.com/actix/actix-web/pull/2425
[#2442]: https://github.com/actix/actix-web/pull/2442
## 3.0.0-beta.11 - 2021-10-20
### Changed
* Updated rustls to v0.20. [#2414]
* Minimum supported Rust version (MSRV) is now 1.52.
[#2414]: https://github.com/actix/actix-web/pull/2414
## 3.0.0-beta.10 - 2021-09-09
### Changed
* `ContentEncoding` is now marked `#[non_exhaustive]`. [#2377]
* Minimum supported Rust version (MSRV) is now 1.51.
### Fixed
* Remove slice creation pointing to potential uninitialized data on h1 encoder. [#2364]
* Remove `Into<Error>` bound on `Encoder` body types. [#2375]
* Fix quality parse error in Accept-Encoding header. [#2344]
[#2364]: https://github.com/actix/actix-web/pull/2364
[#2375]: https://github.com/actix/actix-web/pull/2375
[#2344]: https://github.com/actix/actix-web/pull/2344
[#2377]: https://github.com/actix/actix-web/pull/2377
## 3.0.0-beta.9 - 2021-08-09
### Fixed
* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977)
## 3.0.0-beta.8 - 2021-06-26 ## 3.0.0-beta.8 - 2021-06-26
### Changed ### Changed
* Change compression algorithm features flags. [#2250] * Change compression algorithm features flags. [#2250]
@ -210,6 +277,11 @@
[#1878]: https://github.com/actix/actix-web/pull/1878 [#1878]: https://github.com/actix/actix-web/pull/1878
## 2.2.1 - 2021-08-09
### Fixed
* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977)
## 2.2.0 - 2020-11-25 ## 2.2.0 - 2020-11-25
### Added ### Added
* HttpResponse builders for 1xx status codes. [#1768] * HttpResponse builders for 1xx status codes. [#1768]

View File

@ -1,14 +1,17 @@
[package] [package]
name = "actix-http" name = "actix-http"
version = "3.0.0-beta.8" version = "3.0.0-beta.13"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "HTTP primitives for the Actix ecosystem" description = "HTTP primitives for the Actix ecosystem"
keywords = ["actix", "http", "framework", "async", "futures"] keywords = ["actix", "http", "framework", "async", "futures"]
homepage = "https://actix.rs" homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web" repository = "https://github.com/actix/actix-web.git"
categories = ["network-programming", "asynchronous", categories = [
"web-programming::http-server", "network-programming",
"web-programming::websocket"] "asynchronous",
"web-programming::http-server",
"web-programming::websocket",
]
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
edition = "2018" edition = "2018"
@ -24,29 +27,25 @@ path = "src/lib.rs"
default = [] default = []
# openssl # openssl
openssl = ["actix-tls/openssl"] openssl = ["actix-tls/accept", "actix-tls/openssl"]
# rustls support # rustls support
rustls = ["actix-tls/rustls"] rustls = ["actix-tls/accept", "actix-tls/rustls"]
# enable compression support # enable compression support
compress-brotli = ["brotli2", "__compress"] compress-brotli = ["brotli2", "__compress"]
compress-gzip = ["flate2", "__compress"] compress-gzip = ["flate2", "__compress"]
compress-zstd = ["zstd", "__compress"] compress-zstd = ["zstd", "__compress"]
# trust-dns as client dns resolver
trust-dns = ["trust-dns-resolver"]
# Internal (PRIVATE!) features used to aid testing and checking feature status. # Internal (PRIVATE!) features used to aid testing and checking feature status.
# Don't rely on these whatsoever. They may disappear at any time. # Don't rely on these whatsoever. They may disappear at any time.
__compress = [] __compress = []
[dependencies] [dependencies]
actix-service = "2.0.0" actix-service = "2.0.0"
actix-codec = "0.4.0" actix-codec = "0.4.1"
actix-utils = "3.0.0" actix-utils = "3.0.0"
actix-rt = "2.2" actix-rt = "2.2"
actix-tls = { version = "3.0.0-beta.5", features = ["accept", "connect"] }
ahash = "0.7" ahash = "0.7"
base64 = "0.13" base64 = "0.13"
@ -58,45 +57,45 @@ encoding_rs = "0.8"
futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] } futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] }
futures-util = { version = "0.3.7", default-features = false, features = ["alloc", "sink"] } futures-util = { version = "0.3.7", default-features = false, features = ["alloc", "sink"] }
h2 = "0.3.1" h2 = "0.3.1"
http = "0.2.2" http = "0.2.5"
httparse = "1.3" httparse = "1.5.1"
httpdate = "1.0.1"
itoa = "0.4" itoa = "0.4"
language-tags = "0.3" language-tags = "0.3"
local-channel = "0.1" local-channel = "0.1"
once_cell = "1.5"
log = "0.4" log = "0.4"
mime = "0.3" mime = "0.3"
percent-encoding = "2.1" percent-encoding = "2.1"
pin-project = "1.0.0" pin-project = "1.0.0"
pin-project-lite = "0.2" pin-project-lite = "0.2"
rand = "0.8" rand = "0.8"
regex = "1.3"
serde = "1.0"
sha-1 = "0.9" sha-1 = "0.9"
smallvec = "1.6" smallvec = "1.6.1"
time = { version = "0.2.23", default-features = false, features = ["std"] }
tokio = { version = "1.2", features = ["sync"] } # tls
actix-tls = { version = "3.0.0-beta.9", default-features = false, optional = true }
# compression # compression
brotli2 = { version="0.3.2", optional = true } brotli2 = { version="0.3.2", optional = true }
flate2 = { version = "1.0.13", optional = true } flate2 = { version = "1.0.13", optional = true }
zstd = { version = "0.7", optional = true } zstd = { version = "0.9", optional = true }
trust-dns-resolver = { version = "0.20.0", optional = true }
[dev-dependencies] [dev-dependencies]
actix-server = "2.0.0-beta.3" actix-server = "2.0.0-beta.9"
actix-http-test = { version = "3.0.0-beta.4", features = ["openssl"] } actix-http-test = { version = "3.0.0-beta.7", features = ["openssl"] }
actix-tls = { version = "3.0.0-beta.5", features = ["openssl"] } actix-tls = { version = "3.0.0-beta.9", features = ["openssl"] }
async-stream = "0.3" async-stream = "0.3"
criterion = { version = "0.3", features = ["html_reports"] } criterion = { version = "0.3", features = ["html_reports"] }
env_logger = "0.8" env_logger = "0.9"
rcgen = "0.8" rcgen = "0.8"
regex = "1.3"
rustls-pemfile = "0.2"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
tls-openssl = { version = "0.10", package = "openssl" } static_assertions = "1"
tls-rustls = { version = "0.19", package = "rustls" } tls-openssl = { package = "openssl", version = "0.10.9" }
webpki = { version = "0.21.0" } tls-rustls = { package = "rustls", version = "0.20.0" }
tokio = { version = "1.2", features = ["net", "rt"] }
[[example]] [[example]]
name = "ws" name = "ws"

View File

@ -3,18 +3,18 @@
> HTTP primitives for the Actix ecosystem. > HTTP primitives for the Actix ecosystem.
[![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http) [![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http)
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.0.0-beta.8)](https://docs.rs/actix-http/3.0.0-beta.8) [![Documentation](https://docs.rs/actix-http/badge.svg?version=3.0.0-beta.13)](https://docs.rs/actix-http/3.0.0-beta.13)
[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) [![Version](https://img.shields.io/badge/rustc-1.52+-ab6000.svg)](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-http/3.0.0-beta.8/status.svg)](https://deps.rs/crate/actix-http/3.0.0-beta.8) [![dependency status](https://deps.rs/crate/actix-http/3.0.0-beta.13/status.svg)](https://deps.rs/crate/actix-http/3.0.0-beta.13)
[![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http) [![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources ## Documentation & Resources
- [API Documentation](https://docs.rs/actix-http) - [API Documentation](https://docs.rs/actix-http)
- Minimum Supported Rust Version (MSRV): 1.46.0 - Minimum Supported Rust Version (MSRV): 1.52
## Example ## Example

View File

@ -18,7 +18,8 @@ fn bench_write_camel_case(c: &mut Criterion) {
group.bench_with_input(BenchmarkId::new("New", i), bts, |b, bts| { group.bench_with_input(BenchmarkId::new("New", i), bts, |b, bts| {
b.iter(|| { b.iter(|| {
let mut buf = black_box([0; 24]); let mut buf = black_box([0; 24]);
_new::write_camel_case(black_box(bts), &mut buf) let len = black_box(bts.len());
_new::write_camel_case(black_box(bts), buf.as_mut_ptr(), len)
}); });
}); });
} }
@ -30,9 +31,12 @@ criterion_group!(benches, bench_write_camel_case);
criterion_main!(benches); criterion_main!(benches);
mod _new { mod _new {
pub fn write_camel_case(value: &[u8], buffer: &mut [u8]) { pub fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
// first copy entire (potentially wrong) slice to output // first copy entire (potentially wrong) slice to output
buffer[..value.len()].copy_from_slice(value); let buffer = unsafe {
std::ptr::copy_nonoverlapping(value.as_ptr(), buf, len);
std::slice::from_raw_parts_mut(buf, len)
};
let mut iter = value.iter(); let mut iter = value.iter();

View File

@ -1,12 +1,12 @@
use std::io; use std::io;
use actix_http::{body::Body, http::HeaderValue, http::StatusCode}; use actix_http::{body::AnyBody, http::HeaderValue, http::StatusCode};
use actix_http::{Error, HttpService, Request, Response}; use actix_http::{Error, HttpService, Request, Response};
use actix_server::Server; use actix_server::Server;
use bytes::BytesMut; use bytes::BytesMut;
use futures_util::StreamExt as _; use futures_util::StreamExt as _;
async fn handle_request(mut req: Request) -> Result<Response<Body>, Error> { async fn handle_request(mut req: Request) -> Result<Response<AnyBody>, Error> {
let mut body = BytesMut::new(); let mut body = BytesMut::new();
while let Some(item) = req.payload().next().await { while let Some(item) = req.payload().next().await {
body.extend_from_slice(&item?) body.extend_from_slice(&item?)

View File

@ -85,22 +85,31 @@ impl Stream for Heartbeat {
fn tls_config() -> rustls::ServerConfig { fn tls_config() -> rustls::ServerConfig {
use std::io::BufReader; use std::io::BufReader;
use rustls::{ use rustls::{Certificate, PrivateKey};
internal::pemfile::{certs, pkcs8_private_keys}, use rustls_pemfile::{certs, pkcs8_private_keys};
NoClientAuth, ServerConfig,
};
let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap(); let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap();
let cert_file = cert.serialize_pem().unwrap(); let cert_file = cert.serialize_pem().unwrap();
let key_file = cert.serialize_private_key_pem(); let key_file = cert.serialize_private_key_pem();
let mut config = ServerConfig::new(NoClientAuth::new());
let cert_file = &mut BufReader::new(cert_file.as_bytes()); let cert_file = &mut BufReader::new(cert_file.as_bytes());
let key_file = &mut BufReader::new(key_file.as_bytes()); let key_file = &mut BufReader::new(key_file.as_bytes());
let cert_chain = certs(cert_file).unwrap(); let cert_chain = certs(cert_file)
.unwrap()
.into_iter()
.map(Certificate)
.collect();
let mut keys = pkcs8_private_keys(key_file).unwrap(); let mut keys = pkcs8_private_keys(key_file).unwrap();
config.set_single_cert(cert_chain, keys.remove(0)).unwrap();
let mut config = rustls::ServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth()
.with_single_cert(cert_chain, PrivateKey(keys.remove(0)))
.unwrap();
config.alpn_protocols.push(b"http/1.1".to_vec());
config.alpn_protocols.push(b"h2".to_vec());
config config
} }

View File

@ -7,54 +7,95 @@ use std::{
}; };
use bytes::{Bytes, BytesMut}; use bytes::{Bytes, BytesMut};
use futures_core::{ready, Stream}; use futures_core::Stream;
use pin_project::pin_project;
use crate::error::Error; use crate::error::Error;
use super::{BodySize, BodyStream, MessageBody, MessageBodyMapErr, SizedStream}; use super::{BodySize, BodyStream, MessageBody, MessageBodyMapErr, SizedStream};
#[deprecated(since = "4.0.0", note = "Renamed to `AnyBody`.")]
pub type Body = AnyBody; pub type Body = AnyBody;
/// Represents various types of HTTP message body. /// Represents various types of HTTP message body.
pub enum AnyBody { #[pin_project(project = AnyBodyProj)]
#[derive(Clone)]
pub enum AnyBody<B = BoxBody> {
/// Empty response. `Content-Length` header is not set. /// Empty response. `Content-Length` header is not set.
None, None,
/// Zero sized response body. `Content-Length` header is set to `0`. /// Complete, in-memory response body.
Empty,
/// Specific response body.
Bytes(Bytes), Bytes(Bytes),
/// Generic message body. /// Generic / Other message body.
Message(BoxAnyBody), Body(#[pin] B),
} }
impl AnyBody { impl AnyBody {
/// Create body from slice (copy) /// Constructs a "body" representing an empty response.
pub fn from_slice(s: &[u8]) -> Self { pub fn none() -> Self {
Self::Bytes(Bytes::copy_from_slice(s)) Self::None
} }
/// Create body from generic message body. /// Constructs a new, 0-length body.
pub fn from_message<B>(body: B) -> Self pub fn empty() -> Self {
Self::Bytes(Bytes::new())
}
/// Create boxed body from generic message body.
pub fn new_boxed<B>(body: B) -> Self
where where
B: MessageBody + 'static, B: MessageBody + 'static,
B::Error: Into<Box<dyn StdError + 'static>>, B::Error: Into<Box<dyn StdError + 'static>>,
{ {
Self::Message(BoxAnyBody::from_body(body)) Self::Body(BoxBody::from_body(body))
}
/// Constructs new `AnyBody` instance from a slice of bytes by copying it.
///
/// If your bytes container is owned, it may be cheaper to use a `From` impl.
pub fn copy_from_slice(s: &[u8]) -> Self {
Self::Bytes(Bytes::copy_from_slice(s))
}
#[doc(hidden)]
#[deprecated(since = "4.0.0", note = "Renamed to `copy_from_slice`.")]
pub fn from_slice(s: &[u8]) -> Self {
Self::Bytes(Bytes::copy_from_slice(s))
} }
} }
impl MessageBody for AnyBody { impl<B> AnyBody<B>
where
B: MessageBody + 'static,
B::Error: Into<Box<dyn StdError + 'static>>,
{
/// Create body from generic message body.
pub fn new(body: B) -> Self {
Self::Body(body)
}
pub fn into_boxed(self) -> AnyBody {
match self {
Self::None => AnyBody::None,
Self::Bytes(bytes) => AnyBody::Bytes(bytes),
Self::Body(body) => AnyBody::new_boxed(body),
}
}
}
impl<B> MessageBody for AnyBody<B>
where
B: MessageBody,
B::Error: Into<Box<dyn StdError>> + 'static,
{
type Error = Error; type Error = Error;
fn size(&self) -> BodySize { fn size(&self) -> BodySize {
match self { match self {
AnyBody::None => BodySize::None, AnyBody::None => BodySize::None,
AnyBody::Empty => BodySize::Empty,
AnyBody::Bytes(ref bin) => BodySize::Sized(bin.len() as u64), AnyBody::Bytes(ref bin) => BodySize::Sized(bin.len() as u64),
AnyBody::Message(ref body) => body.size(), AnyBody::Body(ref body) => body.size(),
} }
} }
@ -62,10 +103,9 @@ impl MessageBody for AnyBody {
self: Pin<&mut Self>, self: Pin<&mut Self>,
cx: &mut Context<'_>, cx: &mut Context<'_>,
) -> Poll<Option<Result<Bytes, Self::Error>>> { ) -> Poll<Option<Result<Bytes, Self::Error>>> {
match self.get_mut() { match self.project() {
AnyBody::None => Poll::Ready(None), AnyBodyProj::None => Poll::Ready(None),
AnyBody::Empty => Poll::Ready(None), AnyBodyProj::Bytes(bin) => {
AnyBody::Bytes(ref mut bin) => {
let len = bin.len(); let len = bin.len();
if len == 0 { if len == 0 {
Poll::Ready(None) Poll::Ready(None)
@ -74,93 +114,96 @@ impl MessageBody for AnyBody {
} }
} }
// TODO: MSRV 1.51: poll_map_err AnyBodyProj::Body(body) => body
AnyBody::Message(body) => match ready!(body.as_pin_mut().poll_next(cx)) { .poll_next(cx)
Some(Err(err)) => { .map_err(|err| Error::new_body().with_cause(err)),
Poll::Ready(Some(Err(Error::new_body().with_cause(err))))
}
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
None => Poll::Ready(None),
},
} }
} }
} }
impl PartialEq for AnyBody { impl PartialEq for AnyBody {
fn eq(&self, other: &Body) -> bool { fn eq(&self, other: &AnyBody) -> bool {
match *self { match *self {
AnyBody::None => matches!(*other, AnyBody::None), AnyBody::None => matches!(*other, AnyBody::None),
AnyBody::Empty => matches!(*other, AnyBody::Empty),
AnyBody::Bytes(ref b) => match *other { AnyBody::Bytes(ref b) => match *other {
AnyBody::Bytes(ref b2) => b == b2, AnyBody::Bytes(ref b2) => b == b2,
_ => false, _ => false,
}, },
AnyBody::Message(_) => false, AnyBody::Body(_) => false,
} }
} }
} }
impl fmt::Debug for AnyBody { impl<S: fmt::Debug> fmt::Debug for AnyBody<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self { match *self {
AnyBody::None => write!(f, "AnyBody::None"), AnyBody::None => write!(f, "AnyBody::None"),
AnyBody::Empty => write!(f, "AnyBody::Empty"), AnyBody::Bytes(ref bytes) => write!(f, "AnyBody::Bytes({:?})", bytes),
AnyBody::Bytes(ref b) => write!(f, "AnyBody::Bytes({:?})", b), AnyBody::Body(ref stream) => write!(f, "AnyBody::Message({:?})", stream),
AnyBody::Message(_) => write!(f, "AnyBody::Message(_)"),
} }
} }
} }
impl From<&'static str> for AnyBody { impl<B> From<&'static str> for AnyBody<B> {
fn from(s: &'static str) -> Body { fn from(string: &'static str) -> Self {
AnyBody::Bytes(Bytes::from_static(s.as_ref())) Self::Bytes(Bytes::from_static(string.as_ref()))
} }
} }
impl From<&'static [u8]> for AnyBody { impl<B> From<&'static [u8]> for AnyBody<B> {
fn from(s: &'static [u8]) -> Body { fn from(bytes: &'static [u8]) -> Self {
AnyBody::Bytes(Bytes::from_static(s)) Self::Bytes(Bytes::from_static(bytes))
} }
} }
impl From<Vec<u8>> for AnyBody { impl<B> From<Vec<u8>> for AnyBody<B> {
fn from(vec: Vec<u8>) -> Body { fn from(vec: Vec<u8>) -> Self {
AnyBody::Bytes(Bytes::from(vec)) Self::Bytes(Bytes::from(vec))
} }
} }
impl From<String> for AnyBody { impl<B> From<String> for AnyBody<B> {
fn from(s: String) -> Body { fn from(string: String) -> Self {
s.into_bytes().into() Self::Bytes(Bytes::from(string))
} }
} }
impl From<&'_ String> for AnyBody { impl<B> From<&'_ String> for AnyBody<B> {
fn from(s: &String) -> Body { fn from(string: &String) -> Self {
AnyBody::Bytes(Bytes::copy_from_slice(AsRef::<[u8]>::as_ref(&s))) Self::Bytes(Bytes::copy_from_slice(AsRef::<[u8]>::as_ref(&string)))
} }
} }
impl From<Cow<'_, str>> for AnyBody { impl<B> From<Cow<'_, str>> for AnyBody<B> {
fn from(s: Cow<'_, str>) -> Body { fn from(string: Cow<'_, str>) -> Self {
match s { match string {
Cow::Owned(s) => AnyBody::from(s), Cow::Owned(s) => Self::from(s),
Cow::Borrowed(s) => { Cow::Borrowed(s) => {
AnyBody::Bytes(Bytes::copy_from_slice(AsRef::<[u8]>::as_ref(s))) Self::Bytes(Bytes::copy_from_slice(AsRef::<[u8]>::as_ref(s)))
} }
} }
} }
} }
impl From<Bytes> for AnyBody { impl<B> From<Bytes> for AnyBody<B> {
fn from(s: Bytes) -> Body { fn from(bytes: Bytes) -> Self {
AnyBody::Bytes(s) Self::Bytes(bytes)
} }
} }
impl From<BytesMut> for AnyBody { impl<B> From<BytesMut> for AnyBody<B> {
fn from(s: BytesMut) -> Body { fn from(bytes: BytesMut) -> Self {
AnyBody::Bytes(s.freeze()) Self::Bytes(bytes.freeze())
}
}
impl<S, E> From<SizedStream<S>> for AnyBody<SizedStream<S>>
where
S: Stream<Item = Result<Bytes, E>> + 'static,
E: Into<Box<dyn StdError>> + 'static,
{
fn from(stream: SizedStream<S>) -> Self {
AnyBody::new(stream)
} }
} }
@ -169,8 +212,18 @@ where
S: Stream<Item = Result<Bytes, E>> + 'static, S: Stream<Item = Result<Bytes, E>> + 'static,
E: Into<Box<dyn StdError>> + 'static, E: Into<Box<dyn StdError>> + 'static,
{ {
fn from(s: SizedStream<S>) -> Body { fn from(stream: SizedStream<S>) -> Self {
AnyBody::from_message(s) AnyBody::new_boxed(stream)
}
}
impl<S, E> From<BodyStream<S>> for AnyBody<BodyStream<S>>
where
S: Stream<Item = Result<Bytes, E>> + 'static,
E: Into<Box<dyn StdError>> + 'static,
{
fn from(stream: BodyStream<S>) -> Self {
AnyBody::new(stream)
} }
} }
@ -179,15 +232,15 @@ where
S: Stream<Item = Result<Bytes, E>> + 'static, S: Stream<Item = Result<Bytes, E>> + 'static,
E: Into<Box<dyn StdError>> + 'static, E: Into<Box<dyn StdError>> + 'static,
{ {
fn from(s: BodyStream<S>) -> Body { fn from(stream: BodyStream<S>) -> Self {
AnyBody::from_message(s) AnyBody::new_boxed(stream)
} }
} }
/// A boxed message body with boxed errors.
-pub struct BoxAnyBody(Pin<Box<dyn MessageBody<Error = Box<dyn StdError + 'static>>>>);
+pub struct BoxBody(Pin<Box<dyn MessageBody<Error = Box<dyn StdError>>>>);

-impl BoxAnyBody {
+impl BoxBody {
    /// Boxes a `MessageBody` and any errors it generates.
    pub fn from_body<B>(body: B) -> Self
    where

@@ -201,18 +254,18 @@ impl BoxAnyBody {
    /// Returns a mutable pinned reference to the inner message body type.
    pub fn as_pin_mut(
        &mut self,
-    ) -> Pin<&mut (dyn MessageBody<Error = Box<dyn StdError + 'static>>)> {
+    ) -> Pin<&mut (dyn MessageBody<Error = Box<dyn StdError>>)> {
        self.0.as_mut()
    }
}

-impl fmt::Debug for BoxAnyBody {
+impl fmt::Debug for BoxBody {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("BoxAnyBody(dyn MessageBody)")
    }
}

-impl MessageBody for BoxAnyBody {
+impl MessageBody for BoxBody {
    type Error = Error;

    fn size(&self) -> BodySize {

@@ -223,11 +276,58 @@ impl MessageBody for BoxAnyBody {
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<Result<Bytes, Self::Error>>> {
-        // TODO: MSRV 1.51: poll_map_err
-        match ready!(self.0.as_mut().poll_next(cx)) {
-            Some(Err(err)) => Poll::Ready(Some(Err(Error::new_body().with_cause(err)))),
-            Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
-            None => Poll::Ready(None),
-        }
+        self.0
+            .as_mut()
+            .poll_next(cx)
+            .map_err(|err| Error::new_body().with_cause(err))
    }
}
#[cfg(test)]
mod tests {
use std::marker::PhantomPinned;
use static_assertions::{assert_impl_all, assert_not_impl_all};
use super::*;
use crate::body::to_bytes;
struct PinType(PhantomPinned);
impl MessageBody for PinType {
type Error = crate::Error;
fn size(&self) -> BodySize {
unimplemented!()
}
fn poll_next(
self: Pin<&mut Self>,
_cx: &mut Context<'_>,
) -> Poll<Option<Result<Bytes, Self::Error>>> {
unimplemented!()
}
}
assert_impl_all!(AnyBody<()>: MessageBody, fmt::Debug, Send, Sync, Unpin);
assert_impl_all!(AnyBody<AnyBody<()>>: MessageBody, fmt::Debug, Send, Sync, Unpin);
assert_impl_all!(AnyBody<Bytes>: MessageBody, fmt::Debug, Send, Sync, Unpin);
assert_impl_all!(AnyBody: MessageBody, fmt::Debug, Unpin);
assert_impl_all!(BoxBody: MessageBody, fmt::Debug, Unpin);
assert_impl_all!(AnyBody<PinType>: MessageBody);
assert_not_impl_all!(AnyBody: Send, Sync, Unpin);
assert_not_impl_all!(BoxBody: Send, Sync, Unpin);
assert_not_impl_all!(AnyBody<PinType>: Send, Sync, Unpin);
#[actix_rt::test]
async fn nested_boxed_body() {
let body = AnyBody::copy_from_slice(&[1, 2, 3]);
let boxed_body = BoxBody::from_body(BoxBody::from_body(body));
assert_eq!(
to_bytes(boxed_body).await.unwrap(),
Bytes::from(vec![1, 2, 3]),
);
    }
}
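As a rough usage sketch of the trait surface exercised by these tests: any `MessageBody` whose error converts into a boxed error can be erased with `BoxBody::from_body` and then drained with `to_bytes`. The `where` clause of `from_body` is truncated in the hunk above, so the bound is assumed here; `OneChunk` is an illustrative type, not part of the crate.

    use std::{
        convert::Infallible,
        pin::Pin,
        task::{Context, Poll},
    };

    use actix_http::body::{to_bytes, BodySize, BoxBody, MessageBody};
    use bytes::Bytes;

    /// One-shot body that yields a single chunk and then ends.
    struct OneChunk(Option<Bytes>);

    impl MessageBody for OneChunk {
        type Error = Infallible;

        fn size(&self) -> BodySize {
            BodySize::Sized(self.0.as_ref().map_or(0, |b| b.len() as u64))
        }

        fn poll_next(
            mut self: Pin<&mut Self>,
            _cx: &mut Context<'_>,
        ) -> Poll<Option<Result<Bytes, Self::Error>>> {
            Poll::Ready(self.0.take().map(Ok))
        }
    }

    #[actix_rt::test]
    async fn erased_one_chunk_body() {
        let boxed = BoxBody::from_body(OneChunk(Some(Bytes::from_static(b"abc"))));
        assert_eq!(to_bytes(boxed).await.unwrap(), Bytes::from_static(b"abc"));
    }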

View File

@@ -75,10 +75,22 @@ mod tests {
    use derive_more::{Display, Error};
    use futures_core::ready;
    use futures_util::{stream, FutureExt as _};
+    use static_assertions::{assert_impl_all, assert_not_impl_all};

    use super::*;
    use crate::body::to_bytes;
assert_impl_all!(BodyStream<stream::Empty<Result<Bytes, crate::Error>>>: MessageBody);
assert_impl_all!(BodyStream<stream::Empty<Result<Bytes, &'static str>>>: MessageBody);
assert_impl_all!(BodyStream<stream::Repeat<Result<Bytes, &'static str>>>: MessageBody);
assert_impl_all!(BodyStream<stream::Empty<Result<Bytes, Infallible>>>: MessageBody);
assert_impl_all!(BodyStream<stream::Repeat<Result<Bytes, Infallible>>>: MessageBody);
assert_not_impl_all!(BodyStream<stream::Empty<Bytes>>: MessageBody);
assert_not_impl_all!(BodyStream<stream::Repeat<Bytes>>: MessageBody);
// crate::Error is not Clone
assert_not_impl_all!(BodyStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
    #[actix_rt::test]
    async fn skips_empty_chunks() {
        let body = BodyStream::new(stream::iter(

@@ -124,6 +136,30 @@ mod tests {
        assert!(matches!(to_bytes(body).await, Err(StreamErr)));
    }
#[actix_rt::test]
async fn stream_string_error() {
// `&'static str` does not impl `Error`
// but it does impl `Into<Box<dyn Error>>`
let body = BodyStream::new(stream::once(async { Err("stringy error") }));
assert!(matches!(to_bytes(body).await, Err("stringy error")));
}
#[actix_rt::test]
async fn stream_boxed_error() {
// `Box<dyn Error>` does not impl `Error`
// but it does impl `Into<Box<dyn Error>>`
let body = BodyStream::new(stream::once(async {
Err(Box::<dyn StdError>::from("stringy error"))
}));
assert_eq!(
to_bytes(body).await.unwrap_err().to_string(),
"stringy error"
);
}
    #[actix_rt::test]
    async fn stream_delayed_error() {
        let body =

View File

@@ -11,8 +11,6 @@ use bytes::{Bytes, BytesMut};
use futures_core::ready;
use pin_project_lite::pin_project;

-use crate::error::Error;
-
use super::BodySize;

/// An interface for response bodies.

@@ -33,7 +31,7 @@ impl MessageBody for () {
    type Error = Infallible;

    fn size(&self) -> BodySize {
-        BodySize::Empty
+        BodySize::Sized(0)
    }

    fn poll_next(

@@ -47,7 +45,6 @@ impl MessageBody for () {
impl<B> MessageBody for Box<B>
where
    B: MessageBody + Unpin,
-    B::Error: Into<Error>,
{
    type Error = B::Error;

@@ -66,7 +63,6 @@ where
impl<B> MessageBody for Pin<Box<B>>
where
    B: MessageBody,
-    B::Error: Into<Error>,
{
    type Error = B::Error;
View File

@@ -11,15 +11,14 @@ use futures_core::ready;
mod body;
mod body_stream;
mod message_body;
-mod response_body;
mod size;
mod sized_stream;

-pub use self::body::{AnyBody, Body, BoxAnyBody};
+#[allow(deprecated)]
+pub use self::body::{AnyBody, Body, BoxBody};
pub use self::body_stream::BodyStream;
pub use self::message_body::MessageBody;
pub(crate) use self::message_body::MessageBodyMapErr;
-pub use self::response_body::ResponseBody;
pub use self::size::BodySize;
pub use self::sized_stream::SizedStream;

@@ -29,23 +28,24 @@ pub use self::sized_stream::SizedStream;
///
/// # Examples
/// ```
-/// use actix_http::body::{Body, to_bytes};
+/// use actix_http::body::{AnyBody, to_bytes};
/// use bytes::Bytes;
///
/// # async fn test_to_bytes() {
-/// let body = Body::Empty;
+/// let body = AnyBody::none();
/// let bytes = to_bytes(body).await.unwrap();
/// assert!(bytes.is_empty());
///
-/// let body = Body::Bytes(Bytes::from_static(b"123"));
+/// let body = AnyBody::copy_from_slice(b"123");
/// let bytes = to_bytes(body).await.unwrap();
/// assert_eq!(bytes, b"123"[..]);
/// # }
/// ```
pub async fn to_bytes<B: MessageBody>(body: B) -> Result<Bytes, B::Error> {
    let cap = match body.size() {
-        BodySize::None | BodySize::Empty | BodySize::Sized(0) => return Ok(Bytes::new()),
+        BodySize::None | BodySize::Sized(0) => return Ok(Bytes::new()),
        BodySize::Sized(size) => size as usize,
+        // good enough first guess for chunk size
        BodySize::Stream => 32_768,
    };
@@ -75,22 +75,25 @@ mod tests {
    use actix_utils::future::poll_fn;
    use bytes::{Bytes, BytesMut};

-    use super::*;
+    use super::{to_bytes, AnyBody as TestAnyBody, BodySize, MessageBody as _};

-    impl Body {
+    impl AnyBody {
        pub(crate) fn get_ref(&self) -> &[u8] {
            match *self {
-                Body::Bytes(ref bin) => &bin,
+                AnyBody::Bytes(ref bin) => bin,
                _ => panic!(),
            }
        }
    }

+    /// AnyBody alias because rustc does not (can not?) infer the default type parameter.
+    type AnyBody = TestAnyBody;
#[actix_rt::test] #[actix_rt::test]
async fn test_static_str() { async fn test_static_str() {
assert_eq!(Body::from("").size(), BodySize::Sized(0)); assert_eq!(AnyBody::from("").size(), BodySize::Sized(0));
assert_eq!(Body::from("test").size(), BodySize::Sized(4)); assert_eq!(AnyBody::from("test").size(), BodySize::Sized(4));
assert_eq!(Body::from("test").get_ref(), b"test"); assert_eq!(AnyBody::from("test").get_ref(), b"test");
assert_eq!("test".size(), BodySize::Sized(4)); assert_eq!("test".size(), BodySize::Sized(4));
assert_eq!( assert_eq!(
@ -104,13 +107,16 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_static_bytes() { async fn test_static_bytes() {
assert_eq!(Body::from(b"test".as_ref()).size(), BodySize::Sized(4)); assert_eq!(AnyBody::from(b"test".as_ref()).size(), BodySize::Sized(4));
assert_eq!(Body::from(b"test".as_ref()).get_ref(), b"test"); assert_eq!(AnyBody::from(b"test".as_ref()).get_ref(), b"test");
assert_eq!( assert_eq!(
Body::from_slice(b"test".as_ref()).size(), AnyBody::copy_from_slice(b"test".as_ref()).size(),
BodySize::Sized(4) BodySize::Sized(4)
); );
assert_eq!(Body::from_slice(b"test".as_ref()).get_ref(), b"test"); assert_eq!(
AnyBody::copy_from_slice(b"test".as_ref()).get_ref(),
b"test"
);
let sb = Bytes::from(&b"test"[..]); let sb = Bytes::from(&b"test"[..]);
pin!(sb); pin!(sb);
@ -123,8 +129,8 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_vec() { async fn test_vec() {
assert_eq!(Body::from(Vec::from("test")).size(), BodySize::Sized(4)); assert_eq!(AnyBody::from(Vec::from("test")).size(), BodySize::Sized(4));
assert_eq!(Body::from(Vec::from("test")).get_ref(), b"test"); assert_eq!(AnyBody::from(Vec::from("test")).get_ref(), b"test");
let test_vec = Vec::from("test"); let test_vec = Vec::from("test");
pin!(test_vec); pin!(test_vec);
@ -141,8 +147,8 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_bytes() { async fn test_bytes() {
let b = Bytes::from("test"); let b = Bytes::from("test");
assert_eq!(Body::from(b.clone()).size(), BodySize::Sized(4)); assert_eq!(AnyBody::from(b.clone()).size(), BodySize::Sized(4));
assert_eq!(Body::from(b.clone()).get_ref(), b"test"); assert_eq!(AnyBody::from(b.clone()).get_ref(), b"test");
pin!(b); pin!(b);
assert_eq!(b.size(), BodySize::Sized(4)); assert_eq!(b.size(), BodySize::Sized(4));
@ -155,8 +161,8 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_bytes_mut() { async fn test_bytes_mut() {
let b = BytesMut::from("test"); let b = BytesMut::from("test");
assert_eq!(Body::from(b.clone()).size(), BodySize::Sized(4)); assert_eq!(AnyBody::from(b.clone()).size(), BodySize::Sized(4));
assert_eq!(Body::from(b.clone()).get_ref(), b"test"); assert_eq!(AnyBody::from(b.clone()).get_ref(), b"test");
pin!(b); pin!(b);
assert_eq!(b.size(), BodySize::Sized(4)); assert_eq!(b.size(), BodySize::Sized(4));
@ -169,10 +175,10 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_string() { async fn test_string() {
let b = "test".to_owned(); let b = "test".to_owned();
assert_eq!(Body::from(b.clone()).size(), BodySize::Sized(4)); assert_eq!(AnyBody::from(b.clone()).size(), BodySize::Sized(4));
assert_eq!(Body::from(b.clone()).get_ref(), b"test"); assert_eq!(AnyBody::from(b.clone()).get_ref(), b"test");
assert_eq!(Body::from(&b).size(), BodySize::Sized(4)); assert_eq!(AnyBody::from(&b).size(), BodySize::Sized(4));
assert_eq!(Body::from(&b).get_ref(), b"test"); assert_eq!(AnyBody::from(&b).get_ref(), b"test");
pin!(b); pin!(b);
assert_eq!(b.size(), BodySize::Sized(4)); assert_eq!(b.size(), BodySize::Sized(4));
@ -184,7 +190,7 @@ mod tests {
    #[actix_rt::test]
    async fn test_unit() {
-        assert_eq!(().size(), BodySize::Empty);
+        assert_eq!(().size(), BodySize::Sized(0));
        assert!(poll_fn(|cx| Pin::new(&mut ()).poll_next(cx))
            .await
            .is_none());

@@ -194,41 +200,44 @@ mod tests {
    async fn test_box_and_pin() {
        let val = Box::new(());
        pin!(val);
-        assert_eq!(val.size(), BodySize::Empty);
+        assert_eq!(val.size(), BodySize::Sized(0));
        assert!(poll_fn(|cx| val.as_mut().poll_next(cx)).await.is_none());

        let mut val = Box::pin(());
-        assert_eq!(val.size(), BodySize::Empty);
+        assert_eq!(val.size(), BodySize::Sized(0));
        assert!(poll_fn(|cx| val.as_mut().poll_next(cx)).await.is_none());
    }

    #[actix_rt::test]
    async fn test_body_eq() {
        assert!(
-            Body::Bytes(Bytes::from_static(b"1"))
-                == Body::Bytes(Bytes::from_static(b"1"))
+            AnyBody::Bytes(Bytes::from_static(b"1"))
+                == AnyBody::Bytes(Bytes::from_static(b"1"))
        );
-        assert!(Body::Bytes(Bytes::from_static(b"1")) != Body::None);
+        assert!(AnyBody::Bytes(Bytes::from_static(b"1")) != AnyBody::None);
    }

    #[actix_rt::test]
    async fn test_body_debug() {
-        assert!(format!("{:?}", Body::None).contains("Body::None"));
-        assert!(format!("{:?}", Body::Empty).contains("Body::Empty"));
-        assert!(format!("{:?}", Body::Bytes(Bytes::from_static(b"1"))).contains('1'));
+        assert!(format!("{:?}", AnyBody::None).contains("Body::None"));
+        assert!(format!("{:?}", AnyBody::from(Bytes::from_static(b"1"))).contains('1'));
    }
#[actix_rt::test] #[actix_rt::test]
async fn test_serde_json() { async fn test_serde_json() {
use serde_json::{json, Value}; use serde_json::{json, Value};
assert_eq!( assert_eq!(
Body::from(serde_json::to_vec(&Value::String("test".to_owned())).unwrap()) AnyBody::from(
.size(), serde_json::to_vec(&Value::String("test".to_owned())).unwrap()
)
.size(),
BodySize::Sized(6) BodySize::Sized(6)
); );
assert_eq!( assert_eq!(
Body::from(serde_json::to_vec(&json!({"test-key":"test-value"})).unwrap()) AnyBody::from(
.size(), serde_json::to_vec(&json!({"test-key":"test-value"})).unwrap()
)
.size(),
BodySize::Sized(25) BodySize::Sized(25)
); );
} }
@ -252,11 +261,11 @@ mod tests {
    #[actix_rt::test]
    async fn test_to_bytes() {
-        let body = Body::Empty;
+        let body = AnyBody::empty();
        let bytes = to_bytes(body).await.unwrap();
        assert!(bytes.is_empty());

-        let body = Body::Bytes(Bytes::from_static(b"123"));
+        let body = AnyBody::copy_from_slice(b"123");
        let bytes = to_bytes(body).await.unwrap();
        assert_eq!(bytes, b"123"[..]);
    }

View File

@ -1,89 +0,0 @@
use std::{
mem,
pin::Pin,
task::{Context, Poll},
};
use bytes::Bytes;
use futures_core::{ready, Stream};
use pin_project::pin_project;
use crate::error::Error;
use super::{Body, BodySize, MessageBody};
#[pin_project(project = ResponseBodyProj)]
pub enum ResponseBody<B> {
Body(#[pin] B),
Other(Body),
}
impl ResponseBody<Body> {
pub fn into_body<B>(self) -> ResponseBody<B> {
match self {
ResponseBody::Body(b) => ResponseBody::Other(b),
ResponseBody::Other(b) => ResponseBody::Other(b),
}
}
}
impl<B> ResponseBody<B> {
pub fn take_body(&mut self) -> ResponseBody<B> {
mem::replace(self, ResponseBody::Other(Body::None))
}
}
impl<B: MessageBody> ResponseBody<B> {
pub fn as_ref(&self) -> Option<&B> {
if let ResponseBody::Body(ref b) = self {
Some(b)
} else {
None
}
}
}
impl<B> MessageBody for ResponseBody<B>
where
B: MessageBody,
B::Error: Into<Error>,
{
type Error = Error;
fn size(&self) -> BodySize {
match self {
ResponseBody::Body(ref body) => body.size(),
ResponseBody::Other(ref body) => body.size(),
}
}
fn poll_next(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<Result<Bytes, Self::Error>>> {
Stream::poll_next(self, cx)
}
}
impl<B> Stream for ResponseBody<B>
where
B: MessageBody,
B::Error: Into<Error>,
{
type Item = Result<Bytes, Error>;
fn poll_next(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<Self::Item>> {
match self.project() {
// TODO: MSRV 1.51: poll_map_err
ResponseBodyProj::Body(body) => match ready!(body.poll_next(cx)) {
Some(Err(err)) => Poll::Ready(Some(Err(err.into()))),
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
None => Poll::Ready(None),
},
ResponseBodyProj::Other(body) => Pin::new(body).poll_next(cx),
}
}
}

View File

@@ -6,14 +6,9 @@ pub enum BodySize {
    /// Will skip writing Content-Length header.
    None,

-    /// Zero size body.
-    ///
-    /// Will write `Content-Length: 0` header.
-    Empty,
-
    /// Known size body.
    ///
-    /// Will write `Content-Length: N` header. `Sized(0)` is treated the same as `Empty`.
+    /// Will write `Content-Length: N` header.
    Sized(u64),

    /// Unknown size body.

@@ -25,16 +20,17 @@
impl BodySize {
    /// Returns true if size hint indicates no or empty body.
    ///
+    /// Streams will return false because it cannot be known without reading the stream.
+    ///
    /// ```
    /// # use actix_http::body::BodySize;
    /// assert!(BodySize::None.is_eof());
-    /// assert!(BodySize::Empty.is_eof());
    /// assert!(BodySize::Sized(0).is_eof());
    ///
    /// assert!(!BodySize::Sized(64).is_eof());
    /// assert!(!BodySize::Stream.is_eof());
    /// ```
    pub fn is_eof(&self) -> bool {
-        matches!(self, BodySize::None | BodySize::Empty | BodySize::Sized(0))
+        matches!(self, BodySize::None | BodySize::Sized(0))
    }
}
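A small sketch of how a size hint is typically consumed when framing a response; `framing_header` is a hypothetical helper for illustration, not an API of the crate:

    use actix_http::body::BodySize;

    /// Hypothetical helper: pick the framing a body size calls for.
    fn framing_header(size: BodySize) -> Option<String> {
        match size {
            // skip the Content-Length header entirely
            BodySize::None => None,
            // a known size, including 0, gets an exact Content-Length
            BodySize::Sized(len) => Some(format!("Content-Length: {}", len)),
            // unknown size: fall back to chunked transfer encoding on HTTP/1.1
            BodySize::Stream => Some("Transfer-Encoding: chunked".to_owned()),
        }
    }

With the `Empty` variant gone, the zero-length case is simply `Sized(0)`, which `is_eof` above still reports as end-of-file.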

View File

@@ -72,10 +72,22 @@ mod tests {
    use actix_rt::pin;
    use actix_utils::future::poll_fn;
    use futures_util::stream;
+    use static_assertions::{assert_impl_all, assert_not_impl_all};

    use super::*;
    use crate::body::to_bytes;
assert_impl_all!(SizedStream<stream::Empty<Result<Bytes, crate::Error>>>: MessageBody);
assert_impl_all!(SizedStream<stream::Empty<Result<Bytes, &'static str>>>: MessageBody);
assert_impl_all!(SizedStream<stream::Repeat<Result<Bytes, &'static str>>>: MessageBody);
assert_impl_all!(SizedStream<stream::Empty<Result<Bytes, Infallible>>>: MessageBody);
assert_impl_all!(SizedStream<stream::Repeat<Result<Bytes, Infallible>>>: MessageBody);
assert_not_impl_all!(SizedStream<stream::Empty<Bytes>>: MessageBody);
assert_not_impl_all!(SizedStream<stream::Repeat<Bytes>>: MessageBody);
// crate::Error is not Clone
assert_not_impl_all!(SizedStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
    #[actix_rt::test]
    async fn skips_empty_chunks() {
        let body = SizedStream::new(

@@ -119,4 +131,37 @@ mod tests {
        assert_eq!(to_bytes(body).await.ok(), Some(Bytes::from("12")));
    }
#[actix_rt::test]
async fn stream_string_error() {
// `&'static str` does not impl `Error`
// but it does impl `Into<Box<dyn Error>>`
let body = SizedStream::new(0, stream::once(async { Err("stringy error") }));
assert_eq!(to_bytes(body).await, Ok(Bytes::new()));
let body = SizedStream::new(1, stream::once(async { Err("stringy error") }));
assert!(matches!(to_bytes(body).await, Err("stringy error")));
}
#[actix_rt::test]
async fn stream_boxed_error() {
// `Box<dyn Error>` does not impl `Error`
// but it does impl `Into<Box<dyn Error>>`
let body = SizedStream::new(
0,
stream::once(async { Err(Box::<dyn StdError>::from("stringy error")) }),
);
assert_eq!(to_bytes(body).await.unwrap(), Bytes::new());
let body = SizedStream::new(
1,
stream::once(async { Err(Box::<dyn StdError>::from("stringy error")) }),
);
assert_eq!(
to_bytes(body).await.unwrap_err().to_string(),
"stringy error"
);
}
}
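For reference, a minimal sketch of constructing a `SizedStream` outside the test module, using only the constructor exercised above; the declared length is assumed to match the bytes the stream actually yields:

    use std::convert::Infallible;

    use actix_http::body::{to_bytes, SizedStream};
    use bytes::Bytes;
    use futures_util::stream;

    #[actix_rt::test]
    async fn sized_stream_roundtrip() {
        // Two chunks totalling 10 bytes; the first argument is that total.
        let body = SizedStream::new(
            10,
            stream::iter(vec![
                Ok::<_, Infallible>(Bytes::from_static(b"hello")),
                Ok(Bytes::from_static(b"world")),
            ]),
        );

        assert_eq!(to_bytes(body).await.unwrap(), Bytes::from_static(b"helloworld"));
    }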

View File

@@ -1,26 +1,29 @@
-use std::cell::Cell;
-use std::fmt::Write;
-use std::rc::Rc;
-use std::time::Duration;
-use std::{fmt, net};
+use std::{
+    cell::Cell,
+    fmt::{self, Write},
+    net,
+    rc::Rc,
+    time::{Duration, SystemTime},
+};

use actix_rt::{
    task::JoinHandle,
    time::{interval, sleep_until, Instant, Sleep},
};
use bytes::BytesMut;
-use time::OffsetDateTime;

/// "Sun, 06 Nov 1994 08:49:37 GMT".len()
-const DATE_VALUE_LENGTH: usize = 29;
+pub(crate) const DATE_VALUE_LENGTH: usize = 29;

#[derive(Debug, PartialEq, Clone, Copy)]
/// Server keep-alive setting
pub enum KeepAlive {
    /// Keep alive in seconds
    Timeout(usize),

    /// Rely on OS to shutdown tcp connection
    Os,

    /// Disabled
    Disabled,
}
@@ -104,6 +107,8 @@ impl ServiceConfig {
    }

    /// Returns the local address that this server is bound to.
+    ///
+    /// Returns `None` for connections via UDS (Unix Domain Socket).
    #[inline]
    pub fn local_addr(&self) -> Option<net::SocketAddr> {
        self.0.local_addr

@@ -204,12 +209,7 @@ impl Date {
    fn update(&mut self) {
        self.pos = 0;
-        write!(
-            self,
-            "{}",
-            OffsetDateTime::now_utc().format("%a, %d %b %Y %H:%M:%S GMT")
-        )
-        .unwrap();
+        write!(self, "{}", httpdate::fmt_http_date(SystemTime::now())).unwrap();
    }
}
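The switch from the `time` crate to `httpdate` above keeps the same fixed-width IMF-fixdate format for the Date header; a quick illustration:

    use std::time::SystemTime;

    #[test]
    fn httpdate_format_matches_date_value_length() {
        // httpdate renders the fixed-width IMF-fixdate form used for the Date header
        let date = httpdate::fmt_http_date(SystemTime::UNIX_EPOCH);
        assert_eq!(date, "Thu, 01 Jan 1970 00:00:00 GMT");
        assert_eq!(date.len(), 29); // same length as DATE_VALUE_LENGTH
    }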
@@ -267,11 +267,11 @@ impl DateService {
}

// TODO: move to a util module for testing all spawn handle drop style tasks.
-#[cfg(test)]
/// Test Module for checking the drop state of certain async tasks that are spawned
/// with `actix_rt::spawn`
///
/// The target task must explicitly generate `NotifyOnDrop` when spawn the task
+#[cfg(test)]
mod notify_on_drop {
    use std::cell::RefCell;

@@ -281,9 +281,8 @@ mod notify_on_drop {
    /// Check if the spawned task is dropped.
    ///
-    /// # Panic:
-    ///
-    /// When there was no `NotifyOnDrop` instance on current thread
+    /// # Panics
+    /// Panics when there was no `NotifyOnDrop` instance on current thread.
    pub(crate) fn is_dropped() -> bool {
        NOTIFY_DROPPED.with(|bool| {
            bool.borrow()

@@ -326,7 +325,7 @@ mod notify_on_drop {
mod tests {
    use super::*;

-    use actix_rt::task::yield_now;
+    use actix_rt::{task::yield_now, time::sleep};

    #[actix_rt::test]
    async fn test_date_service_update() {

@@ -350,7 +349,14 @@ mod tests {
        assert_ne!(buf1, buf2);

        drop(settings);
-        assert!(notify_on_drop::is_dropped());
// Ensure the task will drop eventually
let mut times = 0;
while !notify_on_drop::is_dropped() {
sleep(Duration::from_millis(100)).await;
times += 1;
assert!(times < 10, "Timeout waiting for task drop");
}
    }

    #[actix_rt::test]

@@ -372,7 +378,14 @@ mod tests {
        assert!(!notify_on_drop::is_dropped());
        drop(service);
-        assert!(notify_on_drop::is_dropped());
// Ensure the task will drop eventually
let mut times = 0;
while !notify_on_drop::is_dropped() {
sleep(Duration::from_millis(100)).await;
times += 1;
assert!(times < 10, "Timeout waiting for task drop");
}
    }
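The two wait-loops added above share the same shape; if the TODO about a shared test util is ever picked up, they could collapse into a helper along these lines (a sketch, not part of the diff):

    use std::time::Duration;

    use actix_rt::time::sleep;

    /// Poll `is_done` every 100ms, panicking if it is still false after `max_polls` tries.
    async fn wait_until(mut is_done: impl FnMut() -> bool, max_polls: u32) {
        let mut times = 0;
        while !is_done() {
            sleep(Duration::from_millis(100)).await;
            times += 1;
            assert!(times < max_polls, "Timeout waiting for task drop");
        }
    }

    // e.g. inside the tests above:
    // wait_until(notify_on_drop::is_dropped, 10).await;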

    #[test]

View File

@@ -80,7 +80,7 @@ where
        let encoding = headers
            .get(&CONTENT_ENCODING)
            .and_then(|val| val.to_str().ok())
-            .map(ContentEncoding::from)
+            .and_then(|x| x.parse().ok())
            .unwrap_or(ContentEncoding::Identity);

        Self::new(stream, encoding)

View File

@@ -24,12 +24,12 @@ use flate2::write::{GzEncoder, ZlibEncoder};
use zstd::stream::write::Encoder as ZstdEncoder;

use crate::{
-    body::{Body, BodySize, BoxAnyBody, MessageBody, ResponseBody},
+    body::{AnyBody, BodySize, MessageBody},
    http::{
        header::{ContentEncoding, CONTENT_ENCODING},
        HeaderValue, StatusCode,
    },
-    Error, ResponseHead,
+    ResponseHead,
};

use super::Writer;
@@ -50,8 +50,8 @@ impl<B: MessageBody> Encoder<B> {
    pub fn response(
        encoding: ContentEncoding,
        head: &mut ResponseHead,
-        body: ResponseBody<B>,
-    ) -> ResponseBody<Encoder<B>> {
+        body: AnyBody<B>,
+    ) -> AnyBody<Encoder<B>> {
        let can_encode = !(head.headers().contains_key(&CONTENT_ENCODING)
            || head.status == StatusCode::SWITCHING_PROTOCOLS
            || head.status == StatusCode::NO_CONTENT

@@ -59,19 +59,15 @@ impl<B: MessageBody> Encoder<B> {
            || encoding == ContentEncoding::Auto);

        let body = match body {
-            ResponseBody::Other(b) => match b {
-                Body::None => return ResponseBody::Other(Body::None),
-                Body::Empty => return ResponseBody::Other(Body::Empty),
-                Body::Bytes(buf) => {
-                    if can_encode {
-                        EncoderBody::Bytes(buf)
-                    } else {
-                        return ResponseBody::Other(Body::Bytes(buf));
-                    }
-                }
-                Body::Message(stream) => EncoderBody::BoxedStream(stream),
-            },
-            ResponseBody::Body(stream) => EncoderBody::Stream(stream),
+            AnyBody::None => return AnyBody::None,
+            AnyBody::Bytes(buf) => {
+                if can_encode {
+                    EncoderBody::Bytes(buf)
+                } else {
+                    return AnyBody::Bytes(buf);
+                }
+            }
+            AnyBody::Body(body) => EncoderBody::Stream(body),
        };
        if can_encode {

@@ -79,7 +75,8 @@ impl<B: MessageBody> Encoder<B> {
            if let Some(enc) = ContentEncoder::encoder(encoding) {
                update_head(encoding, head);
                head.no_chunking(false);
-                return ResponseBody::Body(Encoder {
+
+                return AnyBody::Body(Encoder {
                    body,
                    eof: false,
                    fut: None,

@@ -88,7 +85,7 @@ impl<B: MessageBody> Encoder<B> {
            }
        }

-        ResponseBody::Body(Encoder {
+        AnyBody::Body(Encoder {
            body,
            eof: false,
            fut: None,
@@ -101,13 +98,11 @@ impl<B: MessageBody> Encoder<B> {
enum EncoderBody<B> {
    Bytes(Bytes),
    Stream(#[pin] B),
-    BoxedStream(BoxAnyBody),
}

impl<B> MessageBody for EncoderBody<B>
where
    B: MessageBody,
-    B::Error: Into<Error>,
{
    type Error = EncoderError<B::Error>;

@@ -115,7 +110,6 @@ where
        match self {
            EncoderBody::Bytes(ref b) => b.size(),
            EncoderBody::Stream(ref b) => b.size(),
-            EncoderBody::BoxedStream(ref b) => b.size(),
        }
    }
@@ -131,19 +125,7 @@ where
                    Poll::Ready(Some(Ok(std::mem::take(b))))
                }
            }
-            // TODO: MSRV 1.51: poll_map_err
-            EncoderBodyProj::Stream(b) => match ready!(b.poll_next(cx)) {
-                Some(Err(err)) => Poll::Ready(Some(Err(EncoderError::Body(err)))),
-                Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
-                None => Poll::Ready(None),
-            },
-            EncoderBodyProj::BoxedStream(ref mut b) => {
-                match ready!(b.as_pin_mut().poll_next(cx)) {
-                    Some(Err(err)) => Poll::Ready(Some(Err(EncoderError::Boxed(err)))),
-                    Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
-                    None => Poll::Ready(None),
-                }
-            }
+            EncoderBodyProj::Stream(b) => b.poll_next(cx).map_err(EncoderError::Body),
        }
    }
}
@@ -151,7 +133,6 @@ where
impl<B> MessageBody for Encoder<B>
where
    B: MessageBody,
-    B::Error: Into<Error>,
{
    type Error = EncoderError<B::Error>;
@@ -360,9 +341,6 @@ pub enum EncoderError<E> {
    #[display(fmt = "body")]
    Body(E),

-    #[display(fmt = "boxed")]
-    Boxed(Box<dyn StdError>),
-
    #[display(fmt = "blocking")]
    Blocking(BlockingError),

@@ -374,7 +352,6 @@ impl<E: StdError + 'static> StdError for EncoderError<E> {
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        match self {
            EncoderError::Body(err) => Some(err),
-            EncoderError::Boxed(err) => Some(&**err),
            EncoderError::Blocking(err) => Some(err),
            EncoderError::Io(err) => Some(err),
        }

View File

@@ -5,10 +5,7 @@ use std::{error::Error as StdError, fmt, io, str::Utf8Error, string::FromUtf8Err
use derive_more::{Display, Error, From};
use http::{uri::InvalidUri, StatusCode};

-use crate::{
-    body::{AnyBody, Body},
-    ws, Response,
-};
+use crate::{body::AnyBody, ws, Response};

pub use http::Error as HttpError;

@@ -29,6 +26,11 @@ impl Error {
        }
    }

+    pub(crate) fn with_cause(mut self, cause: impl Into<Box<dyn StdError>>) -> Self {
+        self.inner.cause = Some(cause.into());
+        self
+    }
+
    pub(crate) fn new_http() -> Self {
        Self::new(Kind::Http)
    }

@@ -49,12 +51,12 @@ impl Error {
        Self::new(Kind::SendResponse)
    }

-    // TODO: remove allow
-    #[allow(dead_code)]
+    #[allow(unused)] // reserved for future use (TODO: remove allow when being used)
    pub(crate) fn new_io() -> Self {
        Self::new(Kind::Io)
    }

+    #[allow(unused)] // used in encoder behind feature flag so ignore unused warning
    pub(crate) fn new_encoder() -> Self {
        Self::new(Kind::Encoder)
    }

@@ -62,26 +64,21 @@ impl Error {
    pub(crate) fn new_ws() -> Self {
        Self::new(Kind::Ws)
    }
-
-    pub(crate) fn with_cause(mut self, cause: impl Into<Box<dyn StdError>>) -> Self {
-        self.inner.cause = Some(cause.into());
-        self
-    }
}

-impl From<Error> for Response<AnyBody> {
+impl<B> From<Error> for Response<AnyBody<B>> {
    fn from(err: Error) -> Self {
        let status_code = match err.inner.kind {
            Kind::Parse => StatusCode::BAD_REQUEST,
            _ => StatusCode::INTERNAL_SERVER_ERROR,
        };

-        Response::new(status_code).set_body(Body::from(err.to_string()))
+        Response::new(status_code).set_body(AnyBody::from(err.to_string()))
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Display)]
-pub enum Kind {
+pub(crate) enum Kind {
    #[display(fmt = "error processing HTTP")]
    Http,

@@ -194,7 +191,7 @@ pub enum ParseError {
    #[display(fmt = "IO error: {}", _0)]
    Io(io::Error),

-    /// Parsing a field as string failed
+    /// Parsing a field as string failed.
    #[display(fmt = "UTF8 error: {}", _0)]
    Utf8(Utf8Error),
}
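A short in-crate sketch of what the now-generic conversion buys (the constructors are `pub(crate)`, so this only applies inside `actix-http`); any error whose kind is not `Kind::Parse` maps to a 500 response carrying the error's `Display` output as its body:

    use crate::{body::AnyBody, Error, Response};

    fn internal_error_response() -> Response<AnyBody> {
        // `Kind::Http` is not `Kind::Parse`, so this yields a 500 INTERNAL SERVER ERROR
        // whose body text is derived from the kind's display string ("error processing HTTP").
        Error::new_http().into()
    }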

View File

@ -0,0 +1,432 @@
use std::{io, task::Poll};
use bytes::{Buf as _, Bytes, BytesMut};
macro_rules! byte (
($rdr:ident) => ({
if $rdr.len() > 0 {
let b = $rdr[0];
$rdr.advance(1);
b
} else {
return Poll::Pending
}
})
);
#[derive(Debug, PartialEq, Clone)]
pub(super) enum ChunkedState {
Size,
SizeLws,
Extension,
SizeLf,
Body,
BodyCr,
BodyLf,
EndCr,
EndLf,
End,
}
impl ChunkedState {
pub(super) fn step(
&self,
body: &mut BytesMut,
size: &mut u64,
buf: &mut Option<Bytes>,
) -> Poll<Result<ChunkedState, io::Error>> {
use self::ChunkedState::*;
match *self {
Size => ChunkedState::read_size(body, size),
SizeLws => ChunkedState::read_size_lws(body),
Extension => ChunkedState::read_extension(body),
SizeLf => ChunkedState::read_size_lf(body, *size),
Body => ChunkedState::read_body(body, size, buf),
BodyCr => ChunkedState::read_body_cr(body),
BodyLf => ChunkedState::read_body_lf(body),
EndCr => ChunkedState::read_end_cr(body),
EndLf => ChunkedState::read_end_lf(body),
End => Poll::Ready(Ok(ChunkedState::End)),
}
}
fn read_size(
rdr: &mut BytesMut,
size: &mut u64,
) -> Poll<Result<ChunkedState, io::Error>> {
let radix = 16;
let rem = match byte!(rdr) {
b @ b'0'..=b'9' => b - b'0',
b @ b'a'..=b'f' => b + 10 - b'a',
b @ b'A'..=b'F' => b + 10 - b'A',
b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)),
b';' => return Poll::Ready(Ok(ChunkedState::Extension)),
b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)),
_ => {
return Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size line: Invalid Size",
)));
}
};
match size.checked_mul(radix) {
Some(n) => {
*size = n as u64;
*size += rem as u64;
Poll::Ready(Ok(ChunkedState::Size))
}
None => {
log::debug!("chunk size would overflow u64");
Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size line: Size is too big",
)))
}
}
}
fn read_size_lws(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
// LWS can follow the chunk size, but no more digits can come
b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)),
b';' => Poll::Ready(Ok(ChunkedState::Extension)),
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size linear white space",
))),
}
}
fn read_extension(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
// strictly 0x20 (space) should be disallowed but we don't parse quoted strings here
0x00..=0x08 | 0x0a..=0x1f | 0x7f => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid character in chunk extension",
))),
_ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
}
}
fn read_size_lf(
rdr: &mut BytesMut,
size: u64,
) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\n' if size > 0 => Poll::Ready(Ok(ChunkedState::Body)),
b'\n' if size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size LF",
))),
}
}
fn read_body(
rdr: &mut BytesMut,
rem: &mut u64,
buf: &mut Option<Bytes>,
) -> Poll<Result<ChunkedState, io::Error>> {
log::trace!("Chunked read, remaining={:?}", rem);
let len = rdr.len() as u64;
if len == 0 {
Poll::Ready(Ok(ChunkedState::Body))
} else {
let slice;
if *rem > len {
slice = rdr.split().freeze();
*rem -= len;
} else {
slice = rdr.split_to(*rem as usize).freeze();
*rem = 0;
}
*buf = Some(slice);
if *rem > 0 {
Poll::Ready(Ok(ChunkedState::Body))
} else {
Poll::Ready(Ok(ChunkedState::BodyCr))
}
}
}
fn read_body_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk body CR",
))),
}
}
fn read_body_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\n' => Poll::Ready(Ok(ChunkedState::Size)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk body LF",
))),
}
}
fn read_end_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk end CR",
))),
}
}
fn read_end_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\n' => Poll::Ready(Ok(ChunkedState::End)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk end LF",
))),
}
}
}
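A compact sketch of how this state machine is meant to be driven (the real driver is the `PayloadDecoder` loop later in this diff). It is in-crate only, since `ChunkedState` is `pub(super)`, and it assumes `Size` is the correct starting state:

    use std::{io, task::Poll};

    use bytes::{Bytes, BytesMut};

    /// Drive `ChunkedState` over `src`, collecting decoded chunks.
    /// Returns the chunks seen so far and whether the terminating 0-size chunk was reached.
    fn drain_chunks(src: &mut BytesMut) -> Result<(Vec<Bytes>, bool), io::Error> {
        let mut state = ChunkedState::Size;
        let mut size = 0u64;
        let mut chunks = Vec::new();

        loop {
            let mut buf = None;

            state = match state.step(src, &mut size, &mut buf) {
                // not enough bytes buffered yet; caller should read more and retry
                Poll::Pending => return Ok((chunks, false)),
                Poll::Ready(Ok(state)) => state,
                Poll::Ready(Err(err)) => return Err(err),
            };

            if let Some(chunk) = buf {
                chunks.push(chunk);
            }

            if state == ChunkedState::End {
                return Ok((chunks, true));
            }

            if src.is_empty() {
                return Ok((chunks, false));
            }
        }
    }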
#[cfg(test)]
mod tests {
use actix_codec::Decoder as _;
use bytes::{Bytes, BytesMut};
use http::Method;
use crate::{
error::ParseError,
h1::decoder::{MessageDecoder, PayloadItem},
HttpMessage as _, Request,
};
macro_rules! parse_ready {
($e:expr) => {{
match MessageDecoder::<Request>::default().decode($e) {
Ok(Some((msg, _))) => msg,
Ok(_) => unreachable!("Eof during parsing http request"),
Err(err) => unreachable!("Error during parsing http request: {:?}", err),
}
}};
}
macro_rules! expect_parse_err {
($e:expr) => {{
match MessageDecoder::<Request>::default().decode($e) {
Err(err) => match err {
ParseError::Io(_) => unreachable!("Parse error expected"),
_ => {}
},
_ => unreachable!("Error expected"),
}
}};
}
#[test]
fn test_parse_chunked_payload_chunk_extension() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\
\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(msg.chunked().unwrap());
buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n")
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
assert_eq!(chunk, Bytes::from_static(b"data"));
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
assert_eq!(chunk, Bytes::from_static(b"line"));
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert!(msg.eof());
}
#[test]
fn test_request_chunked() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let req = parse_ready!(&mut buf);
if let Ok(val) = req.chunked() {
assert!(val);
} else {
unreachable!("Error");
}
// intentional typo in "chunked"
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chnked\r\n\r\n",
);
expect_parse_err!(&mut buf);
}
#[test]
fn test_http_request_chunked_payload() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(req.chunked().unwrap());
buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n");
assert_eq!(
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
b"data"
);
assert_eq!(
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
b"line"
);
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
}
#[test]
fn test_http_request_chunked_payload_and_next_message() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(req.chunked().unwrap());
buf.extend(
b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\
POST /test2 HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n"
.iter(),
);
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"data");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"line");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert!(msg.eof());
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
assert!(req.chunked().unwrap());
assert_eq!(*req.method(), Method::POST);
assert!(req.chunked().unwrap());
}
#[test]
fn test_http_request_chunked_payload_chunks() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(req.chunked().unwrap());
buf.extend(b"4\r\n1111\r\n");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"1111");
buf.extend(b"4\r\ndata\r");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"data");
buf.extend(b"\n4");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"\r");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"\n");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"li");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"li");
//trailers
//buf.feed_data("test: test\r\n");
//not_ready!(reader.parse(&mut buf, &mut readbuf));
buf.extend(b"ne\r\n0\r\n");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"ne");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"\r\n");
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
}
#[test]
fn chunk_extension_quoted() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
Host: localhost:8080\r\n\
Transfer-Encoding: chunked\r\n\
\r\n\
2;hello=b;one=\"1 2 3\"\r\n\
xx",
);
let mut reader = MessageDecoder::<Request>::default();
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
let chunk = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"xx")));
}
#[test]
fn hrs_chunk_extension_invalid() {
let mut buf = BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: localhost:8080\r\n\
Transfer-Encoding: chunked\r\n\
\r\n\
2;x\nx\r\n\
4c\r\n\
0\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
let err = pl.decode(&mut buf).unwrap_err();
assert!(err
.to_string()
.contains("Invalid character in chunk extension"));
}
#[test]
fn hrs_chunk_size_overflow() {
let mut buf = BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Transfer-Encoding: chunked\r\n\
\r\n\
f0000000000000003\r\n\
abc\r\n\
0\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
let err = pl.decode(&mut buf).unwrap_err();
assert!(err
.to_string()
.contains("Invalid chunk size line: Size is too big"));
}
}

View File

@@ -120,7 +120,7 @@ impl Decoder for ClientCodec {
        debug_assert!(!self.inner.payload.is_some(), "Payload decoder is set");

        if let Some((req, payload)) = self.inner.decoder.decode(src)? {
-            if let Some(ctype) = req.ctype() {
+            if let Some(ctype) = req.conn_type() {
                // do not use peer's keep-alive
                self.inner.ctype = if ctype == ConnectionType::KeepAlive {
                    self.inner.ctype

View File

@ -29,7 +29,7 @@ pub struct Codec {
decoder: decoder::MessageDecoder<Request>, decoder: decoder::MessageDecoder<Request>,
payload: Option<PayloadDecoder>, payload: Option<PayloadDecoder>,
version: Version, version: Version,
ctype: ConnectionType, conn_type: ConnectionType,
// encoder part // encoder part
flags: Flags, flags: Flags,
@ -65,7 +65,7 @@ impl Codec {
decoder: decoder::MessageDecoder::default(), decoder: decoder::MessageDecoder::default(),
payload: None, payload: None,
version: Version::HTTP_11, version: Version::HTTP_11,
ctype: ConnectionType::Close, conn_type: ConnectionType::Close,
encoder: encoder::MessageEncoder::default(), encoder: encoder::MessageEncoder::default(),
} }
} }
@ -73,13 +73,13 @@ impl Codec {
/// Check if request is upgrade. /// Check if request is upgrade.
#[inline] #[inline]
pub fn upgrade(&self) -> bool { pub fn upgrade(&self) -> bool {
self.ctype == ConnectionType::Upgrade self.conn_type == ConnectionType::Upgrade
} }
/// Check if last response is keep-alive. /// Check if last response is keep-alive.
#[inline] #[inline]
pub fn keepalive(&self) -> bool { pub fn keepalive(&self) -> bool {
self.ctype == ConnectionType::KeepAlive self.conn_type == ConnectionType::KeepAlive
} }
/// Check if keep-alive enabled on server level. /// Check if keep-alive enabled on server level.
@ -124,11 +124,11 @@ impl Decoder for Codec {
let head = req.head(); let head = req.head();
self.flags.set(Flags::HEAD, head.method == Method::HEAD); self.flags.set(Flags::HEAD, head.method == Method::HEAD);
self.version = head.version; self.version = head.version;
self.ctype = head.connection_type(); self.conn_type = head.connection_type();
if self.ctype == ConnectionType::KeepAlive if self.conn_type == ConnectionType::KeepAlive
&& !self.flags.contains(Flags::KEEPALIVE_ENABLED) && !self.flags.contains(Flags::KEEPALIVE_ENABLED)
{ {
self.ctype = ConnectionType::Close self.conn_type = ConnectionType::Close
} }
match payload { match payload {
PayloadType::None => self.payload = None, PayloadType::None => self.payload = None,
@ -159,14 +159,14 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
res.head_mut().version = self.version; res.head_mut().version = self.version;
// connection status // connection status
self.ctype = if let Some(ct) = res.head().ctype() { self.conn_type = if let Some(ct) = res.head().conn_type() {
if ct == ConnectionType::KeepAlive { if ct == ConnectionType::KeepAlive {
self.ctype self.conn_type
} else { } else {
ct ct
} }
} else { } else {
self.ctype self.conn_type
}; };
// encode message // encode message
@ -177,10 +177,9 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
self.flags.contains(Flags::STREAM), self.flags.contains(Flags::STREAM),
self.version, self.version,
length, length,
self.ctype, self.conn_type,
&self.config, &self.config,
)?; )?;
// self.headers_size = (dst.len() - len) as u32;
} }
Message::Chunk(Some(bytes)) => { Message::Chunk(Some(bytes)) => {
self.encoder.encode_chunk(bytes.as_ref(), dst)?; self.encoder.encode_chunk(bytes.as_ref(), dst)?;
@ -189,6 +188,7 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
self.encoder.encode_eof(dst)?; self.encoder.encode_eof(dst)?;
} }
} }
Ok(()) Ok(())
} }
} }

View File

@@ -1,18 +1,18 @@
-use std::convert::TryFrom;
-use std::io;
-use std::marker::PhantomData;
-use std::task::Poll;
+use std::{convert::TryFrom, io, marker::PhantomData, mem::MaybeUninit, task::Poll};

use actix_codec::Decoder;
-use bytes::{Buf, Bytes, BytesMut};
+use bytes::{Bytes, BytesMut};
use http::header::{HeaderName, HeaderValue};
use http::{header, Method, StatusCode, Uri, Version};
use log::{debug, error, trace};

-use crate::error::ParseError;
-use crate::header::HeaderMap;
-use crate::message::{ConnectionType, ResponseHead};
-use crate::request::Request;
+use super::chunked::ChunkedState;
+use crate::{
+    error::ParseError,
+    header::HeaderMap,
+    message::{ConnectionType, ResponseHead},
+    request::Request,
+};

pub(crate) const MAX_BUFFER_SIZE: usize = 131_072;
const MAX_HEADERS: usize = 96;
@@ -67,6 +67,7 @@ pub(crate) trait MessageType: Sized {
        let mut has_upgrade_websocket = false;
        let mut expect = false;
        let mut chunked = false;
+        let mut seen_te = false;
        let mut content_length = None;

        {

@@ -85,8 +86,17 @@ pub(crate) trait MessageType: Sized {
                };

                match name {
-                    header::CONTENT_LENGTH => {
-                        if let Ok(s) = value.to_str() {
+                    header::CONTENT_LENGTH if content_length.is_some() => {
+                        debug!("multiple Content-Length");
+                        return Err(ParseError::Header);
+                    }
+
+                    header::CONTENT_LENGTH => match value.to_str() {
+                        Ok(s) if s.trim().starts_with('+') => {
+                            debug!("illegal Content-Length: {:?}", s);
+                            return Err(ParseError::Header);
+                        }
+
+                        Ok(s) => {
                            if let Ok(len) = s.parse::<u64>() {
                                if len != 0 {
                                    content_length = Some(len);

@@ -95,15 +105,31 @@ pub(crate) trait MessageType: Sized {
                                debug!("illegal Content-Length: {:?}", s);
                                return Err(ParseError::Header);
                            }
-                        } else {
+                        }
+
+                        Err(_) => {
                            debug!("illegal Content-Length: {:?}", value);
                            return Err(ParseError::Header);
                        }
-                    }
+                    },

                    // transfer-encoding
+                    header::TRANSFER_ENCODING if seen_te => {
+                        debug!("multiple Transfer-Encoding not allowed");
+                        return Err(ParseError::Header);
+                    }
+
                    header::TRANSFER_ENCODING => {
+                        seen_te = true;
+
                        if let Ok(s) = value.to_str().map(str::trim) {
-                            chunked = s.eq_ignore_ascii_case("chunked");
+                            if s.eq_ignore_ascii_case("chunked") {
+                                chunked = true;
+                            } else if s.eq_ignore_ascii_case("identity") {
+                                // allow silently since multiple TE headers are already checked
+                            } else {
+                                debug!("illegal Transfer-Encoding: {:?}", s);
+                                return Err(ParseError::Header);
+                            }
                        } else {
                            return Err(ParseError::Header);
                        }
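The net effect of the stricter framing checks above can be seen with a decoder round-trip; a test-style in-crate sketch mirroring the `expect_parse_err!` tests elsewhere in this diff:

    use actix_codec::Decoder as _;
    use bytes::BytesMut;

    #[test]
    fn rejects_ambiguous_framing() {
        // Duplicate Content-Length headers now fail to parse instead of the second
        // value silently overwriting the first.
        let mut buf = BytesMut::from(
            "GET / HTTP/1.1\r\n\
             Host: example.com\r\n\
             Content-Length: 3\r\n\
             Content-Length: 11\r\n\
             \r\n\
             abc",
        );
        assert!(MessageDecoder::<Request>::default().decode(&mut buf).is_err());

        // An unrecognised Transfer-Encoding is an error rather than "not chunked".
        let mut buf = BytesMut::from(
            "GET / HTTP/1.1\r\n\
             Host: example.com\r\n\
             Transfer-Encoding: gzip\r\n\
             \r\n",
        );
        assert!(MessageDecoder::<Request>::default().decode(&mut buf).is_err());
    }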
@@ -186,10 +212,17 @@ impl MessageType for Request {
        let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;

        let (len, method, uri, ver, h_len) = {
-            let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;
+            // SAFETY:
+            // Create an uninitialized array of `MaybeUninit`. The `assume_init` is
+            // safe because the type we are claiming to have initialized here is a
+            // bunch of `MaybeUninit`s, which do not require initialization.
+            let mut parsed = unsafe {
+                MaybeUninit::<[MaybeUninit<httparse::Header<'_>>; MAX_HEADERS]>::uninit()
+                    .assume_init()
+            };

-            let mut req = httparse::Request::new(&mut parsed);
-            match req.parse(src)? {
+            let mut req = httparse::Request::new(&mut []);
+            match req.parse_with_uninit_headers(src, &mut parsed)? {
                httparse::Status::Complete(len) => {
                    let method = Method::from_bytes(req.method.unwrap().as_bytes())
                        .map_err(|_| ParseError::Method)?;
@ -408,20 +441,6 @@ enum Kind {
Eof, Eof,
} }
#[derive(Debug, PartialEq, Clone)]
enum ChunkedState {
Size,
SizeLws,
Extension,
SizeLf,
Body,
BodyCr,
BodyLf,
EndCr,
EndLf,
End,
}
impl Decoder for PayloadDecoder {
    type Item = PayloadItem;
    type Error = io::Error;

@@ -451,19 +470,23 @@ impl Decoder for PayloadDecoder {
            Kind::Chunked(ref mut state, ref mut size) => {
                loop {
                    let mut buf = None;

                    // advances the chunked state
                    *state = match state.step(src, size, &mut buf) {
                        Poll::Pending => return Ok(None),
                        Poll::Ready(Ok(state)) => state,
                        Poll::Ready(Err(e)) => return Err(e),
                    };

                    if *state == ChunkedState::End {
                        trace!("End of chunked stream");
                        return Ok(Some(PayloadItem::Eof));
                    }

                    if let Some(buf) = buf {
                        return Ok(Some(PayloadItem::Chunk(buf)));
                    }

                    if src.is_empty() {
                        return Ok(None);
                    }

@@ -480,201 +503,40 @@ impl Decoder for PayloadDecoder {
            }
        }
macro_rules! byte (
($rdr:ident) => ({
if $rdr.len() > 0 {
let b = $rdr[0];
$rdr.advance(1);
b
} else {
return Poll::Pending
}
})
);
impl ChunkedState {
fn step(
&self,
body: &mut BytesMut,
size: &mut u64,
buf: &mut Option<Bytes>,
) -> Poll<Result<ChunkedState, io::Error>> {
use self::ChunkedState::*;
match *self {
Size => ChunkedState::read_size(body, size),
SizeLws => ChunkedState::read_size_lws(body),
Extension => ChunkedState::read_extension(body),
SizeLf => ChunkedState::read_size_lf(body, size),
Body => ChunkedState::read_body(body, size, buf),
BodyCr => ChunkedState::read_body_cr(body),
BodyLf => ChunkedState::read_body_lf(body),
EndCr => ChunkedState::read_end_cr(body),
EndLf => ChunkedState::read_end_lf(body),
End => Poll::Ready(Ok(ChunkedState::End)),
}
}
fn read_size(
rdr: &mut BytesMut,
size: &mut u64,
) -> Poll<Result<ChunkedState, io::Error>> {
let radix = 16;
match byte!(rdr) {
b @ b'0'..=b'9' => {
*size *= radix;
*size += u64::from(b - b'0');
}
b @ b'a'..=b'f' => {
*size *= radix;
*size += u64::from(b + 10 - b'a');
}
b @ b'A'..=b'F' => {
*size *= radix;
*size += u64::from(b + 10 - b'A');
}
b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)),
b';' => return Poll::Ready(Ok(ChunkedState::Extension)),
b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)),
_ => {
return Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size line: Invalid Size",
)));
}
}
Poll::Ready(Ok(ChunkedState::Size))
}
fn read_size_lws(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
trace!("read_size_lws");
match byte!(rdr) {
// LWS can follow the chunk size, but no more digits can come
b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)),
b';' => Poll::Ready(Ok(ChunkedState::Extension)),
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size linear white space",
))),
}
}
fn read_extension(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
_ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
}
}
fn read_size_lf(
rdr: &mut BytesMut,
size: &mut u64,
) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\n' if *size > 0 => Poll::Ready(Ok(ChunkedState::Body)),
b'\n' if *size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size LF",
))),
}
}
fn read_body(
rdr: &mut BytesMut,
rem: &mut u64,
buf: &mut Option<Bytes>,
) -> Poll<Result<ChunkedState, io::Error>> {
trace!("Chunked read, remaining={:?}", rem);
let len = rdr.len() as u64;
if len == 0 {
Poll::Ready(Ok(ChunkedState::Body))
} else {
let slice;
if *rem > len {
slice = rdr.split().freeze();
*rem -= len;
} else {
slice = rdr.split_to(*rem as usize).freeze();
*rem = 0;
}
*buf = Some(slice);
if *rem > 0 {
Poll::Ready(Ok(ChunkedState::Body))
} else {
Poll::Ready(Ok(ChunkedState::BodyCr))
}
}
}
fn read_body_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk body CR",
))),
}
}
fn read_body_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\n' => Poll::Ready(Ok(ChunkedState::Size)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk body LF",
))),
}
}
fn read_end_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk end CR",
))),
}
}
fn read_end_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\n' => Poll::Ready(Ok(ChunkedState::End)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk end LF",
))),
}
}
}
#[cfg(test)]
mod tests {
    use bytes::{Bytes, BytesMut};
    use http::{Method, Version};

    use super::*;
-    use crate::error::ParseError;
-    use crate::http::header::{HeaderName, SET_COOKIE};
-    use crate::HttpMessage;
+    use crate::{
+        error::ParseError,
+        http::header::{HeaderName, SET_COOKIE},
+        HttpMessage as _,
+    };

    impl PayloadType {
-        fn unwrap(self) -> PayloadDecoder {
+        pub(crate) fn unwrap(self) -> PayloadDecoder {
            match self {
                PayloadType::Payload(pl) => pl,
                _ => panic!(),
            }
        }

-        fn is_unhandled(&self) -> bool {
+        pub(crate) fn is_unhandled(&self) -> bool {
            matches!(self, PayloadType::Stream(_))
        }
    }

    impl PayloadItem {
-        fn chunk(self) -> Bytes {
+        pub(crate) fn chunk(self) -> Bytes {
            match self {
                PayloadItem::Chunk(chunk) => chunk,
                _ => panic!("error"),
            }
        }

-        fn eof(&self) -> bool {
+        pub(crate) fn eof(&self) -> bool {
            matches!(*self, PayloadItem::Eof)
        }
    }
@ -967,34 +829,6 @@ mod tests {
        assert!(req.upgrade());
    }
#[test]
fn test_request_chunked() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let req = parse_ready!(&mut buf);
if let Ok(val) = req.chunked() {
assert!(val);
} else {
unreachable!("Error");
}
// intentional typo in "chunked"
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chnked\r\n\r\n",
);
let req = parse_ready!(&mut buf);
if let Ok(val) = req.chunked() {
assert!(!val);
} else {
unreachable!("Error");
}
}
    #[test]
    fn test_headers_content_length_err_1() {
        let mut buf = BytesMut::from(
@ -1112,126 +946,6 @@ mod tests {
        expect_parse_err!(&mut buf);
    }
#[test]
fn test_http_request_chunked_payload() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(req.chunked().unwrap());
buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n");
assert_eq!(
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
b"data"
);
assert_eq!(
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
b"line"
);
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
}
#[test]
fn test_http_request_chunked_payload_and_next_message() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(req.chunked().unwrap());
buf.extend(
b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\
POST /test2 HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n"
.iter(),
);
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"data");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"line");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert!(msg.eof());
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
assert!(req.chunked().unwrap());
assert_eq!(*req.method(), Method::POST);
assert!(req.chunked().unwrap());
}
#[test]
fn test_http_request_chunked_payload_chunks() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(req.chunked().unwrap());
buf.extend(b"4\r\n1111\r\n");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"1111");
buf.extend(b"4\r\ndata\r");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"data");
buf.extend(b"\n4");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"\r");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"\n");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"li");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"li");
//trailers
//buf.feed_data("test: test\r\n");
//not_ready!(reader.parse(&mut buf, &mut readbuf));
buf.extend(b"ne\r\n0\r\n");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"ne");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"\r\n");
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
}
#[test]
fn test_parse_chunked_payload_chunk_extension() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\
\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(msg.chunked().unwrap());
buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n")
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
assert_eq!(chunk, Bytes::from_static(b"data"));
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
assert_eq!(chunk, Bytes::from_static(b"line"));
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert!(msg.eof());
}
    #[test]
    fn test_response_http10_read_until_eof() {
        let mut buf = BytesMut::from("HTTP/1.0 200 Ok\r\n\r\ntest data");
@ -1243,4 +957,84 @@ mod tests {
        let chunk = pl.decode(&mut buf).unwrap().unwrap();
        assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"test data")));
    }
#[test]
fn hrs_multiple_content_length() {
let mut buf = BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Content-Length: 4\r\n\
Content-Length: 2\r\n\
\r\n\
abcd",
);
expect_parse_err!(&mut buf);
}
#[test]
fn hrs_content_length_plus() {
let mut buf = BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Content-Length: +3\r\n\
\r\n\
000",
);
expect_parse_err!(&mut buf);
}
#[test]
fn hrs_unknown_transfer_encoding() {
let mut buf = BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Transfer-Encoding: JUNK\r\n\
Transfer-Encoding: chunked\r\n\
\r\n\
5\r\n\
hello\r\n\
0",
);
expect_parse_err!(&mut buf);
}
#[test]
fn hrs_multiple_transfer_encoding() {
let mut buf = BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Content-Length: 51\r\n\
Transfer-Encoding: identity\r\n\
Transfer-Encoding: chunked\r\n\
\r\n\
0\r\n\
\r\n\
GET /forbidden HTTP/1.1\r\n\
Host: example.com\r\n\r\n",
);
expect_parse_err!(&mut buf);
}
#[test]
fn transfer_encoding_agrees() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
Host: example.com\r\n\
Content-Length: 3\r\n\
Transfer-Encoding: identity\r\n\
\r\n\
0\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
let chunk = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"0\r\n")));
}
}
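The hrs_* cases above are request-smuggling guards: when a message carries conflicting framing (duplicate Content-Length values, a Content-Length alongside Transfer-Encoding, a "+"-prefixed length, or an unknown transfer coding), RFC 7230 §3.3.3 says to reject it outright rather than guess, because a front-end proxy and this server could otherwise disagree about where one request ends and the next begins. A minimal illustration of the kind of raw input being rejected (the parsing macros above are crate-internal, so this only shows the bytes):

    fn main() {
        // Header 1 claims a 4-byte body, header 2 claims 2 bytes. Whichever side
        // of a proxy chain trusts the "other" value will treat the leftover bytes
        // as the start of a new request -- the classic smuggling primitive.
        let conflicting: &[u8] = b"GET / HTTP/1.1\r\n\
                                   Host: example.com\r\n\
                                   Content-Length: 4\r\n\
                                   Content-Length: 2\r\n\
                                   \r\n\
                                   abcd";
        println!("{} raw bytes that must fail to parse", conflicting.len());
    }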



@ -303,9 +303,9 @@ where
        body: &impl MessageBody,
    ) -> Result<BodySize, DispatchError> {
        let size = body.size();
-       let mut this = self.project();
+       let this = self.project();
        this.codec
-           .encode(Message::Item((message, size)), &mut this.write_buf)
+           .encode(Message::Item((message, size)), this.write_buf)
            .map_err(|err| {
                if let Some(mut payload) = this.payload.take() {
                    payload.set_error(PayloadError::Incomplete(None));
@ -325,7 +325,7 @@ where
    ) -> Result<(), DispatchError> {
        let size = self.as_mut().send_response_inner(message, &body)?;
        let state = match size {
-           BodySize::None | BodySize::Empty => State::None,
+           BodySize::None | BodySize::Sized(0) => State::None,
            _ => State::SendPayload(body),
        };
        self.project().state.set(state);
@ -339,7 +339,7 @@ where
    ) -> Result<(), DispatchError> {
        let size = self.as_mut().send_response_inner(message, &body)?;
        let state = match size {
-           BodySize::None | BodySize::Empty => State::None,
+           BodySize::None | BodySize::Sized(0) => State::None,
            _ => State::SendErrorPayload(body),
        };
        self.project().state.set(state);
@ -380,7 +380,7 @@ where
                    // send_response would update InnerDispatcher state to SendPayload or
                    // None(If response body is empty).
                    // continue loop to poll it.
-                   self.as_mut().send_error_response(res, AnyBody::Empty)?;
+                   self.as_mut().send_error_response(res, AnyBody::empty())?;
                }
                // return with upgrade request and poll it exclusively.
@ -425,13 +425,13 @@ where
                        Poll::Ready(Some(Ok(item))) => {
                            this.codec.encode(
                                Message::Chunk(Some(item)),
-                               &mut this.write_buf,
+                               this.write_buf,
                            )?;
                        }
                        Poll::Ready(None) => {
                            this.codec
-                               .encode(Message::Chunk(None), &mut this.write_buf)?;
+                               .encode(Message::Chunk(None), this.write_buf)?;
                            // payload stream finished.
                            // set state to None and handle next message
                            this.state.set(State::None);
@ -460,13 +460,13 @@ where
                        Poll::Ready(Some(Ok(item))) => {
                            this.codec.encode(
                                Message::Chunk(Some(item)),
-                               &mut this.write_buf,
+                               this.write_buf,
                            )?;
                        }
                        Poll::Ready(None) => {
                            this.codec
-                               .encode(Message::Chunk(None), &mut this.write_buf)?;
+                               .encode(Message::Chunk(None), this.write_buf)?;
                            // payload stream finished.
                            // set state to None and handle next message
                            this.state.set(State::None);
@ -592,7 +592,7 @@ where
        let mut updated = false;
        let mut this = self.as_mut().project();
        loop {
-           match this.codec.decode(&mut this.read_buf) {
+           match this.codec.decode(this.read_buf) {
                Ok(Some(msg)) => {
                    updated = true;
                    this.flags.insert(Flags::STARTED);
@ -772,7 +772,7 @@ where
                    trace!("Slow request timeout");
                    let _ = self.as_mut().send_error_response(
                        Response::with_body(StatusCode::REQUEST_TIMEOUT, ()),
-                       AnyBody::Empty,
+                       AnyBody::empty(),
                    );
                    this = self.project();
                    this.flags.insert(Flags::STARTED | Flags::SHUTDOWN);
@ -1060,7 +1060,7 @@ mod tests {
    fn stabilize_date_header(payload: &mut [u8]) {
        let mut from = 0;
-       while let Some(pos) = find_slice(&payload, b"date", from) {
+       while let Some(pos) = find_slice(payload, b"date", from) {
            payload[(from + pos)..(from + pos + 35)]
                .copy_from_slice(b"date: Thu, 01 Jan 1970 12:34:56 UTC");
            from += 35;
@ -1077,7 +1077,7 @@ mod tests {
        fn_service(|req: Request| {
            let path = req.path().as_bytes();
            ready(Ok::<_, Error>(
-               Response::ok().set_body(AnyBody::from_slice(path)),
+               Response::ok().set_body(AnyBody::copy_from_slice(path)),
            ))
        })
    }
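Throughout this diff the old `BodySize::Empty` variant is gone; a zero-length body is now simply `BodySize::Sized(0)`, so call sites branch on one pattern instead of two. A stand-in sketch of the resulting shape (local enum defined here for illustration, not the crate's type):

    // Stand-in for actix-http's BodySize after this change; illustrative only.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum BodySize {
        None,
        Sized(u64),
        Stream,
    }

    // Mirrors the dispatcher's check above: only a real payload needs a send state.
    fn needs_payload_state(size: BodySize) -> bool {
        !matches!(size, BodySize::None | BodySize::Sized(0))
    }

    fn main() {
        assert!(!needs_payload_state(BodySize::Sized(0)));
        assert!(needs_payload_state(BodySize::Sized(5)));
        assert!(needs_payload_state(BodySize::Stream));
    }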


@ -20,6 +20,7 @@ const AVERAGE_HEADER_SIZE: usize = 30;
#[derive(Debug)]
pub(crate) struct MessageEncoder<T: MessageType> {
+   #[allow(dead_code)]
    pub length: BodySize,
    pub te: TransferEncoding,
    _phantom: PhantomData<T>,
@ -55,7 +56,7 @@ pub(crate) trait MessageType: Sized {
        dst: &mut BytesMut,
        version: Version,
        mut length: BodySize,
-       ctype: ConnectionType,
+       conn_type: ConnectionType,
        config: &ServiceConfig,
    ) -> io::Result<()> {
        let chunked = self.chunked();
@ -70,17 +71,27 @@ pub(crate) trait MessageType: Sized {
                | StatusCode::PROCESSING
                | StatusCode::NO_CONTENT => {
                    // skip content-length and transfer-encoding headers
-                   // See https://tools.ietf.org/html/rfc7230#section-3.3.1
+                   // see https://tools.ietf.org/html/rfc7230#section-3.3.1
                    // and https://tools.ietf.org/html/rfc7230#section-3.3.2
                    skip_len = true;
                    length = BodySize::None
                }
+               StatusCode::NOT_MODIFIED => {
+                   // 304 responses should never have a body but should retain a manually set
+                   // content-length header see https://tools.ietf.org/html/rfc7232#section-4.1
+                   skip_len = false;
+                   length = BodySize::None;
+               }
                _ => {}
            }
        }
        match length {
            BodySize::Stream => {
                if chunked {
+                   skip_len = true;
                    if camel_case {
                        dst.put_slice(b"\r\nTransfer-Encoding: chunked\r\n")
                    } else {
@ -91,19 +102,16 @@ pub(crate) trait MessageType: Sized {
                    dst.put_slice(b"\r\n");
                }
            }
-           BodySize::Empty => {
-               if camel_case {
-                   dst.put_slice(b"\r\nContent-Length: 0\r\n");
-               } else {
-                   dst.put_slice(b"\r\ncontent-length: 0\r\n");
-               }
+           BodySize::Sized(0) if camel_case => {
+               dst.put_slice(b"\r\nContent-Length: 0\r\n")
            }
+           BodySize::Sized(0) => dst.put_slice(b"\r\ncontent-length: 0\r\n"),
            BodySize::Sized(len) => helpers::write_content_length(len, dst),
            BodySize::None => dst.put_slice(b"\r\n"),
        }
        // Connection
-       match ctype {
+       match conn_type {
            ConnectionType::Upgrade => dst.put_slice(b"connection: upgrade\r\n"),
            ConnectionType::KeepAlive if version < Version::HTTP_11 => {
                if camel_case {
@ -174,7 +182,7 @@ pub(crate) trait MessageType: Sized {
            unsafe {
                if camel_case {
                    // use Camel-Case headers
-                   write_camel_case(k, from_raw_parts_mut(buf, k_len));
+                   write_camel_case(k, buf, k_len);
                } else {
                    write_data(k, buf, k_len);
                }
@ -328,13 +336,13 @@ impl<T: MessageType> MessageEncoder<T> {
        stream: bool,
        version: Version,
        length: BodySize,
-       ctype: ConnectionType,
+       conn_type: ConnectionType,
        config: &ServiceConfig,
    ) -> io::Result<()> {
        // transfer encoding
        if !head {
            self.te = match length {
-               BodySize::Empty => TransferEncoding::empty(),
+               BodySize::Sized(0) => TransferEncoding::empty(),
                BodySize::Sized(len) => TransferEncoding::length(len),
                BodySize::Stream => {
                    if message.chunked() && !stream {
@ -350,7 +358,7 @@ impl<T: MessageType> MessageEncoder<T> {
        }
        message.encode_status(dst)?;
-       message.encode_headers(dst, version, length, ctype, config)
+       message.encode_headers(dst, version, length, conn_type, config)
    }
}
@ -364,10 +372,12 @@ pub(crate) struct TransferEncoding {
enum TransferEncodingKind {
    /// An Encoder for when Transfer-Encoding includes `chunked`.
    Chunked(bool),
    /// An Encoder for when Content-Length is set.
    ///
    /// Enforces that the body is not longer than the Content-Length header.
    Length(u64),
    /// An Encoder for when Content-Length is not known.
    ///
    /// Application decides when to stop writing.
@ -472,15 +482,22 @@ impl TransferEncoding {
    }
}
/// # Safety
-/// Callers must ensure that the given length matches given value length.
+/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is
+/// valid for writes of at least `len` bytes.
unsafe fn write_data(value: &[u8], buf: *mut u8, len: usize) {
    debug_assert_eq!(value.len(), len);
    copy_nonoverlapping(value.as_ptr(), buf, len);
}
-fn write_camel_case(value: &[u8], buffer: &mut [u8]) {
+/// # Safety
+/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is
+/// valid for writes of at least `len` bytes.
+unsafe fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
    // first copy entire (potentially wrong) slice to output
-   buffer[..value.len()].copy_from_slice(value);
+   write_data(value, buf, len);
+   // SAFETY: We just initialized the buffer with `value`
+   let buffer = from_raw_parts_mut(buf, len);
    let mut iter = value.iter();
@ -544,7 +561,7 @@ mod tests {
        let _ = head.encode_headers(
            &mut bytes,
            Version::HTTP_11,
-           BodySize::Empty,
+           BodySize::Sized(0),
            ConnectionType::Close,
            &ServiceConfig::default(),
        );
@ -615,7 +632,7 @@ mod tests {
        let _ = head.encode_headers(
            &mut bytes,
            Version::HTTP_11,
-           BodySize::Empty,
+           BodySize::Sized(0),
            ConnectionType::Close,
            &ServiceConfig::default(),
        );
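For reference, the camel-casing that `write_camel_case` performs over raw header bytes can be expressed safely like this (a standalone sketch of the same transformation, not the crate's pointer-based fast path):

    // Uppercase the first letter and any letter that follows a '-', in place:
    // "content-length" -> "Content-Length".
    fn camel_case_in_place(header: &mut [u8]) {
        let mut upper_next = true;
        for byte in header.iter_mut() {
            if upper_next {
                byte.make_ascii_uppercase();
            }
            upper_next = *byte == b'-';
        }
    }

    fn main() {
        let mut name = *b"content-length";
        camel_case_in_place(&mut name);
        assert_eq!(&name, b"Content-Length");
    }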


@ -1,6 +1,8 @@
//! HTTP/1 protocol implementation.
use bytes::{Bytes, BytesMut};
+mod chunked;
mod client;
mod codec;
mod decoder;


@ -102,7 +102,6 @@ where
mod openssl {
    use super::*;
-   use actix_service::ServiceFactoryExt;
    use actix_tls::accept::{
        openssl::{Acceptor, SslAcceptor, SslError, TlsStream},
        TlsError,
@ -133,7 +132,7 @@ mod openssl {
    U::Error: fmt::Display + Into<Response<AnyBody>>,
    U::InitError: fmt::Debug,
{
-   /// Create openssl based service
+   /// Create OpenSSL based service.
    pub fn openssl(
        self,
        acceptor: SslAcceptor,
@ -145,11 +144,13 @@ mod openssl {
        InitError = (),
    > {
        Acceptor::new(acceptor)
-           .map_err(TlsError::Tls)
-           .map_init_err(|_| panic!())
-           .and_then(|io: TlsStream<TcpStream>| {
+           .map_init_err(|_| {
+               unreachable!("TLS acceptor service factory does not error on init")
+           })
+           .map_err(TlsError::into_service_error)
+           .map(|io: TlsStream<TcpStream>| {
                let peer_addr = io.get_ref().peer_addr().ok();
-               ready(Ok((io, peer_addr)))
+               (io, peer_addr)
            })
            .and_then(self.map_err(TlsError::Service))
    }
@ -158,16 +159,17 @@ mod openssl {
#[cfg(feature = "rustls")]
mod rustls {
-   use super::*;
    use std::io;
-   use actix_service::ServiceFactoryExt;
+   use actix_service::ServiceFactoryExt as _;
    use actix_tls::accept::{
        rustls::{Acceptor, ServerConfig, TlsStream},
        TlsError,
    };
+   use super::*;
    impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
    where
        S: ServiceFactory<Request, Config = ()>,
@ -193,7 +195,7 @@ mod rustls {
    U::Error: fmt::Display + Into<Response<AnyBody>>,
    U::InitError: fmt::Debug,
{
-   /// Create rustls based service
+   /// Create Rustls based service.
    pub fn rustls(
        self,
        config: ServerConfig,
@ -205,11 +207,13 @@ mod rustls {
        InitError = (),
    > {
        Acceptor::new(config)
-           .map_err(TlsError::Tls)
-           .map_init_err(|_| panic!())
-           .and_then(|io: TlsStream<TcpStream>| {
+           .map_init_err(|_| {
+               unreachable!("TLS acceptor service factory does not error on init")
+           })
+           .map_err(TlsError::into_service_error)
+           .map(|io: TlsStream<TcpStream>| {
                let peer_addr = io.get_ref().0.peer_addr().ok();
-               ready(Ok((io, peer_addr)))
+               (io, peer_addr)
            })
            .and_then(self.map_err(TlsError::Service))
    }


@ -63,7 +63,6 @@ where
                .is_write_buf_full()
            {
                let next =
-                   // TODO: MSRV 1.51: poll_map_err
                    match this.body.as_mut().as_pin_mut().unwrap().poll_next(cx) {
                        Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)),
                        Poll::Ready(Some(Err(err))) => {


@ -10,11 +10,15 @@ use std::{
};
use actix_codec::{AsyncRead, AsyncWrite};
+use actix_rt::time::Sleep;
use actix_service::Service;
use actix_utils::future::poll_fn;
use bytes::{Bytes, BytesMut};
use futures_core::ready;
-use h2::server::{Connection, SendResponse};
+use h2::{
+    server::{Connection, SendResponse},
+    Ping, PingPong,
+};
use http::header::{HeaderValue, CONNECTION, CONTENT_LENGTH, DATE, TRANSFER_ENCODING};
use log::{error, trace};
use pin_project_lite::pin_project;
@ -36,29 +40,46 @@ pin_project! {
        on_connect_data: OnConnectData,
        config: ServiceConfig,
        peer_addr: Option<net::SocketAddr>,
+       ping_pong: Option<H2PingPong>,
        _phantom: PhantomData<B>
    }
}
-impl<T, S, B, X, U> Dispatcher<T, S, B, X, U> {
+impl<T, S, B, X, U> Dispatcher<T, S, B, X, U>
+where
+    T: AsyncRead + AsyncWrite + Unpin,
+{
    pub(crate) fn new(
        flow: Rc<HttpFlow<S, X, U>>,
-       connection: Connection<T, Bytes>,
+       mut connection: Connection<T, Bytes>,
        on_connect_data: OnConnectData,
        config: ServiceConfig,
        peer_addr: Option<net::SocketAddr>,
    ) -> Self {
+       let ping_pong = config.keep_alive_timer().map(|timer| H2PingPong {
+           timer: Box::pin(timer),
+           on_flight: false,
+           ping_pong: connection.ping_pong().unwrap(),
+       });
        Self {
            flow,
            config,
            peer_addr,
            connection,
            on_connect_data,
+           ping_pong,
            _phantom: PhantomData,
        }
    }
}
+struct H2PingPong {
+    timer: Pin<Box<Sleep>>,
+    on_flight: bool,
+    ping_pong: PingPong,
+}
impl<T, S, B, X, U> Future for Dispatcher<T, S, B, X, U>
where
    T: AsyncRead + AsyncWrite + Unpin,
@ -77,54 +98,92 @@ where
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let this = self.get_mut();
-       while let Some((req, tx)) =
-           ready!(Pin::new(&mut this.connection).poll_accept(cx)?)
-       {
+       loop {
+           match Pin::new(&mut this.connection).poll_accept(cx)? {
+               Poll::Ready(Some((req, tx))) => {
                    let (parts, body) = req.into_parts();
                    let pl = crate::h2::Payload::new(body);
                    let pl = Payload::<crate::payload::PayloadStream>::H2(pl);
                    let mut req = Request::with_payload(pl);
                    let head = req.head_mut();
                    head.uri = parts.uri;
                    head.method = parts.method;
                    head.version = parts.version;
                    head.headers = parts.headers.into();
                    head.peer_addr = this.peer_addr;
                    // merge on_connect_ext data into request extensions
                    this.on_connect_data.merge_into(&mut req);
                    let fut = this.flow.service.call(req);
                    let config = this.config.clone();
                    // multiplex request handling with spawn task
                    actix_rt::spawn(async move {
                        // resolve service call and send response.
                        let res = match fut.await {
                            Ok(res) => handle_response(res.into(), tx, config).await,
                            Err(err) => {
                                let res: Response<AnyBody> = err.into();
                                handle_response(res, tx, config).await
                            }
                        };
                        // log error.
                        if let Err(err) = res {
                            match err {
                                DispatchError::SendResponse(err) => {
                                    trace!("Error sending HTTP/2 response: {:?}", err)
                                }
                                DispatchError::SendData(err) => warn!("{:?}", err),
                                DispatchError::ResponseBody(err) => {
                                    error!("Response payload stream error: {:?}", err)
                                }
                            }
                        }
                    });
-           }
-
-           Poll::Ready(Ok(()))
+               }
+               Poll::Ready(None) => return Poll::Ready(Ok(())),
+               Poll::Pending => match this.ping_pong.as_mut() {
+                   Some(ping_pong) => loop {
+                       if ping_pong.on_flight {
+                           // A ping is in flight: poll for the pong and the keep-alive timer.
+                           // On a successful pong, reset the timer to schedule the next ping.
+                           match ping_pong.ping_pong.poll_pong(cx)? {
+                               Poll::Ready(_) => {
+                                   ping_pong.on_flight = false;
+                                   let dead_line = this.config.keep_alive_expire().unwrap();
+                                   ping_pong.timer.as_mut().reset(dead_line);
+                               }
+                               Poll::Pending => {
+                                   return ping_pong.timer.as_mut().poll(cx).map(|_| Ok(()))
+                               }
+                           }
+                       } else {
+                           // No ping in flight: the keep-alive timer acts as an interval
+                           // and decides when the next ping is sent.
+                           ready!(ping_pong.timer.as_mut().poll(cx));
+                           ping_pong.ping_pong.send_ping(Ping::opaque())?;
+                           let dead_line = this.config.keep_alive_expire().unwrap();
+                           ping_pong.timer.as_mut().reset(dead_line);
+                           ping_pong.on_flight = true;
+                       }
+                   },
+                   None => return Poll::Pending,
+               },
+           }
+       }
    }
}
@ -226,9 +285,11 @@ fn prepare_response(
    let _ = match size {
        BodySize::None | BodySize::Stream => None,
-       BodySize::Empty => res
+       BodySize::Sized(0) => res
            .headers_mut()
            .insert(CONTENT_LENGTH, HeaderValue::from_static("0")),
        BodySize::Sized(len) => {
            let mut buf = itoa::Buffer::new();
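The dispatcher above folds the keep-alive ping into its own poll loop with a single boxed timer. The same idea can be sketched as a free-standing task using only the h2 handles that appear in this diff (`PingPong::send_ping`, `poll_pong`, `Ping::opaque`) plus `actix_utils::future::poll_fn`; the `tokio::time` timeout handling here is an assumption of the sketch, not the crate's code:

    use std::time::Duration;

    use actix_utils::future::poll_fn;
    use h2::{Ping, PingPong};

    // Send a PING every `interval`; returning without an error means the peer
    // stopped answering and the caller should close the connection.
    async fn h2_keep_alive(mut pp: PingPong, interval: Duration) -> Result<(), h2::Error> {
        loop {
            tokio::time::sleep(interval).await;
            pp.send_ping(Ping::opaque())?;

            match tokio::time::timeout(interval, poll_fn(|cx| pp.poll_pong(cx))).await {
                Ok(pong) => drop(pong?),        // pong arrived in time: peer is alive
                Err(_elapsed) => return Ok(()), // no pong: treat the peer as gone
            }
        }
    }

A server would spawn something like this next to the connection driver and tear both down when either finishes, which is roughly what the integrated poll loop achieves without a second task.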


@ -101,9 +101,11 @@ where
#[cfg(feature = "openssl")] #[cfg(feature = "openssl")]
mod openssl { mod openssl {
use actix_service::{fn_factory, fn_service, ServiceFactoryExt}; use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::openssl::{Acceptor, SslAcceptor, SslError, TlsStream}; use actix_tls::accept::{
use actix_tls::accept::TlsError; openssl::{Acceptor, SslAcceptor, SslError, TlsStream},
TlsError,
};
use super::*; use super::*;
@ -118,7 +120,7 @@ mod openssl {
B: MessageBody + 'static, B: MessageBody + 'static,
B::Error: Into<Box<dyn StdError>>, B::Error: Into<Box<dyn StdError>>,
{ {
/// Create OpenSSL based service /// Create OpenSSL based service.
pub fn openssl( pub fn openssl(
self, self,
acceptor: SslAcceptor, acceptor: SslAcceptor,
@ -130,16 +132,14 @@ mod openssl {
InitError = S::InitError, InitError = S::InitError,
> { > {
Acceptor::new(acceptor) Acceptor::new(acceptor)
.map_err(TlsError::Tls) .map_init_err(|_| {
.map_init_err(|_| panic!()) unreachable!("TLS acceptor service factory does not error on init")
.and_then(fn_factory(|| { })
ready(Ok::<_, S::InitError>(fn_service( .map_err(TlsError::into_service_error)
|io: TlsStream<TcpStream>| { .map(|io: TlsStream<TcpStream>| {
let peer_addr = io.get_ref().peer_addr().ok(); let peer_addr = io.get_ref().peer_addr().ok();
ready(Ok((io, peer_addr))) (io, peer_addr)
}, })
)))
}))
.and_then(self.map_err(TlsError::Service)) .and_then(self.map_err(TlsError::Service))
} }
} }
@ -147,12 +147,16 @@ mod openssl {
#[cfg(feature = "rustls")] #[cfg(feature = "rustls")]
mod rustls { mod rustls {
use super::*;
use actix_service::ServiceFactoryExt;
use actix_tls::accept::rustls::{Acceptor, ServerConfig, TlsStream};
use actix_tls::accept::TlsError;
use std::io; use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls::{Acceptor, ServerConfig, TlsStream},
TlsError,
};
use super::*;
impl<S, B> H2Service<TlsStream<TcpStream>, S, B> impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
where where
S: ServiceFactory<Request, Config = ()>, S: ServiceFactory<Request, Config = ()>,
@ -164,7 +168,7 @@ mod rustls {
B: MessageBody + 'static, B: MessageBody + 'static,
B::Error: Into<Box<dyn StdError>>, B::Error: Into<Box<dyn StdError>>,
{ {
/// Create Rustls based service /// Create Rustls based service.
pub fn rustls( pub fn rustls(
self, self,
mut config: ServerConfig, mut config: ServerConfig,
@ -177,19 +181,17 @@ mod rustls {
> { > {
let mut protos = vec![b"h2".to_vec()]; let mut protos = vec![b"h2".to_vec()];
protos.extend_from_slice(&config.alpn_protocols); protos.extend_from_slice(&config.alpn_protocols);
config.set_protocols(&protos); config.alpn_protocols = protos;
Acceptor::new(config) Acceptor::new(config)
.map_err(TlsError::Tls) .map_init_err(|_| {
.map_init_err(|_| panic!()) unreachable!("TLS acceptor service factory does not error on init")
.and_then(fn_factory(|| { })
ready(Ok::<_, S::InitError>(fn_service( .map_err(TlsError::into_service_error)
|io: TlsStream<TcpStream>| { .map(|io: TlsStream<TcpStream>| {
let peer_addr = io.get_ref().0.peer_addr().ok(); let peer_addr = io.get_ref().0.peer_addr().ok();
ready(Ok((io, peer_addr))) (io, peer_addr)
}, })
)))
}))
.and_then(self.map_err(TlsError::Service)) .and_then(self.map_err(TlsError::Service))
} }
} }


@ -684,7 +684,7 @@ impl<'a> Iterator for Iter<'a> {
    fn next(&mut self) -> Option<Self::Item> {
        // handle in-progress multi value lists first
-       if let Some((ref name, ref mut vals)) = self.multi_inner {
+       if let Some((name, ref mut vals)) = self.multi_inner {
            match vals.get(self.multi_idx) {
                Some(val) => {
                    self.multi_idx += 1;


@ -1,5 +1,6 @@
-use std::{convert::Infallible, str::FromStr};
+use std::{convert::TryFrom, str::FromStr};
+use derive_more::{Display, Error};
use http::header::InvalidHeaderValue;
use crate::{
@ -8,8 +9,16 @@ use crate::{
    HttpMessage,
};
+/// Error return when a content encoding is unknown.
+///
+/// Example: 'compress'
+#[derive(Debug, Display, Error)]
+#[display(fmt = "unsupported content encoding")]
+pub struct ContentEncodingParseError;
/// Represents a supported content encoding.
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Debug, Clone, Copy, PartialEq)]
+#[non_exhaustive]
pub enum ContentEncoding {
    /// Automatically select encoding based on encoding negotiation.
    Auto,
@ -37,7 +46,7 @@ impl ContentEncoding {
        matches!(self, ContentEncoding::Identity | ContentEncoding::Auto)
    }
-   /// Convert content encoding to string
+   /// Convert content encoding to string.
    #[inline]
    pub fn as_str(self) -> &'static str {
        match self {
@ -48,18 +57,6 @@ impl ContentEncoding {
            ContentEncoding::Identity | ContentEncoding::Auto => "identity",
        }
    }
-   /// Default Q-factor (quality) value.
-   #[inline]
-   pub fn quality(self) -> f64 {
-       match self {
-           ContentEncoding::Br => 1.1,
-           ContentEncoding::Gzip => 1.0,
-           ContentEncoding::Deflate => 0.9,
-           ContentEncoding::Identity | ContentEncoding::Auto => 0.1,
-           ContentEncoding::Zstd => 0.0,
-       }
-   }
}
impl Default for ContentEncoding {
@ -69,31 +66,33 @@ impl Default for ContentEncoding {
}
impl FromStr for ContentEncoding {
-   type Err = Infallible;
+   type Err = ContentEncodingParseError;
    fn from_str(val: &str) -> Result<Self, Self::Err> {
-       Ok(Self::from(val))
-   }
-}
-impl From<&str> for ContentEncoding {
-   fn from(val: &str) -> ContentEncoding {
        let val = val.trim();
        if val.eq_ignore_ascii_case("br") {
-           ContentEncoding::Br
+           Ok(ContentEncoding::Br)
        } else if val.eq_ignore_ascii_case("gzip") {
-           ContentEncoding::Gzip
+           Ok(ContentEncoding::Gzip)
        } else if val.eq_ignore_ascii_case("deflate") {
-           ContentEncoding::Deflate
+           Ok(ContentEncoding::Deflate)
        } else if val.eq_ignore_ascii_case("zstd") {
-           ContentEncoding::Zstd
+           Ok(ContentEncoding::Zstd)
        } else {
-           ContentEncoding::default()
+           Err(ContentEncodingParseError)
        }
    }
}
+impl TryFrom<&str> for ContentEncoding {
+   type Error = ContentEncodingParseError;
+
+   fn try_from(val: &str) -> Result<Self, Self::Error> {
+       val.parse()
+   }
+}
impl IntoHeaderValue for ContentEncoding {
    type Error = InvalidHeaderValue;
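With this change an unknown encoding string is a parse error instead of silently mapping to the default. Assuming the usual re-export path under actix_http::http::header, usage looks like:

    use actix_http::http::header::ContentEncoding;

    fn main() {
        // known encodings parse case-insensitively
        assert_eq!("GZIP".parse::<ContentEncoding>().unwrap(), ContentEncoding::Gzip);

        // unknown encodings are now rejected instead of falling back to Identity/Auto
        assert!("compress".parse::<ContentEncoding>().is_err());
    }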


@ -0,0 +1,82 @@
use std::{fmt, io::Write, str::FromStr, time::SystemTime};
use bytes::BytesMut;
use http::header::{HeaderValue, InvalidHeaderValue};
use crate::{
config::DATE_VALUE_LENGTH, error::ParseError, header::IntoHeaderValue,
helpers::MutWriter,
};
/// A timestamp with HTTP formatting and parsing.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct HttpDate(SystemTime);
impl FromStr for HttpDate {
type Err = ParseError;
fn from_str(s: &str) -> Result<HttpDate, ParseError> {
match httpdate::parse_http_date(s) {
Ok(sys_time) => Ok(HttpDate(sys_time)),
Err(_) => Err(ParseError::Header),
}
}
}
impl fmt::Display for HttpDate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let date_str = httpdate::fmt_http_date(self.0);
f.write_str(&date_str)
}
}
impl IntoHeaderValue for HttpDate {
type Error = InvalidHeaderValue;
fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
let mut buf = BytesMut::with_capacity(DATE_VALUE_LENGTH);
let mut wrt = MutWriter(&mut buf);
// unwrap: date output is known to be well formed and of known length
write!(wrt, "{}", httpdate::fmt_http_date(self.0)).unwrap();
HeaderValue::from_maybe_shared(buf.split().freeze())
}
}
impl From<SystemTime> for HttpDate {
fn from(sys_time: SystemTime) -> HttpDate {
HttpDate(sys_time)
}
}
impl From<HttpDate> for SystemTime {
fn from(HttpDate(sys_time): HttpDate) -> SystemTime {
sys_time
}
}
#[cfg(test)]
mod tests {
use std::time::Duration;
use super::*;
#[test]
fn date_header() {
macro_rules! assert_parsed_date {
($case:expr, $exp:expr) => {
assert_eq!($case.parse::<HttpDate>().unwrap(), $exp);
};
}
// 784198117 = SystemTime::from(datetime!(1994-11-07 08:48:37).assume_utc()).duration_since(SystemTime::UNIX_EPOCH));
let nov_07 = HttpDate(SystemTime::UNIX_EPOCH + Duration::from_secs(784198117));
assert_parsed_date!("Mon, 07 Nov 1994 08:48:37 GMT", nov_07);
assert_parsed_date!("Monday, 07-Nov-94 08:48:37 GMT", nov_07);
assert_parsed_date!("Mon Nov 7 08:48:37 1994", nov_07);
assert!("this-is-no-date".parse::<HttpDate>().is_err());
}
}
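The new implementation leans on the httpdate crate for both parsing and formatting. Assuming the usual actix_http::http::header re-export, round-tripping a date looks like this (the expected epoch offset comes from the test above):

    use std::time::{Duration, SystemTime};

    use actix_http::http::header::HttpDate;

    fn main() {
        // parse any of the three formats RFC 7231 requires servers to accept
        let parsed: HttpDate = "Mon, 07 Nov 1994 08:48:37 GMT".parse().unwrap();
        assert_eq!(
            SystemTime::from(parsed),
            SystemTime::UNIX_EPOCH + Duration::from_secs(784_198_117),
        );

        // Display renders the fixed-length IMF-fixdate form used in Date headers
        println!("{}", HttpDate::from(SystemTime::UNIX_EPOCH));
        // -> "Thu, 01 Jan 1970 00:00:00 GMT"
    }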


@ -1,97 +0,0 @@
use std::{
fmt,
io::Write,
str::FromStr,
time::{SystemTime, UNIX_EPOCH},
};
use bytes::buf::BufMut;
use bytes::BytesMut;
use http::header::{HeaderValue, InvalidHeaderValue};
use time::{OffsetDateTime, PrimitiveDateTime, UtcOffset};
use crate::error::ParseError;
use crate::header::IntoHeaderValue;
use crate::time_parser;
/// A timestamp with HTTP formatting and parsing.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct HttpDate(OffsetDateTime);
impl FromStr for HttpDate {
type Err = ParseError;
fn from_str(s: &str) -> Result<HttpDate, ParseError> {
match time_parser::parse_http_date(s) {
Some(t) => Ok(HttpDate(t.assume_utc())),
None => Err(ParseError::Header),
}
}
}
impl fmt::Display for HttpDate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.0.format("%a, %d %b %Y %H:%M:%S GMT"), f)
}
}
impl From<SystemTime> for HttpDate {
fn from(sys: SystemTime) -> HttpDate {
HttpDate(PrimitiveDateTime::from(sys).assume_utc())
}
}
impl IntoHeaderValue for HttpDate {
type Error = InvalidHeaderValue;
fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
let mut wrt = BytesMut::with_capacity(29).writer();
write!(
wrt,
"{}",
self.0
.to_offset(UtcOffset::UTC)
.format("%a, %d %b %Y %H:%M:%S GMT")
)
.unwrap();
HeaderValue::from_maybe_shared(wrt.get_mut().split().freeze())
}
}
impl From<HttpDate> for SystemTime {
fn from(date: HttpDate) -> SystemTime {
let dt = date.0;
let epoch = OffsetDateTime::unix_epoch();
UNIX_EPOCH + (dt - epoch)
}
}
#[cfg(test)]
mod tests {
use super::HttpDate;
use time::{date, time, PrimitiveDateTime};
#[test]
fn test_date() {
let nov_07 = HttpDate(
PrimitiveDateTime::new(date!(1994 - 11 - 07), time!(8:48:37)).assume_utc(),
);
assert_eq!(
"Sun, 07 Nov 1994 08:48:37 GMT".parse::<HttpDate>().unwrap(),
nov_07
);
assert_eq!(
"Sunday, 07-Nov-94 08:48:37 GMT"
.parse::<HttpDate>()
.unwrap(),
nov_07
);
assert_eq!(
"Sun Nov 7 08:48:37 1994".parse::<HttpDate>().unwrap(),
nov_07
);
assert!("this-is-no-date".parse::<HttpDate>().is_err());
}
}


@ -3,12 +3,12 @@
mod charset;
mod content_encoding;
mod extended;
-mod httpdate;
+mod http_date;
mod quality_item;
pub use self::charset::Charset;
pub use self::content_encoding::ContentEncoding;
pub use self::extended::{parse_extended_value, ExtendedValue};
-pub use self::httpdate::HttpDate;
+pub use self::http_date::HttpDate;
pub use self::quality_item::{q, qitem, Quality, QualityItem};
pub use language_tags::LanguageTag;


@ -1,11 +1,14 @@
use std::{
    cmp,
    convert::{TryFrom, TryInto},
-   fmt, str,
+   fmt,
+   str::{self, FromStr},
};
use derive_more::{Display, Error};
+use crate::error::ParseError;
const MAX_QUALITY: u16 = 1000;
const MAX_FLOAT_QUALITY: f32 = 1.0;
@ -113,12 +116,12 @@ impl<T: fmt::Display> fmt::Display for QualityItem<T> {
    }
}
-impl<T: str::FromStr> str::FromStr for QualityItem<T> {
-   type Err = crate::error::ParseError;
+impl<T: FromStr> FromStr for QualityItem<T> {
+   type Err = ParseError;
-   fn from_str(qitem_str: &str) -> Result<QualityItem<T>, crate::error::ParseError> {
+   fn from_str(qitem_str: &str) -> Result<Self, Self::Err> {
        if !qitem_str.is_ascii() {
-           return Err(crate::error::ParseError::Header);
+           return Err(ParseError::Header);
        }
        // Set defaults used if parsing fails.
@ -139,7 +142,7 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T> {
        if parts[0].len() < 2 {
            // Can't possibly be an attribute since an attribute needs at least a name followed
            // by an equals sign. And bare identifiers are forbidden.
-           return Err(crate::error::ParseError::Header);
+           return Err(ParseError::Header);
        }
        let start = &parts[0][0..2];
@ -148,25 +151,21 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T> {
            let q_val = &parts[0][2..];
            if q_val.len() > 5 {
                // longer than 5 indicates an over-precise q-factor
-               return Err(crate::error::ParseError::Header);
+               return Err(ParseError::Header);
            }
-           let q_value = q_val
-               .parse::<f32>()
-               .map_err(|_| crate::error::ParseError::Header)?;
+           let q_value = q_val.parse::<f32>().map_err(|_| ParseError::Header)?;
            if (0f32..=1f32).contains(&q_value) {
                quality = q_value;
                raw_item = parts[1];
            } else {
-               return Err(crate::error::ParseError::Header);
+               return Err(ParseError::Header);
            }
        }
    }
-   let item = raw_item
-       .parse::<T>()
-       .map_err(|_| crate::error::ParseError::Header)?;
+   let item = raw_item.parse::<T>().map_err(|_| ParseError::Header)?;
    // we already checked above that the quality is within range
    Ok(QualityItem::new(item, Quality::from_f32(quality)))
@ -196,6 +195,7 @@ mod tests {
    use super::*;
    // copy of encoding from actix-web headers
+   #[allow(clippy::enum_variant_names)] // allow Encoding prefix on EncodingExt
    #[derive(Clone, PartialEq, Debug)]
    pub enum Encoding {
        Chunked,
@ -224,7 +224,7 @@ mod tests {
        }
    }
-   impl str::FromStr for Encoding {
+   impl FromStr for Encoding {
        type Err = crate::error::ParseError;
        fn from_str(s: &str) -> Result<Encoding, crate::error::ParseError> {
            use Encoding::*;


@ -14,7 +14,7 @@
//! [rustls]: https://crates.io/crates/rustls
//! [trust-dns]: https://crates.io/crates/trust-dns
-#![deny(rust_2018_idioms, nonstandard_style)]
+#![deny(rust_2018_idioms, nonstandard_style, clippy::uninit_assumed_init)]
#![allow(
    clippy::type_complexity,
    clippy::too_many_arguments,
@ -29,7 +29,6 @@ extern crate log;
pub mod body;
mod builder;
-pub mod client;
mod config;
#[cfg(feature = "__compress")]
@ -44,7 +43,6 @@ mod request;
mod response;
mod response_builder;
mod service;
-mod time_parser;
pub mod error;
pub mod h1;
@ -104,14 +102,9 @@ type ConnectCallback<IO> = dyn Fn(&IO, &mut Extensions);
///
/// # Implementation Details
/// Uses Option to reduce necessary allocations when merging with request extensions.
+#[derive(Default)]
pub(crate) struct OnConnectData(Option<Extensions>);
-impl Default for OnConnectData {
-   fn default() -> Self {
-       Self(None)
-   }
-}
impl OnConnectData {
    /// Construct by calling the on-connect callback with the underlying transport I/O.
    pub(crate) fn from_io<T>(


@ -209,7 +209,7 @@ impl RequestHeadType {
impl AsRef<RequestHead> for RequestHeadType {
    fn as_ref(&self) -> &RequestHead {
        match self {
-           RequestHeadType::Owned(head) => &head,
+           RequestHeadType::Owned(head) => head,
            RequestHeadType::Rc(head, _) => head.as_ref(),
        }
    }
@ -317,7 +317,7 @@ impl ResponseHead {
    }
    #[inline]
-   pub(crate) fn ctype(&self) -> Option<ConnectionType> {
+   pub(crate) fn conn_type(&self) -> Option<ConnectionType> {
        if self.flags.contains(Flags::CLOSE) {
            Some(ConnectionType::Close)
        } else if self.flags.contains(Flags::KEEP_ALIVE) {
@ -363,7 +363,7 @@ impl<T: Head> std::ops::Deref for Message<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
-       &self.head.as_ref()
+       self.head.as_ref()
    }
}


@ -15,7 +15,7 @@ use crate::{
    HttpMessage,
};
-/// Request
+/// An HTTP request.
pub struct Request<P = PayloadStream> {
    pub(crate) payload: Payload<P>,
    pub(crate) head: Message<RequestHead>,


@ -28,7 +28,7 @@ impl Response<AnyBody> {
    pub fn new(status: StatusCode) -> Self {
        Response {
            head: BoxedResponseHead::new(status),
-           body: AnyBody::Empty,
+           body: AnyBody::empty(),
        }
    }


@ -262,7 +262,7 @@ impl ResponseBuilder {
        S: Stream<Item = Result<Bytes, E>> + 'static,
        E: Into<Box<dyn StdError>> + 'static,
    {
-       self.body(AnyBody::from_message(BodyStream::new(stream)))
+       self.body(AnyBody::new_boxed(BodyStream::new(stream)))
    }
    /// Generate response with an empty body.
@ -270,7 +270,7 @@ impl ResponseBuilder {
    /// This `ResponseBuilder` will be left in a useless state.
    #[inline]
    pub fn finish(&mut self) -> Response<AnyBody> {
-       self.body(AnyBody::Empty)
+       self.body(AnyBody::empty())
    }
    /// Create an owned `ResponseBuilder`, leaving the original in a useless state.
@ -357,7 +357,7 @@ impl fmt::Debug for ResponseBuilder {
#[cfg(test)]
mod tests {
    use super::*;
-   use crate::body::Body;
+   use crate::body::AnyBody;
    use crate::http::header::{HeaderName, HeaderValue, CONTENT_TYPE};
    #[test]
@ -390,13 +390,13 @@ mod tests {
    fn test_content_type() {
        let resp = Response::build(StatusCode::OK)
            .content_type("text/plain")
-           .body(Body::Empty);
+           .body(AnyBody::empty());
        assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/plain")
    }
    #[test]
    fn test_into_builder() {
-       let mut resp: Response<Body> = "test".into();
+       let mut resp: Response<AnyBody> = "test".into();
        assert_eq!(resp.status(), StatusCode::OK);
        resp.headers_mut().insert(


@ -195,9 +195,11 @@ where
#[cfg(feature = "openssl")] #[cfg(feature = "openssl")]
mod openssl { mod openssl {
use actix_service::ServiceFactoryExt; use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::openssl::{Acceptor, SslAcceptor, SslError, TlsStream}; use actix_tls::accept::{
use actix_tls::accept::TlsError; openssl::{Acceptor, SslAcceptor, SslError, TlsStream},
TlsError,
};
use super::*; use super::*;
@ -227,7 +229,7 @@ mod openssl {
U::Error: fmt::Display + Into<Response<AnyBody>>, U::Error: fmt::Display + Into<Response<AnyBody>>,
U::InitError: fmt::Debug, U::InitError: fmt::Debug,
{ {
/// Create openssl based service /// Create OpenSSL based service.
pub fn openssl( pub fn openssl(
self, self,
acceptor: SslAcceptor, acceptor: SslAcceptor,
@ -239,9 +241,11 @@ mod openssl {
InitError = (), InitError = (),
> { > {
Acceptor::new(acceptor) Acceptor::new(acceptor)
.map_err(TlsError::Tls) .map_init_err(|_| {
.map_init_err(|_| panic!()) unreachable!("TLS acceptor service factory does not error on init")
.and_then(|io: TlsStream<TcpStream>| async { })
.map_err(TlsError::into_service_error)
.map(|io: TlsStream<TcpStream>| {
let proto = if let Some(protos) = io.ssl().selected_alpn_protocol() { let proto = if let Some(protos) = io.ssl().selected_alpn_protocol() {
if protos.windows(2).any(|window| window == b"h2") { if protos.windows(2).any(|window| window == b"h2") {
Protocol::Http2 Protocol::Http2
@ -251,8 +255,9 @@ mod openssl {
} else { } else {
Protocol::Http1 Protocol::Http1
}; };
let peer_addr = io.get_ref().peer_addr().ok(); let peer_addr = io.get_ref().peer_addr().ok();
Ok((io, proto, peer_addr)) (io, proto, peer_addr)
}) })
.and_then(self.map_err(TlsError::Service)) .and_then(self.map_err(TlsError::Service))
} }
@ -263,11 +268,13 @@ mod openssl {
mod rustls { mod rustls {
use std::io; use std::io;
use actix_tls::accept::rustls::{Acceptor, ServerConfig, Session, TlsStream}; use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::TlsError; use actix_tls::accept::{
rustls::{Acceptor, ServerConfig, TlsStream},
TlsError,
};
use super::*; use super::*;
use actix_service::ServiceFactoryExt;
impl<S, B, X, U> HttpService<TlsStream<TcpStream>, S, B, X, U> impl<S, B, X, U> HttpService<TlsStream<TcpStream>, S, B, X, U>
where where
@ -295,7 +302,7 @@ mod rustls {
U::Error: fmt::Display + Into<Response<AnyBody>>, U::Error: fmt::Display + Into<Response<AnyBody>>,
U::InitError: fmt::Debug, U::InitError: fmt::Debug,
{ {
/// Create rustls based service /// Create Rustls based service.
pub fn rustls( pub fn rustls(
self, self,
mut config: ServerConfig, mut config: ServerConfig,
@ -308,14 +315,15 @@ mod rustls {
> { > {
let mut protos = vec![b"h2".to_vec(), b"http/1.1".to_vec()]; let mut protos = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
protos.extend_from_slice(&config.alpn_protocols); protos.extend_from_slice(&config.alpn_protocols);
config.set_protocols(&protos); config.alpn_protocols = protos;
Acceptor::new(config) Acceptor::new(config)
.map_err(TlsError::Tls) .map_init_err(|_| {
.map_init_err(|_| panic!()) unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.and_then(|io: TlsStream<TcpStream>| async { .and_then(|io: TlsStream<TcpStream>| async {
let proto = if let Some(protos) = io.get_ref().1.get_alpn_protocol() let proto = if let Some(protos) = io.get_ref().1.alpn_protocol() {
{
if protos.windows(2).any(|window| window == b"h2") { if protos.windows(2).any(|window| window == b"h2") {
Protocol::Http2 Protocol::Http2
} else { } else {


@ -1,72 +0,0 @@
use time::{Date, OffsetDateTime, PrimitiveDateTime};
/// Attempt to parse a `time` string as one of either RFC 1123, RFC 850, or asctime.
pub(crate) fn parse_http_date(time: &str) -> Option<PrimitiveDateTime> {
try_parse_rfc_1123(time)
.or_else(|| try_parse_rfc_850(time))
.or_else(|| try_parse_asctime(time))
}
/// Attempt to parse a `time` string as a RFC 1123 formatted date time string.
///
/// Eg: `Fri, 12 Feb 2021 00:14:29 GMT`
fn try_parse_rfc_1123(time: &str) -> Option<PrimitiveDateTime> {
time::parse(time, "%a, %d %b %Y %H:%M:%S").ok()
}
/// Attempt to parse a `time` string as a RFC 850 formatted date time string.
///
/// Eg: `Wednesday, 11-Jan-21 13:37:41 UTC`
fn try_parse_rfc_850(time: &str) -> Option<PrimitiveDateTime> {
let dt = PrimitiveDateTime::parse(time, "%A, %d-%b-%y %H:%M:%S").ok()?;
// If the `time` string contains a two-digit year, then as per RFC 2616 § 19.3,
// we consider the year as part of this century if it's within the next 50 years,
// otherwise we consider as part of the previous century.
let now = OffsetDateTime::now_utc();
let century_start_year = (now.year() / 100) * 100;
let mut expanded_year = century_start_year + dt.year();
if expanded_year > now.year() + 50 {
expanded_year -= 100;
}
let date = Date::try_from_ymd(expanded_year, dt.month(), dt.day()).ok()?;
Some(PrimitiveDateTime::new(date, dt.time()))
}
/// Attempt to parse a `time` string using ANSI C's `asctime` format.
///
/// Eg: `Wed Feb 13 15:46:11 2013`
fn try_parse_asctime(time: &str) -> Option<PrimitiveDateTime> {
time::parse(time, "%a %b %_d %H:%M:%S %Y").ok()
}
#[cfg(test)]
mod tests {
use time::{date, time};
use super::*;
#[test]
fn test_rfc_850_year_shift() {
let date = try_parse_rfc_850("Friday, 19-Nov-82 16:14:55 EST").unwrap();
assert_eq!(date, date!(1982 - 11 - 19).with_time(time!(16:14:55)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-62 13:37:41 EST").unwrap();
assert_eq!(date, date!(2062 - 01 - 11).with_time(time!(13:37:41)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-21 13:37:41 EST").unwrap();
assert_eq!(date, date!(2021 - 01 - 11).with_time(time!(13:37:41)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-23 13:37:41 EST").unwrap();
assert_eq!(date, date!(2023 - 01 - 11).with_time(time!(13:37:41)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-99 13:37:41 EST").unwrap();
assert_eq!(date, date!(1999 - 01 - 11).with_time(time!(13:37:41)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-00 13:37:41 EST").unwrap();
assert_eq!(date, date!(2000 - 01 - 11).with_time(time!(13:37:41)));
}
}


@ -25,8 +25,8 @@ pub fn apply_mask_fast32(buf: &mut [u8], mask: [u8; 4]) {
    //
    // The unaligned prefix and suffix are masked/unmasked per byte; the properly
    // aligned middle slice takes the fast path and operates on 4-byte blocks.
-   let (mut prefix, words, mut suffix) = unsafe { buf.align_to_mut::<u32>() };
-   apply_mask_fallback(&mut prefix, mask);
+   let (prefix, words, suffix) = unsafe { buf.align_to_mut::<u32>() };
+   apply_mask_fallback(prefix, mask);
    let head = prefix.len() & 3;
    let mask_u32 = if head > 0 {
        if cfg!(target_endian = "big") {
@ -40,7 +40,7 @@ pub fn apply_mask_fast32(buf: &mut [u8], mask: [u8; 4]) {
    for word in words.iter_mut() {
        *word ^= mask_u32;
    }
-   apply_mask_fallback(&mut suffix, mask_u32.to_ne_bytes());
+   apply_mask_fallback(suffix, mask_u32.to_ne_bytes());
}
#[cfg(test)]
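The fast path above XORs whole 4-byte words, but the per-byte fallback it wraps is the entire RFC 6455 §5.3 masking algorithm, which is its own inverse. A minimal safe version for reference (standalone sketch, not the crate's implementation):

    // XOR every payload byte with the 4-byte masking key, cycling the key.
    fn apply_mask(buf: &mut [u8], mask: [u8; 4]) {
        for (i, byte) in buf.iter_mut().enumerate() {
            *byte ^= mask[i & 3];
        }
    }

    fn main() {
        let mask = [0x12, 0x34, 0x56, 0x78];
        let mut payload = *b"hello websocket";
        apply_mask(&mut payload, mask); // mask (client-to-server frames are always masked)
        apply_mask(&mut payload, mask); // unmask: XOR with the same key restores the input
        assert_eq!(&payload, b"hello websocket");
    }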


@ -210,7 +210,6 @@ pub fn handshake_response(req: &RequestHead) -> ResponseBuilder {
    Response::build(StatusCode::SWITCHING_PROTOCOLS)
        .upgrade("websocket")
-       .insert_header((header::TRANSFER_ENCODING, "chunked"))
        .insert_header((
            header::SEC_WEBSOCKET_ACCEPT,
            // key is known to be header value safe ascii


@ -0,0 +1,77 @@
use std::io;
use actix_http::{error::Error, HttpService, Response};
use actix_server::Server;
#[actix_rt::test]
async fn h2_ping_pong() -> io::Result<()> {
let (tx, rx) = std::sync::mpsc::sync_channel(1);
let lst = std::net::TcpListener::bind("127.0.0.1:0")?;
let addr = lst.local_addr().unwrap();
let join = std::thread::spawn(move || {
actix_rt::System::new().block_on(async move {
let srv = Server::build()
.disable_signals()
.workers(1)
.listen("h2_ping_pong", lst, || {
HttpService::build()
.keep_alive(3)
.h2(|_| async { Ok::<_, Error>(Response::ok()) })
.tcp()
})?
.run();
tx.send(srv.handle()).unwrap();
srv.await
})
});
let handle = rx.recv().unwrap();
let (sync_tx, rx) = std::sync::mpsc::sync_channel(1);
// use a separate thread for h2 client so it can be blocked.
std::thread::spawn(move || {
tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap()
.block_on(async move {
let stream = tokio::net::TcpStream::connect(addr).await.unwrap();
let (mut tx, conn) = h2::client::handshake(stream).await.unwrap();
tokio::spawn(async move { conn.await.unwrap() });
let (res, _) = tx.send_request(::http::Request::new(()), true).unwrap();
let res = res.await.unwrap();
assert_eq!(res.status().as_u16(), 200);
sync_tx.send(()).unwrap();
// intentionally block the client thread so it cannot answer the ping-pong.
std::thread::sleep(std::time::Duration::from_secs(1000));
})
});
rx.recv().unwrap();
let now = std::time::Instant::now();
// stop the server gracefully; this step can take up to 30 seconds.
handle.stop(true).await;
// join the server thread; this only finishes once all connections are gone.
join.join().unwrap()?;
// check how long joining the server thread took, to confirm the shutdown was triggered by
// the keep-alive timeout and not by the graceful shutdown timeout.
assert!(now.elapsed() < std::time::Duration::from_secs(30));
Ok(())
}


@ -5,10 +5,10 @@ extern crate tls_openssl as openssl;
use std::{convert::Infallible, io}; use std::{convert::Infallible, io};
use actix_http::{ use actix_http::{
body::{AnyBody, Body, SizedStream}, body::{AnyBody, SizedStream},
error::PayloadError, error::PayloadError,
http::{ http::{
header::{self, HeaderName, HeaderValue}, header::{self, HeaderValue},
Method, StatusCode, Version, Method, StatusCode, Version,
}, },
Error, HttpMessage, HttpService, Request, Response, Error, HttpMessage, HttpService, Request, Response,
@ -143,38 +143,25 @@ async fn test_h2_content_length() {
}) })
.await; .await;
let header = HeaderName::from_static("content-length"); static VALUE: HeaderValue = HeaderValue::from_static("0");
let value = HeaderValue::from_static("0");
{ {
for &i in &[0] { let req = srv.request(Method::HEAD, srv.surl("/0")).send();
let req = srv req.await.expect_err("should timeout on recv 1xx frame");
.request(Method::HEAD, srv.surl(&format!("/{}", i)))
.send();
let _response = req.await.expect_err("should timeout on recv 1xx frame");
// assert_eq!(response.headers().get(&header), None);
let req = srv let req = srv.request(Method::GET, srv.surl("/0")).send();
.request(Method::GET, srv.surl(&format!("/{}", i))) req.await.expect_err("should timeout on recv 1xx frame");
.send();
let _response = req.await.expect_err("should timeout on recv 1xx frame");
// assert_eq!(response.headers().get(&header), None);
}
for &i in &[1] { let req = srv.request(Method::GET, srv.surl("/1")).send();
let req = srv let response = req.await.unwrap();
.request(Method::GET, srv.surl(&format!("/{}", i))) assert!(response.headers().get("content-length").is_none());
.send();
let response = req.await.unwrap();
assert_eq!(response.headers().get(&header), None);
}
for &i in &[2, 3] { for &i in &[2, 3] {
let req = srv let req = srv
.request(Method::GET, srv.surl(&format!("/{}", i))) .request(Method::GET, srv.surl(&format!("/{}", i)))
.send(); .send();
let response = req.await.unwrap(); let response = req.await.unwrap();
assert_eq!(response.headers().get(&header), Some(&value)); assert_eq!(response.headers().get("content-length"), Some(&VALUE));
} }
} }
} }
@ -422,7 +409,7 @@ impl From<BadRequest> for Response<AnyBody> {
async fn test_h2_service_error() { async fn test_h2_service_error() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.h2(|_| err::<Response<Body>, _>(BadRequest)) .h2(|_| err::<Response<AnyBody>, _>(BadRequest))
.openssl(tls_config()) .openssl(tls_config())
.map_err(|_| ()) .map_err(|_| ())
}) })


@ -3,14 +3,14 @@
extern crate tls_rustls as rustls; extern crate tls_rustls as rustls;
use std::{ use std::{
convert::Infallible, convert::{Infallible, TryFrom},
io::{self, BufReader, Write}, io::{self, BufReader, Write},
net::{SocketAddr, TcpStream as StdTcpStream}, net::{SocketAddr, TcpStream as StdTcpStream},
sync::Arc, sync::Arc,
}; };
use actix_http::{ use actix_http::{
body::{AnyBody, Body, SizedStream}, body::{AnyBody, SizedStream},
error::PayloadError, error::PayloadError,
http::{ http::{
header::{self, HeaderName, HeaderValue}, header::{self, HeaderName, HeaderValue},
@ -20,16 +20,14 @@ use actix_http::{
}; };
use actix_http_test::test_server; use actix_http_test::test_server;
use actix_service::{fn_factory_with_config, fn_service}; use actix_service::{fn_factory_with_config, fn_service};
use actix_tls::connect::tls::rustls::webpki_roots_cert_store;
use actix_utils::future::{err, ok}; use actix_utils::future::{err, ok};
use bytes::{Bytes, BytesMut}; use bytes::{Bytes, BytesMut};
use derive_more::{Display, Error}; use derive_more::{Display, Error};
use futures_core::Stream; use futures_core::Stream;
use futures_util::stream::{once, StreamExt as _}; use futures_util::stream::{once, StreamExt as _};
use rustls::{ use rustls::{Certificate, PrivateKey, ServerConfig as RustlsServerConfig, ServerName};
internal::pemfile::{certs, pkcs8_private_keys}, use rustls_pemfile::{certs, pkcs8_private_keys};
NoClientAuth, ServerConfig as RustlsServerConfig, Session,
};
use webpki::DNSNameRef;
async fn load_body<S>(mut stream: S) -> Result<BytesMut, PayloadError> async fn load_body<S>(mut stream: S) -> Result<BytesMut, PayloadError>
where where
@ -47,13 +45,24 @@ fn tls_config() -> RustlsServerConfig {
let cert_file = cert.serialize_pem().unwrap(); let cert_file = cert.serialize_pem().unwrap();
let key_file = cert.serialize_private_key_pem(); let key_file = cert.serialize_private_key_pem();
let mut config = RustlsServerConfig::new(NoClientAuth::new());
let cert_file = &mut BufReader::new(cert_file.as_bytes()); let cert_file = &mut BufReader::new(cert_file.as_bytes());
let key_file = &mut BufReader::new(key_file.as_bytes()); let key_file = &mut BufReader::new(key_file.as_bytes());
let cert_chain = certs(cert_file).unwrap(); let cert_chain = certs(cert_file)
.unwrap()
.into_iter()
.map(Certificate)
.collect();
let mut keys = pkcs8_private_keys(key_file).unwrap(); let mut keys = pkcs8_private_keys(key_file).unwrap();
config.set_single_cert(cert_chain, keys.remove(0)).unwrap();
let mut config = RustlsServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth()
.with_single_cert(cert_chain, PrivateKey(keys.remove(0)))
.unwrap();
config.alpn_protocols.push(HTTP1_1_ALPN_PROTOCOL.to_vec());
config.alpn_protocols.push(H2_ALPN_PROTOCOL.to_vec());
config config
} }
@ -62,19 +71,28 @@ pub fn get_negotiated_alpn_protocol(
addr: SocketAddr, addr: SocketAddr,
client_alpn_protocol: &[u8], client_alpn_protocol: &[u8],
) -> Option<Vec<u8>> { ) -> Option<Vec<u8>> {
let mut config = rustls::ClientConfig::new(); let mut config = rustls::ClientConfig::builder()
.with_safe_defaults()
.with_root_certificates(webpki_roots_cert_store())
.with_no_client_auth();
config.alpn_protocols.push(client_alpn_protocol.to_vec()); config.alpn_protocols.push(client_alpn_protocol.to_vec());
let mut sess = rustls::ClientSession::new(
&Arc::new(config), let mut sess = rustls::ClientConnection::new(
DNSNameRef::try_from_ascii_str("localhost").unwrap(), Arc::new(config),
); ServerName::try_from("localhost").unwrap(),
)
.unwrap();
let mut sock = StdTcpStream::connect(addr).unwrap(); let mut sock = StdTcpStream::connect(addr).unwrap();
let mut stream = rustls::Stream::new(&mut sess, &mut sock); let mut stream = rustls::Stream::new(&mut sess, &mut sock);
// The handshake will fails because the client will not be able to verify the server // The handshake will fails because the client will not be able to verify the server
// certificate, but it doesn't matter here as we are just interested in the negotiated ALPN // certificate, but it doesn't matter here as we are just interested in the negotiated ALPN
// protocol // protocol
let _ = stream.flush(); let _ = stream.flush();
sess.get_alpn_protocol().map(|proto| proto.to_vec())
sess.alpn_protocol().map(|proto| proto.to_vec())
} }
#[actix_rt::test] #[actix_rt::test]
@ -459,7 +477,7 @@ impl From<BadRequest> for Response<AnyBody> {
async fn test_h2_service_error() { async fn test_h2_service_error() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.h2(|_| err::<Response<Body>, _>(BadRequest)) .h2(|_| err::<Response<AnyBody>, _>(BadRequest))
.rustls(tls_config()) .rustls(tls_config())
}) })
.await; .await;
@ -476,7 +494,7 @@ async fn test_h2_service_error() {
async fn test_h1_service_error() { async fn test_h1_service_error() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.h1(|_| err::<Response<Body>, _>(BadRequest)) .h1(|_| err::<Response<AnyBody>, _>(BadRequest))
.rustls(tls_config()) .rustls(tls_config())
}) })
.await; .await;


@ -6,7 +6,7 @@ use std::{
}; };
use actix_http::{ use actix_http::{
body::{AnyBody, Body, SizedStream}, body::{AnyBody, SizedStream},
header, http, Error, HttpMessage, HttpService, KeepAlive, Request, Response, header, http, Error, HttpMessage, HttpService, KeepAlive, Request, Response,
StatusCode, StatusCode,
}; };
@ -24,7 +24,7 @@ use regex::Regex;
#[actix_rt::test] #[actix_rt::test]
async fn test_h1() { async fn test_h1() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.keep_alive(KeepAlive::Disabled) .keep_alive(KeepAlive::Disabled)
.client_timeout(1000) .client_timeout(1000)
@ -39,11 +39,13 @@ async fn test_h1() {
let response = srv.get("/").send().await.unwrap(); let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success()); assert!(response.status().is_success());
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
async fn test_h1_2() { async fn test_h1_2() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.keep_alive(KeepAlive::Disabled) .keep_alive(KeepAlive::Disabled)
.client_timeout(1000) .client_timeout(1000)
@ -59,6 +61,8 @@ async fn test_h1_2() {
let response = srv.get("/").send().await.unwrap(); let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success()); assert!(response.status().is_success());
srv.stop().await;
} }
#[derive(Debug, Display, Error)] #[derive(Debug, Display, Error)]
@ -73,7 +77,7 @@ impl From<ExpectFailed> for Response<AnyBody> {
#[actix_rt::test] #[actix_rt::test]
async fn test_expect_continue() { async fn test_expect_continue() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.expect(fn_service(|req: Request| { .expect(fn_service(|req: Request| {
if req.head().uri.query() == Some("yes=") { if req.head().uri.query() == Some("yes=") {
@ -98,11 +102,13 @@ async fn test_expect_continue() {
let mut data = String::new(); let mut data = String::new();
let _ = stream.read_to_string(&mut data); let _ = stream.read_to_string(&mut data);
assert!(data.starts_with("HTTP/1.1 100 Continue\r\n\r\nHTTP/1.1 200 OK\r\n")); assert!(data.starts_with("HTTP/1.1 100 Continue\r\n\r\nHTTP/1.1 200 OK\r\n"));
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
async fn test_expect_continue_h1() { async fn test_expect_continue_h1() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.expect(fn_service(|req: Request| { .expect(fn_service(|req: Request| {
sleep(Duration::from_millis(20)).then(move |_| { sleep(Duration::from_millis(20)).then(move |_| {
@ -129,6 +135,8 @@ async fn test_expect_continue_h1() {
let mut data = String::new(); let mut data = String::new();
let _ = stream.read_to_string(&mut data); let _ = stream.read_to_string(&mut data);
assert!(data.starts_with("HTTP/1.1 100 Continue\r\n\r\nHTTP/1.1 200 OK\r\n")); assert!(data.starts_with("HTTP/1.1 100 Continue\r\n\r\nHTTP/1.1 200 OK\r\n"));
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
@ -136,7 +144,7 @@ async fn test_chunked_payload() {
let chunk_sizes = vec![32768, 32, 32768]; let chunk_sizes = vec![32768, 32, 32768];
let total_size: usize = chunk_sizes.iter().sum(); let total_size: usize = chunk_sizes.iter().sum();
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.h1(fn_service(|mut request: Request| { .h1(fn_service(|mut request: Request| {
request request
@ -183,15 +191,18 @@ async fn test_chunked_payload() {
Some(caps) => caps.get(1).unwrap().as_str().parse().unwrap(), Some(caps) => caps.get(1).unwrap().as_str().parse().unwrap(),
None => panic!("Failed to find size in HTTP Response: {}", data), None => panic!("Failed to find size in HTTP Response: {}", data),
}; };
size size
}; };
assert_eq!(returned_size, total_size); assert_eq!(returned_size, total_size);
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
async fn test_slow_request() { async fn test_slow_request() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.client_timeout(100) .client_timeout(100)
.finish(|_| ok::<_, Infallible>(Response::ok())) .finish(|_| ok::<_, Infallible>(Response::ok()))
@ -204,11 +215,13 @@ async fn test_slow_request() {
let mut data = String::new(); let mut data = String::new();
let _ = stream.read_to_string(&mut data); let _ = stream.read_to_string(&mut data);
assert!(data.starts_with("HTTP/1.1 408 Request Timeout")); assert!(data.starts_with("HTTP/1.1 408 Request Timeout"));
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
async fn test_http1_malformed_request() { async fn test_http1_malformed_request() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok())) .h1(|_| ok::<_, Infallible>(Response::ok()))
.tcp() .tcp()
@ -220,11 +233,13 @@ async fn test_http1_malformed_request() {
let mut data = String::new(); let mut data = String::new();
let _ = stream.read_to_string(&mut data); let _ = stream.read_to_string(&mut data);
assert!(data.starts_with("HTTP/1.1 400 Bad Request")); assert!(data.starts_with("HTTP/1.1 400 Bad Request"));
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
async fn test_http1_keepalive() { async fn test_http1_keepalive() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok())) .h1(|_| ok::<_, Infallible>(Response::ok()))
.tcp() .tcp()
@ -241,11 +256,13 @@ async fn test_http1_keepalive() {
let mut data = vec![0; 1024]; let mut data = vec![0; 1024];
let _ = stream.read(&mut data); let _ = stream.read(&mut data);
assert_eq!(&data[..17], b"HTTP/1.1 200 OK\r\n"); assert_eq!(&data[..17], b"HTTP/1.1 200 OK\r\n");
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
async fn test_http1_keepalive_timeout() { async fn test_http1_keepalive_timeout() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.keep_alive(1) .keep_alive(1)
.h1(|_| ok::<_, Infallible>(Response::ok())) .h1(|_| ok::<_, Infallible>(Response::ok()))
@ -263,11 +280,13 @@ async fn test_http1_keepalive_timeout() {
let mut data = vec![0; 1024]; let mut data = vec![0; 1024];
let res = stream.read(&mut data).unwrap(); let res = stream.read(&mut data).unwrap();
assert_eq!(res, 0); assert_eq!(res, 0);
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
async fn test_http1_keepalive_close() { async fn test_http1_keepalive_close() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok())) .h1(|_| ok::<_, Infallible>(Response::ok()))
.tcp() .tcp()
@ -284,11 +303,13 @@ async fn test_http1_keepalive_close() {
let mut data = vec![0; 1024]; let mut data = vec![0; 1024];
let res = stream.read(&mut data).unwrap(); let res = stream.read(&mut data).unwrap();
assert_eq!(res, 0); assert_eq!(res, 0);
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
async fn test_http10_keepalive_default_close() { async fn test_http10_keepalive_default_close() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok())) .h1(|_| ok::<_, Infallible>(Response::ok()))
.tcp() .tcp()
@ -304,11 +325,13 @@ async fn test_http10_keepalive_default_close() {
let mut data = vec![0; 1024]; let mut data = vec![0; 1024];
let res = stream.read(&mut data).unwrap(); let res = stream.read(&mut data).unwrap();
assert_eq!(res, 0); assert_eq!(res, 0);
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
async fn test_http10_keepalive() { async fn test_http10_keepalive() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok())) .h1(|_| ok::<_, Infallible>(Response::ok()))
.tcp() .tcp()
@ -331,11 +354,13 @@ async fn test_http10_keepalive() {
let mut data = vec![0; 1024]; let mut data = vec![0; 1024];
let res = stream.read(&mut data).unwrap(); let res = stream.read(&mut data).unwrap();
assert_eq!(res, 0); assert_eq!(res, 0);
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
async fn test_http1_keepalive_disabled() { async fn test_http1_keepalive_disabled() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.keep_alive(KeepAlive::Disabled) .keep_alive(KeepAlive::Disabled)
.h1(|_| ok::<_, Infallible>(Response::ok())) .h1(|_| ok::<_, Infallible>(Response::ok()))
@ -352,6 +377,8 @@ async fn test_http1_keepalive_disabled() {
let mut data = vec![0; 1024]; let mut data = vec![0; 1024];
let res = stream.read(&mut data).unwrap(); let res = stream.read(&mut data).unwrap();
assert_eq!(res, 0); assert_eq!(res, 0);
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
@ -361,7 +388,7 @@ async fn test_content_length() {
StatusCode, StatusCode,
}; };
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.h1(|req: Request| { .h1(|req: Request| {
let indx: usize = req.uri().path()[1..].parse().unwrap(); let indx: usize = req.uri().path()[1..].parse().unwrap();
@ -399,6 +426,8 @@ async fn test_content_length() {
assert_eq!(response.headers().get(&header), Some(&value)); assert_eq!(response.headers().get(&header), Some(&value));
} }
} }
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
@ -438,6 +467,8 @@ async fn test_h1_headers() {
// read response // read response
let bytes = srv.load_body(response).await.unwrap(); let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from(data2)); assert_eq!(bytes, Bytes::from(data2));
srv.stop().await;
} }
const STR: &str = "Hello World Hello World Hello World Hello World Hello World \ const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
@ -477,6 +508,8 @@ async fn test_h1_body() {
// read response // read response
let bytes = srv.load_body(response).await.unwrap(); let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref())); assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
@ -502,6 +535,8 @@ async fn test_h1_head_empty() {
// read response // read response
let bytes = srv.load_body(response).await.unwrap(); let bytes = srv.load_body(response).await.unwrap();
assert!(bytes.is_empty()); assert!(bytes.is_empty());
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
@ -527,11 +562,13 @@ async fn test_h1_head_binary() {
// read response // read response
let bytes = srv.load_body(response).await.unwrap(); let bytes = srv.load_body(response).await.unwrap();
assert!(bytes.is_empty()); assert!(bytes.is_empty());
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
async fn test_h1_head_binary2() { async fn test_h1_head_binary2() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok().set_body(STR))) .h1(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.tcp() .tcp()
@ -548,6 +585,8 @@ async fn test_h1_head_binary2() {
.unwrap(); .unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap()); assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
} }
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
@ -570,6 +609,8 @@ async fn test_h1_body_length() {
// read response // read response
let bytes = srv.load_body(response).await.unwrap(); let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref())); assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
@ -605,6 +646,8 @@ async fn test_h1_body_chunked_explicit() {
// decode // decode
assert_eq!(bytes, Bytes::from_static(STR.as_ref())); assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
@ -634,6 +677,8 @@ async fn test_h1_body_chunked_implicit() {
// read response // read response
let bytes = srv.load_body(response).await.unwrap(); let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref())); assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
@ -661,6 +706,8 @@ async fn test_h1_response_http_error_handling() {
bytes, bytes,
Bytes::from_static(b"error processing HTTP: failed to parse header value") Bytes::from_static(b"error processing HTTP: failed to parse header value")
); );
srv.stop().await;
} }
#[derive(Debug, Display, Error)] #[derive(Debug, Display, Error)]
@ -677,7 +724,7 @@ impl From<BadRequest> for Response<AnyBody> {
async fn test_h1_service_error() { async fn test_h1_service_error() {
let mut srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.h1(|_| err::<Response<Body>, _>(BadRequest)) .h1(|_| err::<Response<AnyBody>, _>(BadRequest))
.tcp() .tcp()
}) })
.await; .await;
@ -688,11 +735,13 @@ async fn test_h1_service_error() {
// read response // read response
let bytes = srv.load_body(response).await.unwrap(); let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(b"error")); assert_eq!(bytes, Bytes::from_static(b"error"));
srv.stop().await;
} }
#[actix_rt::test] #[actix_rt::test]
async fn test_h1_on_connect() { async fn test_h1_on_connect() {
let srv = test_server(|| { let mut srv = test_server(|| {
HttpService::build() HttpService::build()
.on_connect_ext(|_, data| { .on_connect_ext(|_, data| {
data.insert(20isize); data.insert(20isize);
@ -707,4 +756,93 @@ async fn test_h1_on_connect() {
let response = srv.get("/").send().await.unwrap(); let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success()); assert!(response.status().is_success());
srv.stop().await;
}
/// Tests compliance with 304 Not Modified spec in RFC 7232 §4.1.
/// https://datatracker.ietf.org/doc/html/rfc7232#section-4.1
#[actix_rt::test]
async fn test_not_modified_spec_h1() {
// TODO: the fact that this test takes a few seconds to complete reveals some weirdness with
// either the dispatcher or the client; similar hangs occur in other tests in this file and
// seem to succeed only because of the keep-alive timer
static CL: header::HeaderName = header::CONTENT_LENGTH;
let mut srv = test_server(|| {
HttpService::build()
.h1(|req: Request| {
let res: Response<AnyBody> = match req.path() {
// with no content-length
"/none" => {
Response::with_body(StatusCode::NOT_MODIFIED, AnyBody::None)
}
// with no content-length
"/body" => Response::with_body(
StatusCode::NOT_MODIFIED,
AnyBody::from("1234"),
),
// with manual content-length header and specific None body
"/cl-none" => {
let mut res =
Response::with_body(StatusCode::NOT_MODIFIED, AnyBody::None);
res.headers_mut()
.insert(CL.clone(), header::HeaderValue::from_static("24"));
res
}
// with manual content-length header and ignore-able body
"/cl-body" => {
let mut res = Response::with_body(
StatusCode::NOT_MODIFIED,
AnyBody::from("1234"),
);
res.headers_mut()
.insert(CL.clone(), header::HeaderValue::from_static("4"));
res
}
_ => panic!("unknown route"),
};
ok::<_, Infallible>(res)
})
.tcp()
})
.await;
let res = srv.get("/none").send().await.unwrap();
assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
assert_eq!(res.headers().get(&CL), None);
assert!(srv.load_body(res).await.unwrap().is_empty());
let res = srv.get("/body").send().await.unwrap();
assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
assert_eq!(res.headers().get(&CL), None);
assert!(srv.load_body(res).await.unwrap().is_empty());
let res = srv.get("/cl-none").send().await.unwrap();
assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
assert_eq!(
res.headers().get(&CL),
Some(&header::HeaderValue::from_static("24")),
);
assert!(srv.load_body(res).await.unwrap().is_empty());
let res = srv.get("/cl-body").send().await.unwrap();
assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
assert_eq!(
res.headers().get(&CL),
Some(&header::HeaderValue::from_static("4")),
);
// server does not prevent payload from being sent but clients may choose not to read it
// TODO: this is probably a bug, especially since CL header can differ in length from the body
assert!(!srv.load_body(res).await.unwrap().is_empty());
// TODO: add stream response tests
srv.stop().await;
} }


@ -3,6 +3,25 @@
## Unreleased - 2021-xx-xx ## Unreleased - 2021-xx-xx
## 0.4.0-beta.8 - 2021-11-22
* Ensure a correct Content-Disposition header is included in every part of a multipart message. [#2451]
* Added `MultipartError::NoContentDisposition` variant. [#2451]
* Since Content-Disposition is now ensured, `Field::content_disposition` is now infallible. [#2451]
* Added `Field::name` method for getting the field name. [#2451]
* `MultipartError` now marks variants with inner errors as the source. [#2451]
* `MultipartError` is now marked as non-exhaustive. [#2451]
[#2451]: https://github.com/actix/actix-web/pull/2451
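For orientation, here is a minimal handler sketch against the 0.4.0-beta.8 `Field` API described above. It is an illustration only, not part of the release; the `/upload` route, handler name, and logging are assumptions.

```rust
use actix_multipart::Multipart;
use actix_web::{post, Error, HttpResponse};
use futures_util::StreamExt as _;

// Hypothetical upload handler: `Field::content_disposition` and `Field::name` are now
// infallible because a valid `form-data` Content-Disposition is ensured for every part.
#[post("/upload")]
async fn upload(mut payload: Multipart) -> Result<HttpResponse, Error> {
    while let Some(field) = payload.next().await {
        let mut field = field?;

        // take an owned copy so the borrow does not overlap the next poll of `field`
        let name = field.name().to_owned();

        while let Some(chunk) = field.next().await {
            let chunk = chunk?;
            println!("field `{}`: received {} bytes", name, chunk.len());
        }
    }

    Ok(HttpResponse::Ok().finish())
}
```

Parts without a `form-data` Content-Disposition now fail early with `MultipartError::NoContentDisposition` instead of surfacing as a missing value per field.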
## 0.4.0-beta.7 - 2021-10-20
* Minimum supported Rust version (MSRV) is now 1.52.
## 0.4.0-beta.6 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.
## 0.4.0-beta.5 - 2021-06-17 ## 0.4.0-beta.5 - 2021-06-17
* No notable changes. * No notable changes.


@ -1,13 +1,11 @@
[package] [package]
name = "actix-multipart" name = "actix-multipart"
version = "0.4.0-beta.5" version = "0.4.0-beta.8"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Multipart form support for Actix Web" description = "Multipart form support for Actix Web"
readme = "README.md"
keywords = ["http", "web", "framework", "async", "futures"] keywords = ["http", "web", "framework", "async", "futures"]
homepage = "https://actix.rs" homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git" repository = "https://github.com/actix/actix-web.git"
documentation = "https://docs.rs/actix-multipart"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
edition = "2018" edition = "2018"
@ -16,7 +14,7 @@ name = "actix_multipart"
path = "src/lib.rs" path = "src/lib.rs"
[dependencies] [dependencies]
actix-web = { version = "4.0.0-beta.8", default-features = false } actix-web = { version = "4.0.0-beta.11", default-features = false }
actix-utils = "3.0.0" actix-utils = "3.0.0"
bytes = "1" bytes = "1"
@ -31,6 +29,6 @@ twoway = "0.2"
[dev-dependencies] [dev-dependencies]
actix-rt = "2.2" actix-rt = "2.2"
actix-http = "3.0.0-beta.8" actix-http = "3.0.0-beta.13"
tokio = { version = "1", features = ["sync"] } tokio = { version = "1", features = ["sync"] }
tokio-stream = "0.1" tokio-stream = "0.1"


@ -3,15 +3,15 @@
> Multipart form support for Actix Web. > Multipart form support for Actix Web.
[![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart) [![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart)
[![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.4.0-beta.5)](https://docs.rs/actix-multipart/0.4.0-beta.5) [![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.4.0-beta.8)](https://docs.rs/actix-multipart/0.4.0-beta.8)
[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) [![Version](https://img.shields.io/badge/rustc-1.52+-ab6000.svg)](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-multipart/0.4.0-beta.5/status.svg)](https://deps.rs/crate/actix-multipart/0.4.0-beta.5) [![dependency status](https://deps.rs/crate/actix-multipart/0.4.0-beta.8/status.svg)](https://deps.rs/crate/actix-multipart/0.4.0-beta.8)
[![Download](https://img.shields.io/crates/d/actix-multipart.svg)](https://crates.io/crates/actix-multipart) [![Download](https://img.shields.io/crates/d/actix-multipart.svg)](https://crates.io/crates/actix-multipart)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources ## Documentation & Resources
- [API Documentation](https://docs.rs/actix-multipart) - [API Documentation](https://docs.rs/actix-multipart)
- Minimum Supported Rust Version (MSRV): 1.46.0 - Minimum Supported Rust Version (MSRV): 1.52


@ -2,39 +2,52 @@
use actix_web::error::{ParseError, PayloadError}; use actix_web::error::{ParseError, PayloadError};
use actix_web::http::StatusCode; use actix_web::http::StatusCode;
use actix_web::ResponseError; use actix_web::ResponseError;
use derive_more::{Display, From}; use derive_more::{Display, Error, From};
/// A set of errors that can occur during parsing multipart streams /// A set of errors that can occur during parsing multipart streams
#[derive(Debug, Display, From)] #[non_exhaustive]
#[derive(Debug, Display, From, Error)]
pub enum MultipartError { pub enum MultipartError {
/// Content-Disposition header is not found or is not equal to "form-data".
///
/// According to [RFC 7578](https://tools.ietf.org/html/rfc7578#section-4.2) a
/// Content-Disposition header must always be present and equal to "form-data".
#[display(fmt = "No Content-Disposition `form-data` header")]
NoContentDisposition,
/// Content-Type header is not found /// Content-Type header is not found
#[display(fmt = "No Content-type header found")] #[display(fmt = "No Content-Type header found")]
NoContentType, NoContentType,
/// Can not parse Content-Type header /// Can not parse Content-Type header
#[display(fmt = "Can not parse Content-Type header")] #[display(fmt = "Can not parse Content-Type header")]
ParseContentType, ParseContentType,
/// Multipart boundary is not found /// Multipart boundary is not found
#[display(fmt = "Multipart boundary is not found")] #[display(fmt = "Multipart boundary is not found")]
Boundary, Boundary,
/// Nested multipart is not supported /// Nested multipart is not supported
#[display(fmt = "Nested multipart is not supported")] #[display(fmt = "Nested multipart is not supported")]
Nested, Nested,
/// Multipart stream is incomplete /// Multipart stream is incomplete
#[display(fmt = "Multipart stream is incomplete")] #[display(fmt = "Multipart stream is incomplete")]
Incomplete, Incomplete,
/// Error during field parsing /// Error during field parsing
#[display(fmt = "{}", _0)] #[display(fmt = "{}", _0)]
Parse(ParseError), Parse(ParseError),
/// Payload error /// Payload error
#[display(fmt = "{}", _0)] #[display(fmt = "{}", _0)]
Payload(PayloadError), Payload(PayloadError),
/// Not consumed /// Not consumed
#[display(fmt = "Multipart stream is not consumed")] #[display(fmt = "Multipart stream is not consumed")]
NotConsumed, NotConsumed,
} }
impl std::error::Error for MultipartError {}
/// Return `BadRequest` for `MultipartError` /// Return `BadRequest` for `MultipartError`
impl ResponseError for MultipartError { impl ResponseError for MultipartError {
fn status_code(&self) -> StatusCode { fn status_code(&self) -> StatusCode {


@ -33,7 +33,6 @@ use crate::server::Multipart;
impl FromRequest for Multipart { impl FromRequest for Multipart {
type Error = Error; type Error = Error;
type Future = Ready<Result<Multipart, Error>>; type Future = Ready<Result<Multipart, Error>>;
type Config = ();
#[inline] #[inline]
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {


@ -1,15 +1,20 @@
//! Multipart response payload support. //! Multipart response payload support.
use std::cell::{Cell, RefCell, RefMut}; use std::{
use std::convert::TryFrom; cell::{Cell, RefCell, RefMut},
use std::marker::PhantomData; cmp,
use std::pin::Pin; convert::TryFrom,
use std::rc::Rc; fmt,
use std::task::{Context, Poll}; marker::PhantomData,
use std::{cmp, fmt}; pin::Pin,
rc::Rc,
task::{Context, Poll},
};
use actix_web::error::{ParseError, PayloadError}; use actix_web::{
use actix_web::http::header::{self, ContentDisposition, HeaderMap, HeaderName, HeaderValue}; error::{ParseError, PayloadError},
http::header::{self, ContentDisposition, HeaderMap, HeaderName, HeaderValue},
};
use bytes::{Bytes, BytesMut}; use bytes::{Bytes, BytesMut};
use futures_core::stream::{LocalBoxStream, Stream}; use futures_core::stream::{LocalBoxStream, Stream};
use futures_util::stream::StreamExt as _; use futures_util::stream::StreamExt as _;
@ -40,10 +45,13 @@ enum InnerMultipartItem {
enum InnerState { enum InnerState {
/// Stream eof /// Stream eof
Eof, Eof,
/// Skip data until first boundary /// Skip data until first boundary
FirstBoundary, FirstBoundary,
/// Reading boundary /// Reading boundary
Boundary, Boundary,
/// Reading Headers, /// Reading Headers,
Headers, Headers,
} }
@ -332,31 +340,55 @@ impl InnerMultipart {
return Poll::Pending; return Poll::Pending;
}; };
// content type // According to [RFC 7578](https://tools.ietf.org/html/rfc7578#section-4.2) a
let mut mt = mime::APPLICATION_OCTET_STREAM; // Content-Disposition header must always be present and set to "form-data".
if let Some(content_type) = headers.get(&header::CONTENT_TYPE) {
if let Ok(content_type) = content_type.to_str() { let content_disposition = headers
if let Ok(ct) = content_type.parse::<mime::Mime>() { .get(&header::CONTENT_DISPOSITION)
mt = ct; .and_then(|cd| ContentDisposition::from_raw(cd).ok())
} .filter(|content_disposition| {
} let is_form_data =
} content_disposition.disposition == header::DispositionType::FormData;
let has_field_name = content_disposition
.parameters
.iter()
.any(|param| matches!(param, header::DispositionParam::Name(_)));
is_form_data && has_field_name
});
let cd = if let Some(content_disposition) = content_disposition {
content_disposition
} else {
return Poll::Ready(Some(Err(MultipartError::NoContentDisposition)));
};
let ct: mime::Mime = headers
.get(&header::CONTENT_TYPE)
.and_then(|ct| ct.to_str().ok())
.and_then(|ct| ct.parse().ok())
.unwrap_or(mime::APPLICATION_OCTET_STREAM);
self.state = InnerState::Boundary; self.state = InnerState::Boundary;
// nested multipart stream // nested multipart stream is not supported
if mt.type_() == mime::MULTIPART { if ct.type_() == mime::MULTIPART {
Poll::Ready(Some(Err(MultipartError::Nested))) return Poll::Ready(Some(Err(MultipartError::Nested)));
} else {
let field = Rc::new(RefCell::new(InnerField::new(
self.payload.clone(),
self.boundary.clone(),
&headers,
)?));
self.item = InnerMultipartItem::Field(Rc::clone(&field));
Poll::Ready(Some(Ok(Field::new(safety.clone(cx), headers, mt, field))))
} }
let field =
InnerField::new_in_rc(self.payload.clone(), self.boundary.clone(), &headers)?;
self.item = InnerMultipartItem::Field(Rc::clone(&field));
Poll::Ready(Some(Ok(Field::new(
safety.clone(cx),
headers,
ct,
cd,
field,
))))
} }
} }
} }
@ -371,6 +403,7 @@ impl Drop for InnerMultipart {
/// A single field in a multipart stream /// A single field in a multipart stream
pub struct Field { pub struct Field {
ct: mime::Mime, ct: mime::Mime,
cd: ContentDisposition,
headers: HeaderMap, headers: HeaderMap,
inner: Rc<RefCell<InnerField>>, inner: Rc<RefCell<InnerField>>,
safety: Safety, safety: Safety,
@ -381,35 +414,51 @@ impl Field {
safety: Safety, safety: Safety,
headers: HeaderMap, headers: HeaderMap,
ct: mime::Mime, ct: mime::Mime,
cd: ContentDisposition,
inner: Rc<RefCell<InnerField>>, inner: Rc<RefCell<InnerField>>,
) -> Self { ) -> Self {
Field { Field {
ct, ct,
cd,
headers, headers,
inner, inner,
safety, safety,
} }
} }
/// Get a map of headers /// Returns a reference to the field's header map.
pub fn headers(&self) -> &HeaderMap { pub fn headers(&self) -> &HeaderMap {
&self.headers &self.headers
} }
/// Get the content type of the field /// Returns a reference to the field's content (mime) type.
pub fn content_type(&self) -> &mime::Mime { pub fn content_type(&self) -> &mime::Mime {
&self.ct &self.ct
} }
/// Get the content disposition of the field, if it exists /// Returns the field's Content-Disposition.
pub fn content_disposition(&self) -> Option<ContentDisposition> { ///
// RFC 7578: 'Each part MUST contain a Content-Disposition header field /// Per [RFC 7578 §4.2]: 'Each part MUST contain a Content-Disposition header field where the
// where the disposition type is "form-data".' /// disposition type is "form-data". The Content-Disposition header field MUST also contain an
if let Some(content_disposition) = self.headers.get(&header::CONTENT_DISPOSITION) { /// additional parameter of "name"; the value of the "name" parameter is the original field name
ContentDisposition::from_raw(content_disposition).ok() /// from the form.'
} else { ///
None /// This crate validates that it exists before returning a `Field`. As such, it is safe to
} /// unwrap `.content_disposition().get_name()`. The [name](Self::name) method is provided as
/// a convenience.
///
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
pub fn content_disposition(&self) -> &ContentDisposition {
&self.cd
}
/// Returns the field's name.
///
/// See [content_disposition](Self::content_disposition) regarding guarantees about the presence of the name parameter.
pub fn name(&self) -> &str {
self.content_disposition()
.get_name()
.expect("field name should be guaranteed to exist in multipart form-data")
} }
} }
@ -451,20 +500,23 @@ struct InnerField {
} }
impl InnerField { impl InnerField {
fn new_in_rc(
payload: PayloadRef,
boundary: String,
headers: &HeaderMap,
) -> Result<Rc<RefCell<InnerField>>, PayloadError> {
Self::new(payload, boundary, headers).map(|this| Rc::new(RefCell::new(this)))
}
fn new( fn new(
payload: PayloadRef, payload: PayloadRef,
boundary: String, boundary: String,
headers: &HeaderMap, headers: &HeaderMap,
) -> Result<InnerField, PayloadError> { ) -> Result<InnerField, PayloadError> {
let len = if let Some(len) = headers.get(&header::CONTENT_LENGTH) { let len = if let Some(len) = headers.get(&header::CONTENT_LENGTH) {
if let Ok(s) = len.to_str() { match len.to_str().ok().and_then(|len| len.parse::<u64>().ok()) {
if let Ok(len) = s.parse::<u64>() { Some(len) => Some(len),
Some(len) None => return Err(PayloadError::Incomplete(None)),
} else {
return Err(PayloadError::Incomplete(None));
}
} else {
return Err(PayloadError::Incomplete(None));
} }
} else { } else {
None None
@ -658,9 +710,8 @@ impl Clone for PayloadRef {
} }
} }
/// Counter. It tracks of number of clones of payloads and give access to /// Counter. It tracks of number of clones of payloads and give access to payload only to top most
/// payload only to top most task panics if Safety get destroyed and it not top /// task panics if Safety get destroyed and it not top most task.
/// most task.
#[derive(Debug)] #[derive(Debug)]
struct Safety { struct Safety {
task: LocalWaker, task: LocalWaker,
@ -707,11 +758,12 @@ impl Drop for Safety {
if Rc::strong_count(&self.payload) != self.level { if Rc::strong_count(&self.payload) != self.level {
self.clean.set(true); self.clean.set(true);
} }
self.task.wake(); self.task.wake();
} }
} }
/// Payload buffer /// Payload buffer.
struct PayloadBuffer { struct PayloadBuffer {
eof: bool, eof: bool,
buf: BytesMut, buf: BytesMut,
@ -719,7 +771,7 @@ struct PayloadBuffer {
} }
impl PayloadBuffer { impl PayloadBuffer {
/// Create new `PayloadBuffer` instance /// Constructs new `PayloadBuffer` instance.
fn new<S>(stream: S) -> Self fn new<S>(stream: S) -> Self
where where
S: Stream<Item = Result<Bytes, PayloadError>> + 'static, S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
@ -767,7 +819,7 @@ impl PayloadBuffer {
} }
/// Read until specified ending /// Read until specified ending
pub fn read_until(&mut self, line: &[u8]) -> Result<Option<Bytes>, MultipartError> { fn read_until(&mut self, line: &[u8]) -> Result<Option<Bytes>, MultipartError> {
let res = twoway::find_bytes(&self.buf, line) let res = twoway::find_bytes(&self.buf, line)
.map(|idx| self.buf.split_to(idx + line.len()).freeze()); .map(|idx| self.buf.split_to(idx + line.len()).freeze());
@ -779,12 +831,12 @@ impl PayloadBuffer {
} }
/// Read bytes until new line delimiter /// Read bytes until new line delimiter
pub fn readline(&mut self) -> Result<Option<Bytes>, MultipartError> { fn readline(&mut self) -> Result<Option<Bytes>, MultipartError> {
self.read_until(b"\n") self.read_until(b"\n")
} }
/// Read bytes until new line delimiter or eof /// Read bytes until new line delimiter or eof
pub fn readline_or_eof(&mut self) -> Result<Option<Bytes>, MultipartError> { fn readline_or_eof(&mut self) -> Result<Option<Bytes>, MultipartError> {
match self.readline() { match self.readline() {
Err(MultipartError::Incomplete) if self.eof => Ok(Some(self.buf.split().freeze())), Err(MultipartError::Incomplete) if self.eof => Ok(Some(self.buf.split().freeze())),
line => line, line => line,
@ -792,7 +844,7 @@ impl PayloadBuffer {
} }
/// Put unprocessed data back to the buffer /// Put unprocessed data back to the buffer
pub fn unprocessed(&mut self, data: Bytes) { fn unprocessed(&mut self, data: Bytes) {
let buf = BytesMut::from(data.as_ref()); let buf = BytesMut::from(data.as_ref());
let buf = std::mem::replace(&mut self.buf, buf); let buf = std::mem::replace(&mut self.buf, buf);
self.buf.extend_from_slice(&buf); self.buf.extend_from_slice(&buf);
@ -914,6 +966,7 @@ mod tests {
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\ Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
test\r\n\ test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\ --abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\ Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
data\r\n\ data\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0--\r\n", --abbc761f78ff4d7cb7573b5a23f96ef0--\r\n",
@ -965,7 +1018,7 @@ mod tests {
let mut multipart = Multipart::new(&headers, payload); let mut multipart = Multipart::new(&headers, payload);
match multipart.next().await { match multipart.next().await {
Some(Ok(mut field)) => { Some(Ok(mut field)) => {
let cd = field.content_disposition().unwrap(); let cd = field.content_disposition();
assert_eq!(cd.disposition, DispositionType::FormData); assert_eq!(cd.disposition, DispositionType::FormData);
assert_eq!(cd.parameters[0], DispositionParam::Name("file".into())); assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));
@ -1027,7 +1080,7 @@ mod tests {
let mut multipart = Multipart::new(&headers, payload); let mut multipart = Multipart::new(&headers, payload);
match multipart.next().await.unwrap() { match multipart.next().await.unwrap() {
Ok(mut field) => { Ok(mut field) => {
let cd = field.content_disposition().unwrap(); let cd = field.content_disposition();
assert_eq!(cd.disposition, DispositionType::FormData); assert_eq!(cd.disposition, DispositionType::FormData);
assert_eq!(cd.parameters[0], DispositionParam::Name("file".into())); assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));
@ -1182,4 +1235,59 @@ mod tests {
_ => unreachable!(), _ => unreachable!(),
} }
} }
#[actix_rt::test]
async fn no_content_disposition() {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
let payload = SlowStream::new(bytes);
let mut multipart = Multipart::new(&headers, payload);
let res = multipart.next().await.unwrap();
assert!(res.is_err());
assert!(matches!(
res.unwrap_err(),
MultipartError::NoContentDisposition,
));
}
#[actix_rt::test]
async fn no_name_in_content_disposition() {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
let payload = SlowStream::new(bytes);
let mut multipart = Multipart::new(&headers, payload);
let res = multipart.next().await.unwrap();
assert!(res.is_err());
assert!(matches!(
res.unwrap_err(),
MultipartError::NoContentDisposition,
));
}
} }

actix-router/CHANGES.md Normal file

@ -0,0 +1,132 @@
# Changes
## Unreleased - 2021-xx-xx
* Minimum supported Rust version (MSRV) is now 1.52.
## 0.5.0-beta.2 - 2021-09-09
* Introduce `ResourceDef::join`. [#380]
* Disallow prefix routes with tail segments. [#379]
* Enforce path separators on dynamic prefixes. [#378]
* Improve malformed path error message. [#384]
* Prefix segments now always end with a segment delimiter or end-of-input. [#2355]
* Prefix segments with trailing slashes define a trailing empty segment. [#2355]
* Support multi-pattern prefixes and joins. [#2356]
* `ResourceDef::pattern` now returns the first pattern in multi-pattern resources. [#2356]
* Support `build_resource_path` on multi-pattern resources. [#2356]
* Minimum supported Rust version (MSRV) is now 1.51.
[#378]: https://github.com/actix/actix-net/pull/378
[#379]: https://github.com/actix/actix-net/pull/379
[#380]: https://github.com/actix/actix-net/pull/380
[#384]: https://github.com/actix/actix-net/pull/384
[#2355]: https://github.com/actix/actix-web/pull/2355
[#2356]: https://github.com/actix/actix-web/pull/2356
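As a rough sketch of the matching API these router changes touch (the patterns and values below are invented for illustration; the call pattern mirrors the Criterion benchmark further down):

```rust
use actix_router::{Path, Router};

fn main() {
    // Build a router that maps path patterns to arbitrary values (here, route names).
    let mut builder = Router::<&'static str>::build();
    builder.path("/users/{id}", "user-detail");
    builder.path("/users/{id}/posts/{post_id}", "user-post");
    let router = builder.finish();

    // Match a concrete request path and read the captured segments.
    let mut path = Path::new("/users/42/posts/7");
    if let Some((value, _resource_id)) = router.recognize(&mut path) {
        assert_eq!(*value, "user-post");
        assert_eq!(path.get("id"), Some("42"));
        assert_eq!(path.get("post_id"), Some("7"));
    }
}
```

Capture names use the same `{name}` bracket syntax exercised by the benchmark's `register!(brackets)` patterns.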
## 0.5.0-beta.1 - 2021-07-20
* Fix a bug in multi-patterns where static patterns are interpreted as regex. [#366]
* Introduce `ResourceDef::pattern_iter` to get an iterator over all patterns in a multi-pattern resource. [#373]
* Fix segment interpolation leaving `Path` in unintended state after matching. [#368]
* Fix `ResourceDef` `PartialEq` implementation. [#373]
* Re-work `IntoPatterns` trait, adding a `Patterns` enum. [#372]
* Implement `IntoPatterns` for `bytestring::ByteString`. [#372]
* Rename `Path::{len => segment_count}` to be more descriptive of its purpose. [#370]
* Rename `ResourceDef::{resource_path => resource_path_from_iter}`. [#371]
* `ResourceDef::resource_path_from_iter` now takes an `IntoIterator`. [#373]
* Rename `ResourceDef::{resource_path_named => resource_path_from_map}`. [#371]
* Rename `ResourceDef::{is_prefix_match => find_match}`. [#373]
* Rename `ResourceDef::{match_path => capture_match_info}`. [#373]
* Rename `ResourceDef::{match_path_checked => capture_match_info_fn}`. [#373]
* Remove `ResourceDef::name_mut` and introduce `ResourceDef::set_name`. [#373]
* Rename `Router::{*_checked => *_fn}`. [#373]
* Return type of `ResourceDef::name` is now `Option<&str>`. [#373]
* Return type of `ResourceDef::pattern` is now `Option<&str>`. [#373]
[#366]: https://github.com/actix/actix-net/pull/366
[#368]: https://github.com/actix/actix-net/pull/368
[#370]: https://github.com/actix/actix-net/pull/370
[#371]: https://github.com/actix/actix-net/pull/371
[#372]: https://github.com/actix/actix-net/pull/372
[#373]: https://github.com/actix/actix-net/pull/373
## 0.4.0 - 2021-06-06
* When matching path parameters, `%25` is now kept in the percent-encoded form; no longer decoded to `%`. [#357]
* Path tail patterns now match new lines (`\n`) in request URL. [#360]
* Fixed a safety bug where `Path` could return a malformed string after percent decoding. [#359]
* Methods `Path::{add, add_static}` now take `impl Into<Cow<'static, str>>`. [#345]
[#345]: https://github.com/actix/actix-net/pull/345
[#357]: https://github.com/actix/actix-net/pull/357
[#359]: https://github.com/actix/actix-net/pull/359
[#360]: https://github.com/actix/actix-net/pull/360
## 0.3.0 - 2019-12-31
* Version was yanked previously. See https://crates.io/crates/actix-router/0.3.0
## 0.2.7 - 2021-02-06
* Add `Router::recognize_checked` [#247]
[#247]: https://github.com/actix/actix-net/pull/247
## 0.2.6 - 2021-01-09
* Use `bytestring` version range compatible with Bytes v1.0. [#246]
[#246]: https://github.com/actix/actix-net/pull/246
## 0.2.5 - 2020-09-20
* Fix `from_hex()` method
## 0.2.4 - 2019-12-31
* Add `ResourceDef::resource_path_named()` path generation method
## 0.2.3 - 2019-12-25
* Add impl `IntoPattern` for `&String`
## 0.2.2 - 2019-12-25
* Use `IntoPattern` for `RouterBuilder::path()`
## 0.2.1 - 2019-12-25
* Add `IntoPattern` trait
* Add multi-pattern resources
## 0.2.0 - 2019-12-07
* Update http to 0.2
* Update regex to 1.3
* Use bytestring instead of string
## 0.1.5 - 2019-05-15
* Remove debug prints
## 0.1.4 - 2019-05-15
* Fix checked resource match
## 0.1.3 - 2019-04-22
* Added support for `remainder match` (i.e "/path/{tail}*")
## 0.1.2 - 2019-04-07
* Export `Quoter` type
* Allow to reset `Path` instance
## 0.1.1 - 2019-04-03
* Get dynamic segment by name instead of iterator.
## 0.1.0 - 2019-03-09
* Initial release

actix-router/Cargo.toml Normal file

@ -0,0 +1,38 @@
[package]
name = "actix-router"
version = "0.5.0-beta.2"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
]
description = "Resource path matching and router"
keywords = ["actix", "router", "routing"]
repository = "https://github.com/actix/actix-web.git"
license = "MIT OR Apache-2.0"
edition = "2018"
[lib]
name = "actix_router"
path = "src/lib.rs"
[features]
default = ["http"]
[dependencies]
bytestring = ">=0.1.5, <2"
firestorm = "0.4"
http = { version = "0.2.3", optional = true }
log = "0.4"
regex = "1.5"
serde = "1"
[dev-dependencies]
criterion = { version = "0.3", features = ["html_reports"] }
firestorm = { version = "0.4", features = ["enable_system_time"] }
http = "0.2.5"
serde = { version = "1", features = ["derive"] }
[[bench]]
name = "router"
harness = false

actix-router/LICENSE-APACHE Symbolic link

@ -0,0 +1 @@
../LICENSE-APACHE

actix-router/LICENSE-MIT Symbolic link

@ -0,0 +1 @@
../LICENSE-MIT


@ -0,0 +1,194 @@
//! Based on https://github.com/ibraheemdev/matchit/blob/master/benches/bench.rs
use criterion::{black_box, criterion_group, criterion_main, Criterion};
macro_rules! register {
(colon) => {{
register!(finish => ":p1", ":p2", ":p3", ":p4")
}};
(brackets) => {{
register!(finish => "{p1}", "{p2}", "{p3}", "{p4}")
}};
(regex) => {{
register!(finish => "(.*)", "(.*)", "(.*)", "(.*)")
}};
(finish => $p1:literal, $p2:literal, $p3:literal, $p4:literal) => {{
let arr = [
concat!("/authorizations"),
concat!("/authorizations/", $p1),
concat!("/applications/", $p1, "/tokens/", $p2),
concat!("/events"),
concat!("/repos/", $p1, "/", $p2, "/events"),
concat!("/networks/", $p1, "/", $p2, "/events"),
concat!("/orgs/", $p1, "/events"),
concat!("/users/", $p1, "/received_events"),
concat!("/users/", $p1, "/received_events/public"),
concat!("/users/", $p1, "/events"),
concat!("/users/", $p1, "/events/public"),
concat!("/users/", $p1, "/events/orgs/", $p2),
concat!("/feeds"),
concat!("/notifications"),
concat!("/repos/", $p1, "/", $p2, "/notifications"),
concat!("/notifications/threads/", $p1),
concat!("/notifications/threads/", $p1, "/subscription"),
concat!("/repos/", $p1, "/", $p2, "/stargazers"),
concat!("/users/", $p1, "/starred"),
concat!("/user/starred"),
concat!("/user/starred/", $p1, "/", $p2),
concat!("/repos/", $p1, "/", $p2, "/subscribers"),
concat!("/users/", $p1, "/subscriptions"),
concat!("/user/subscriptions"),
concat!("/repos/", $p1, "/", $p2, "/subscription"),
concat!("/user/subscriptions/", $p1, "/", $p2),
concat!("/users/", $p1, "/gists"),
concat!("/gists"),
concat!("/gists/", $p1),
concat!("/gists/", $p1, "/star"),
concat!("/repos/", $p1, "/", $p2, "/git/blobs/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/commits/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/refs"),
concat!("/repos/", $p1, "/", $p2, "/git/tags/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/trees/", $p3),
concat!("/issues"),
concat!("/user/issues"),
concat!("/orgs/", $p1, "/issues"),
concat!("/repos/", $p1, "/", $p2, "/issues"),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3),
concat!("/repos/", $p1, "/", $p2, "/assignees"),
concat!("/repos/", $p1, "/", $p2, "/assignees/", $p3),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/events"),
concat!("/repos/", $p1, "/", $p2, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/labels/", $p3),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/milestones/"),
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3),
concat!("/emojis"),
concat!("/gitignore/templates"),
concat!("/gitignore/templates/", $p1),
concat!("/meta"),
concat!("/rate_limit"),
concat!("/users/", $p1, "/orgs"),
concat!("/user/orgs"),
concat!("/orgs/", $p1),
concat!("/orgs/", $p1, "/members"),
concat!("/orgs/", $p1, "/members", $p2),
concat!("/orgs/", $p1, "/public_members"),
concat!("/orgs/", $p1, "/public_members/", $p2),
concat!("/orgs/", $p1, "/teams"),
concat!("/teams/", $p1),
concat!("/teams/", $p1, "/members"),
concat!("/teams/", $p1, "/members", $p2),
concat!("/teams/", $p1, "/repos"),
concat!("/teams/", $p1, "/repos/", $p2, "/", $p3),
concat!("/user/teams"),
concat!("/repos/", $p1, "/", $p2, "/pulls"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/commits"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/files"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/merge"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/comments"),
concat!("/user/repos"),
concat!("/users/", $p1, "/repos"),
concat!("/orgs/", $p1, "/repos"),
concat!("/repositories"),
concat!("/repos/", $p1, "/", $p2),
concat!("/repos/", $p1, "/", $p2, "/contributors"),
concat!("/repos/", $p1, "/", $p2, "/languages"),
concat!("/repos/", $p1, "/", $p2, "/teams"),
concat!("/repos/", $p1, "/", $p2, "/tags"),
concat!("/repos/", $p1, "/", $p2, "/branches"),
concat!("/repos/", $p1, "/", $p2, "/branches/", $p3),
concat!("/repos/", $p1, "/", $p2, "/collaborators"),
concat!("/repos/", $p1, "/", $p2, "/collaborators/", $p3),
concat!("/repos/", $p1, "/", $p2, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/commits"),
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3),
concat!("/repos/", $p1, "/", $p2, "/readme"),
concat!("/repos/", $p1, "/", $p2, "/keys"),
concat!("/repos/", $p1, "/", $p2, "/keys", $p3),
concat!("/repos/", $p1, "/", $p2, "/downloads"),
concat!("/repos/", $p1, "/", $p2, "/downloads", $p3),
concat!("/repos/", $p1, "/", $p2, "/forks"),
concat!("/repos/", $p1, "/", $p2, "/hooks"),
concat!("/repos/", $p1, "/", $p2, "/hooks", $p3),
concat!("/repos/", $p1, "/", $p2, "/releases"),
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3),
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3, "/assets"),
concat!("/repos/", $p1, "/", $p2, "/stats/contributors"),
concat!("/repos/", $p1, "/", $p2, "/stats/commit_activity"),
concat!("/repos/", $p1, "/", $p2, "/stats/code_frequency"),
concat!("/repos/", $p1, "/", $p2, "/stats/participation"),
concat!("/repos/", $p1, "/", $p2, "/stats/punch_card"),
concat!("/repos/", $p1, "/", $p2, "/statuses/", $p3),
concat!("/search/repositories"),
concat!("/search/code"),
concat!("/search/issues"),
concat!("/search/users"),
concat!("/legacy/issues/search/", $p1, "/", $p2, "/", $p3, "/", $p4),
concat!("/legacy/repos/search/", $p1),
concat!("/legacy/user/search/", $p1),
concat!("/legacy/user/email/", $p1),
concat!("/users/", $p1),
concat!("/user"),
concat!("/users"),
concat!("/user/emails"),
concat!("/users/", $p1, "/followers"),
concat!("/user/followers"),
concat!("/users/", $p1, "/following"),
concat!("/user/following"),
concat!("/user/following/", $p1),
concat!("/users/", $p1, "/following", $p2),
concat!("/users/", $p1, "/keys"),
concat!("/user/keys"),
concat!("/user/keys/", $p1),
];
std::array::IntoIter::new(arr)
}};
}
fn call() -> impl Iterator<Item = &'static str> {
let arr = [
"/authorizations",
"/user/repos",
"/repos/rust-lang/rust/stargazers",
"/orgs/rust-lang/public_members/nikomatsakis",
"/repos/rust-lang/rust/releases/1.51.0",
];
std::array::IntoIter::new(arr)
}
fn compare_routers(c: &mut Criterion) {
let mut group = c.benchmark_group("Compare Routers");
let mut actix = actix_router::Router::<bool>::build();
for route in register!(brackets) {
actix.path(route, true);
}
let actix = actix.finish();
group.bench_function("actix", |b| {
b.iter(|| {
for route in call() {
let mut path = actix_router::Path::new(route);
black_box(actix.recognize(&mut path).unwrap());
}
});
});
let regex_set = regex::RegexSet::new(register!(regex)).unwrap();
group.bench_function("regex", |b| {
b.iter(|| {
for route in call() {
black_box(regex_set.matches(route));
}
});
});
group.finish();
}
criterion_group!(benches, compare_routers);
criterion_main!(benches);
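The `register!` macro above builds one static table of GitHub-style route templates and lets each router flavor splice in its own placeholder syntax through `concat!`. A minimal sketch of the same trick, using only the standard library; the `routes!` name, the two-parameter arms, and the placeholder strings are illustrative, not part of the benchmark:

// Build the same path list with different placeholder syntaxes at compile time.
// One `finish` arm holds the templates; thin wrapper arms choose the placeholders.
macro_rules! routes {
    (brackets) => { routes!(finish => "{user}", "{repo}") };
    (regex) => { routes!(finish => "(.*)", "(.*)") };
    (finish => $p1:literal, $p2:literal) => {
        [
            concat!("/users/", $p1),
            concat!("/repos/", $p1, "/", $p2),
        ]
    };
}

fn main() {
    // Expands to ["/users/{user}", "/repos/{user}/{repo}"] for the actix-router flavor.
    for route in std::array::IntoIter::new(routes!(brackets)) {
        println!("{}", route);
    }
    // Expands to ["/users/(.*)", "/repos/(.*)/(.*)"] for the RegexSet flavor.
    for route in std::array::IntoIter::new(routes!(regex)) {
        println!("{}", route);
    }
}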


@ -0,0 +1,169 @@
macro_rules! register {
(brackets) => {{
register!(finish => "{p1}", "{p2}", "{p3}", "{p4}")
}};
(finish => $p1:literal, $p2:literal, $p3:literal, $p4:literal) => {{
let arr = [
concat!("/authorizations"),
concat!("/authorizations/", $p1),
concat!("/applications/", $p1, "/tokens/", $p2),
concat!("/events"),
concat!("/repos/", $p1, "/", $p2, "/events"),
concat!("/networks/", $p1, "/", $p2, "/events"),
concat!("/orgs/", $p1, "/events"),
concat!("/users/", $p1, "/received_events"),
concat!("/users/", $p1, "/received_events/public"),
concat!("/users/", $p1, "/events"),
concat!("/users/", $p1, "/events/public"),
concat!("/users/", $p1, "/events/orgs/", $p2),
concat!("/feeds"),
concat!("/notifications"),
concat!("/repos/", $p1, "/", $p2, "/notifications"),
concat!("/notifications/threads/", $p1),
concat!("/notifications/threads/", $p1, "/subscription"),
concat!("/repos/", $p1, "/", $p2, "/stargazers"),
concat!("/users/", $p1, "/starred"),
concat!("/user/starred"),
concat!("/user/starred/", $p1, "/", $p2),
concat!("/repos/", $p1, "/", $p2, "/subscribers"),
concat!("/users/", $p1, "/subscriptions"),
concat!("/user/subscriptions"),
concat!("/repos/", $p1, "/", $p2, "/subscription"),
concat!("/user/subscriptions/", $p1, "/", $p2),
concat!("/users/", $p1, "/gists"),
concat!("/gists"),
concat!("/gists/", $p1),
concat!("/gists/", $p1, "/star"),
concat!("/repos/", $p1, "/", $p2, "/git/blobs/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/commits/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/refs"),
concat!("/repos/", $p1, "/", $p2, "/git/tags/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/trees/", $p3),
concat!("/issues"),
concat!("/user/issues"),
concat!("/orgs/", $p1, "/issues"),
concat!("/repos/", $p1, "/", $p2, "/issues"),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3),
concat!("/repos/", $p1, "/", $p2, "/assignees"),
concat!("/repos/", $p1, "/", $p2, "/assignees/", $p3),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/events"),
concat!("/repos/", $p1, "/", $p2, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/labels/", $p3),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/milestones/"),
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3),
concat!("/emojis"),
concat!("/gitignore/templates"),
concat!("/gitignore/templates/", $p1),
concat!("/meta"),
concat!("/rate_limit"),
concat!("/users/", $p1, "/orgs"),
concat!("/user/orgs"),
concat!("/orgs/", $p1),
concat!("/orgs/", $p1, "/members"),
concat!("/orgs/", $p1, "/members", $p2),
concat!("/orgs/", $p1, "/public_members"),
concat!("/orgs/", $p1, "/public_members/", $p2),
concat!("/orgs/", $p1, "/teams"),
concat!("/teams/", $p1),
concat!("/teams/", $p1, "/members"),
concat!("/teams/", $p1, "/members", $p2),
concat!("/teams/", $p1, "/repos"),
concat!("/teams/", $p1, "/repos/", $p2, "/", $p3),
concat!("/user/teams"),
concat!("/repos/", $p1, "/", $p2, "/pulls"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/commits"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/files"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/merge"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/comments"),
concat!("/user/repos"),
concat!("/users/", $p1, "/repos"),
concat!("/orgs/", $p1, "/repos"),
concat!("/repositories"),
concat!("/repos/", $p1, "/", $p2),
concat!("/repos/", $p1, "/", $p2, "/contributors"),
concat!("/repos/", $p1, "/", $p2, "/languages"),
concat!("/repos/", $p1, "/", $p2, "/teams"),
concat!("/repos/", $p1, "/", $p2, "/tags"),
concat!("/repos/", $p1, "/", $p2, "/branches"),
concat!("/repos/", $p1, "/", $p2, "/branches/", $p3),
concat!("/repos/", $p1, "/", $p2, "/collaborators"),
concat!("/repos/", $p1, "/", $p2, "/collaborators/", $p3),
concat!("/repos/", $p1, "/", $p2, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/commits"),
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3),
concat!("/repos/", $p1, "/", $p2, "/readme"),
concat!("/repos/", $p1, "/", $p2, "/keys"),
concat!("/repos/", $p1, "/", $p2, "/keys", $p3),
concat!("/repos/", $p1, "/", $p2, "/downloads"),
concat!("/repos/", $p1, "/", $p2, "/downloads", $p3),
concat!("/repos/", $p1, "/", $p2, "/forks"),
concat!("/repos/", $p1, "/", $p2, "/hooks"),
concat!("/repos/", $p1, "/", $p2, "/hooks", $p3),
concat!("/repos/", $p1, "/", $p2, "/releases"),
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3),
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3, "/assets"),
concat!("/repos/", $p1, "/", $p2, "/stats/contributors"),
concat!("/repos/", $p1, "/", $p2, "/stats/commit_activity"),
concat!("/repos/", $p1, "/", $p2, "/stats/code_frequency"),
concat!("/repos/", $p1, "/", $p2, "/stats/participation"),
concat!("/repos/", $p1, "/", $p2, "/stats/punch_card"),
concat!("/repos/", $p1, "/", $p2, "/statuses/", $p3),
concat!("/search/repositories"),
concat!("/search/code"),
concat!("/search/issues"),
concat!("/search/users"),
concat!("/legacy/issues/search/", $p1, "/", $p2, "/", $p3, "/", $p4),
concat!("/legacy/repos/search/", $p1),
concat!("/legacy/user/search/", $p1),
concat!("/legacy/user/email/", $p1),
concat!("/users/", $p1),
concat!("/user"),
concat!("/users"),
concat!("/user/emails"),
concat!("/users/", $p1, "/followers"),
concat!("/user/followers"),
concat!("/users/", $p1, "/following"),
concat!("/user/following"),
concat!("/user/following/", $p1),
concat!("/users/", $p1, "/following", $p2),
concat!("/users/", $p1, "/keys"),
concat!("/user/keys"),
concat!("/user/keys/", $p1),
];
arr.to_vec()
}};
}
static PATHS: [&str; 5] = [
"/authorizations",
"/user/repos",
"/repos/rust-lang/rust/stargazers",
"/orgs/rust-lang/public_members/nikomatsakis",
"/repos/rust-lang/rust/releases/1.51.0",
];
fn main() {
let mut router = actix_router::Router::<bool>::build();
for route in register!(brackets) {
router.path(route, true);
}
let actix = router.finish();
if firestorm::enabled() {
firestorm::bench("target", || {
for &route in &PATHS {
let mut path = actix_router::Path::new(route);
actix.recognize(&mut path).unwrap();
}
})
.unwrap();
}
}

actix-router/src/de.rs (new file, 723 lines)

@ -0,0 +1,723 @@
use serde::de::{self, Deserializer, Error as DeError, Visitor};
use serde::forward_to_deserialize_any;
use crate::path::{Path, PathIter};
use crate::ResourcePath;
macro_rules! unsupported_type {
($trait_fn:ident, $name:expr) => {
fn $trait_fn<V>(self, _: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom(concat!(
"unsupported type: ",
$name
)))
}
};
}
macro_rules! parse_single_value {
($trait_fn:ident, $visit_fn:ident, $tp:tt) => {
fn $trait_fn<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
if self.path.segment_count() != 1 {
Err(de::value::Error::custom(
format!(
"wrong number of parameters: {} expected 1",
self.path.segment_count()
)
.as_str(),
))
} else {
let v = self.path[0].parse().map_err(|_| {
de::value::Error::custom(format!(
"can not parse {:?} to a {}",
&self.path[0], $tp
))
})?;
visitor.$visit_fn(v)
}
}
};
}
pub struct PathDeserializer<'de, T: ResourcePath> {
path: &'de Path<T>,
}
impl<'de, T: ResourcePath + 'de> PathDeserializer<'de, T> {
pub fn new(path: &'de Path<T>) -> Self {
PathDeserializer { path }
}
}
impl<'de, T: ResourcePath + 'de> Deserializer<'de> for PathDeserializer<'de, T> {
type Error = de::value::Error;
fn deserialize_map<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_map(ParamsDeserializer {
params: self.path.iter(),
current: None,
})
}
fn deserialize_struct<V>(
self,
_: &'static str,
_: &'static [&'static str],
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
self.deserialize_map(visitor)
}
fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_unit()
}
fn deserialize_unit_struct<V>(
self,
_: &'static str,
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
self.deserialize_unit(visitor)
}
fn deserialize_newtype_struct<V>(
self,
_: &'static str,
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_newtype_struct(self)
}
fn deserialize_tuple<V>(self, len: usize, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
if self.path.segment_count() < len {
Err(de::value::Error::custom(
format!(
"wrong number of parameters: {} expected {}",
self.path.segment_count(),
len
)
.as_str(),
))
} else {
visitor.visit_seq(ParamsSeq {
params: self.path.iter(),
})
}
}
fn deserialize_tuple_struct<V>(
self,
_: &'static str,
len: usize,
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
if self.path.segment_count() < len {
Err(de::value::Error::custom(
format!(
"wrong number of parameters: {} expected {}",
self.path.segment_count(),
len
)
.as_str(),
))
} else {
visitor.visit_seq(ParamsSeq {
params: self.path.iter(),
})
}
}
fn deserialize_enum<V>(
self,
_: &'static str,
_: &'static [&'static str],
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
if self.path.is_empty() {
Err(de::value::Error::custom("expected at least one parameter"))
} else {
visitor.visit_enum(ValueEnum {
value: &self.path[0],
})
}
}
fn deserialize_str<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
if self.path.segment_count() != 1 {
Err(de::value::Error::custom(
format!(
"wrong number of parameters: {} expected 1",
self.path.segment_count()
)
.as_str(),
))
} else {
visitor.visit_str(&self.path[0])
}
}
fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_seq(ParamsSeq {
params: self.path.iter(),
})
}
unsupported_type!(deserialize_any, "'any'");
unsupported_type!(deserialize_bytes, "bytes");
unsupported_type!(deserialize_option, "Option<T>");
unsupported_type!(deserialize_identifier, "identifier");
unsupported_type!(deserialize_ignored_any, "ignored_any");
parse_single_value!(deserialize_bool, visit_bool, "bool");
parse_single_value!(deserialize_i8, visit_i8, "i8");
parse_single_value!(deserialize_i16, visit_i16, "i16");
parse_single_value!(deserialize_i32, visit_i32, "i32");
parse_single_value!(deserialize_i64, visit_i64, "i64");
parse_single_value!(deserialize_u8, visit_u8, "u8");
parse_single_value!(deserialize_u16, visit_u16, "u16");
parse_single_value!(deserialize_u32, visit_u32, "u32");
parse_single_value!(deserialize_u64, visit_u64, "u64");
parse_single_value!(deserialize_f32, visit_f32, "f32");
parse_single_value!(deserialize_f64, visit_f64, "f64");
parse_single_value!(deserialize_string, visit_string, "String");
parse_single_value!(deserialize_byte_buf, visit_string, "String");
parse_single_value!(deserialize_char, visit_char, "char");
}
struct ParamsDeserializer<'de, T: ResourcePath> {
params: PathIter<'de, T>,
current: Option<(&'de str, &'de str)>,
}
impl<'de, T: ResourcePath> de::MapAccess<'de> for ParamsDeserializer<'de, T> {
type Error = de::value::Error;
fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
where
K: de::DeserializeSeed<'de>,
{
self.current = self.params.next().map(|ref item| (item.0, item.1));
match self.current {
Some((key, _)) => Ok(Some(seed.deserialize(Key { key })?)),
None => Ok(None),
}
}
fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
where
V: de::DeserializeSeed<'de>,
{
if let Some((_, value)) = self.current.take() {
seed.deserialize(Value { value })
} else {
Err(de::value::Error::custom("unexpected item"))
}
}
}
struct Key<'de> {
key: &'de str,
}
impl<'de> Deserializer<'de> for Key<'de> {
type Error = de::value::Error;
fn deserialize_identifier<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_str(self.key)
}
fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom("Unexpected"))
}
forward_to_deserialize_any! {
bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
byte_buf option unit unit_struct newtype_struct seq tuple
tuple_struct map struct enum ignored_any
}
}
macro_rules! parse_value {
($trait_fn:ident, $visit_fn:ident, $tp:tt) => {
fn $trait_fn<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let v = self.value.parse().map_err(|_| {
de::value::Error::custom(format!("can not parse {:?} to a {}", self.value, $tp))
})?;
visitor.$visit_fn(v)
}
};
}
struct Value<'de> {
value: &'de str,
}
impl<'de> Deserializer<'de> for Value<'de> {
type Error = de::value::Error;
parse_value!(deserialize_bool, visit_bool, "bool");
parse_value!(deserialize_i8, visit_i8, "i8");
parse_value!(deserialize_i16, visit_i16, "i16");
parse_value!(deserialize_i32, visit_i32, "i32");
parse_value!(deserialize_i64, visit_i64, "i64");
parse_value!(deserialize_u8, visit_u8, "u8");
parse_value!(deserialize_u16, visit_u16, "u16");
parse_value!(deserialize_u32, visit_u32, "u32");
parse_value!(deserialize_u64, visit_u64, "u64");
parse_value!(deserialize_f32, visit_f32, "f32");
parse_value!(deserialize_f64, visit_f64, "f64");
parse_value!(deserialize_string, visit_string, "String");
parse_value!(deserialize_byte_buf, visit_string, "String");
parse_value!(deserialize_char, visit_char, "char");
fn deserialize_ignored_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_unit()
}
fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_unit()
}
fn deserialize_unit_struct<V>(
self,
_: &'static str,
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_unit()
}
fn deserialize_bytes<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_borrowed_bytes(self.value.as_bytes())
}
fn deserialize_str<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_borrowed_str(self.value)
}
fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_some(self)
}
fn deserialize_enum<V>(
self,
_: &'static str,
_: &'static [&'static str],
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_enum(ValueEnum { value: self.value })
}
fn deserialize_newtype_struct<V>(
self,
_: &'static str,
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_newtype_struct(self)
}
fn deserialize_tuple<V>(self, _: usize, _: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom("unsupported type: tuple"))
}
fn deserialize_struct<V>(
self,
_: &'static str,
_: &'static [&'static str],
_: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom("unsupported type: struct"))
}
fn deserialize_tuple_struct<V>(
self,
_: &'static str,
_: usize,
_: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom("unsupported type: tuple struct"))
}
unsupported_type!(deserialize_any, "any");
unsupported_type!(deserialize_seq, "seq");
unsupported_type!(deserialize_map, "map");
unsupported_type!(deserialize_identifier, "identifier");
}
struct ParamsSeq<'de, T: ResourcePath> {
params: PathIter<'de, T>,
}
impl<'de, T: ResourcePath> de::SeqAccess<'de> for ParamsSeq<'de, T> {
type Error = de::value::Error;
fn next_element_seed<U>(&mut self, seed: U) -> Result<Option<U::Value>, Self::Error>
where
U: de::DeserializeSeed<'de>,
{
match self.params.next() {
Some(item) => Ok(Some(seed.deserialize(Value { value: item.1 })?)),
None => Ok(None),
}
}
}
struct ValueEnum<'de> {
value: &'de str,
}
impl<'de> de::EnumAccess<'de> for ValueEnum<'de> {
type Error = de::value::Error;
type Variant = UnitVariant;
fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error>
where
V: de::DeserializeSeed<'de>,
{
Ok((seed.deserialize(Key { key: self.value })?, UnitVariant))
}
}
struct UnitVariant;
impl<'de> de::VariantAccess<'de> for UnitVariant {
type Error = de::value::Error;
fn unit_variant(self) -> Result<(), Self::Error> {
Ok(())
}
fn newtype_variant_seed<T>(self, _seed: T) -> Result<T::Value, Self::Error>
where
T: de::DeserializeSeed<'de>,
{
Err(de::value::Error::custom("not supported"))
}
fn tuple_variant<V>(self, _len: usize, _visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom("not supported"))
}
fn struct_variant<V>(
self,
_: &'static [&'static str],
_: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom("not supported"))
}
}
#[cfg(test)]
mod tests {
use serde::{de, Deserialize};
use super::*;
use crate::path::Path;
use crate::router::Router;
#[derive(Deserialize)]
struct MyStruct {
key: String,
value: String,
}
#[derive(Deserialize)]
struct Id {
_id: String,
}
#[derive(Debug, Deserialize)]
struct Test1(String, u32);
#[derive(Debug, Deserialize)]
struct Test2 {
key: String,
value: u32,
}
#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
enum TestEnum {
Val1,
Val2,
}
#[derive(Debug, Deserialize)]
struct Test3 {
val: TestEnum,
}
#[test]
fn test_request_extract() {
let mut router = Router::<()>::build();
router.path("/{key}/{value}/", ());
let router = router.finish();
let mut path = Path::new("/name/user1/");
assert!(router.recognize(&mut path).is_some());
let s: MyStruct = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(s.key, "name");
assert_eq!(s.value, "user1");
let s: (String, String) =
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(s.0, "name");
assert_eq!(s.1, "user1");
let mut router = Router::<()>::build();
router.path("/{key}/{value}/", ());
let router = router.finish();
let mut path = Path::new("/name/32/");
assert!(router.recognize(&mut path).is_some());
let s: Test1 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(s.0, "name");
assert_eq!(s.1, 32);
let s: Test2 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(s.key, "name");
assert_eq!(s.value, 32);
let s: (String, u8) =
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(s.0, "name");
assert_eq!(s.1, 32);
let res: Vec<String> =
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(res[0], "name".to_owned());
assert_eq!(res[1], "32".to_owned());
}
#[test]
fn test_extract_path_single() {
let mut router = Router::<()>::build();
router.path("/{value}/", ());
let router = router.finish();
let mut path = Path::new("/32/");
assert!(router.recognize(&mut path).is_some());
let i: i8 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(i, 32);
}
#[test]
fn test_extract_enum() {
let mut router = Router::<()>::build();
router.path("/{val}/", ());
let router = router.finish();
let mut path = Path::new("/val1/");
assert!(router.recognize(&mut path).is_some());
let i: TestEnum = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(i, TestEnum::Val1);
let mut router = Router::<()>::build();
router.path("/{val1}/{val2}/", ());
let router = router.finish();
let mut path = Path::new("/val1/val2/");
assert!(router.recognize(&mut path).is_some());
let i: (TestEnum, TestEnum) =
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(i, (TestEnum::Val1, TestEnum::Val2));
}
#[test]
fn test_extract_enum_value() {
let mut router = Router::<()>::build();
router.path("/{val}/", ());
let router = router.finish();
let mut path = Path::new("/val1/");
assert!(router.recognize(&mut path).is_some());
let i: Test3 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(i.val, TestEnum::Val1);
let mut path = Path::new("/val3/");
assert!(router.recognize(&mut path).is_some());
let i: Result<Test3, de::value::Error> =
de::Deserialize::deserialize(PathDeserializer::new(&path));
assert!(i.is_err());
assert!(format!("{:?}", i).contains("unknown variant"));
}
#[test]
fn test_extract_errors() {
let mut router = Router::<()>::build();
router.path("/{value}/", ());
let router = router.finish();
let mut path = Path::new("/name/");
assert!(router.recognize(&mut path).is_some());
let s: Result<Test1, de::value::Error> =
de::Deserialize::deserialize(PathDeserializer::new(&path));
assert!(s.is_err());
assert!(format!("{:?}", s).contains("wrong number of parameters"));
let s: Result<Test2, de::value::Error> =
de::Deserialize::deserialize(PathDeserializer::new(&path));
assert!(s.is_err());
assert!(format!("{:?}", s).contains("can not parse"));
let s: Result<(String, String), de::value::Error> =
de::Deserialize::deserialize(PathDeserializer::new(&path));
assert!(s.is_err());
assert!(format!("{:?}", s).contains("wrong number of parameters"));
let s: Result<u32, de::value::Error> =
de::Deserialize::deserialize(PathDeserializer::new(&path));
assert!(s.is_err());
assert!(format!("{:?}", s).contains("can not parse"));
}
// #[test]
// fn test_extract_path_decode() {
// let mut router = Router::<()>::default();
// router.register_resource(Resource::new(ResourceDef::new("/{value}/")));
// macro_rules! test_single_value {
// ($value:expr, $expected:expr) => {{
// let req = TestRequest::with_uri($value).finish();
// let info = router.recognize(&req, &(), 0);
// let req = req.with_route_info(info);
// assert_eq!(
// *Path::<String>::from_request(&req, &PathConfig::default()).unwrap(),
// $expected
// );
// }};
// }
// test_single_value!("/%25/", "%");
// test_single_value!("/%40%C2%A3%24%25%5E%26%2B%3D/", "@£$%^&+=");
// test_single_value!("/%2B/", "+");
// test_single_value!("/%252B/", "%2B");
// test_single_value!("/%2F/", "/");
// test_single_value!("/%252F/", "%2F");
// test_single_value!(
// "/http%3A%2F%2Flocalhost%3A80%2Ffoo/",
// "http://localhost:80/foo"
// );
// test_single_value!("/%2Fvar%2Flog%2Fsyslog/", "/var/log/syslog");
// test_single_value!(
// "/http%3A%2F%2Flocalhost%3A80%2Ffile%2F%252Fvar%252Flog%252Fsyslog/",
// "http://localhost:80/file/%2Fvar%2Flog%2Fsyslog"
// );
// let req = TestRequest::with_uri("/%25/7/?id=test").finish();
// let mut router = Router::<()>::default();
// router.register_resource(Resource::new(ResourceDef::new("/{key}/{value}/")));
// let info = router.recognize(&req, &(), 0);
// let req = req.with_route_info(info);
// let s = Path::<Test2>::from_request(&req, &PathConfig::default()).unwrap();
// assert_eq!(s.key, "%");
// assert_eq!(s.value, 7);
// let s = Path::<(String, String)>::from_request(&req, &PathConfig::default()).unwrap();
// assert_eq!(s.0, "%");
// assert_eq!(s.1, "7");
// }
// #[test]
// fn test_extract_path_no_decode() {
// let mut router = Router::<()>::default();
// router.register_resource(Resource::new(ResourceDef::new("/{value}/")));
// let req = TestRequest::with_uri("/%25/").finish();
// let info = router.recognize(&req, &(), 0);
// let req = req.with_route_info(info);
// assert_eq!(
// *Path::<String>::from_request(&req, &&PathConfig::default().disable_decoding())
// .unwrap(),
// "%25"
// );
// }
}
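Downstream code normally reaches `PathDeserializer` through `Path::load`, which the tests above exercise directly. A minimal sketch of typed extraction, assuming `actix-router` and `serde` (with the `derive` feature) as dependencies; the route pattern and struct are illustrative:

use actix_router::{Path, ResourceDef};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Params {
    user: String,
    id: u32,
}

fn main() {
    // Capture `{user}` and `{id}` from a concrete path...
    let resource = ResourceDef::new("/users/{user}/posts/{id}");
    let mut path = Path::new("/users/bob/posts/42");
    assert!(resource.capture_match_info(&mut path));

    // ...then let the deserializer (via `Path::load`) turn the captured
    // segments into a typed struct or a tuple.
    let params: Params = path.load().unwrap();
    assert_eq!(params.user, "bob");
    assert_eq!(params.id, 42);

    let pair: (String, u32) = path.load().unwrap();
    assert_eq!(pair, ("bob".to_owned(), 42));
}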

actix-router/src/lib.rs (new file, 149 lines)

@ -0,0 +1,149 @@
//! Resource path matching and router.
#![deny(rust_2018_idioms, nonstandard_style)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
mod de;
mod path;
mod resource;
mod router;
pub use self::de::PathDeserializer;
pub use self::path::Path;
pub use self::resource::ResourceDef;
pub use self::router::{ResourceInfo, Router, RouterBuilder};
// TODO: this trait is necessary, document it
// see impl Resource for ServiceRequest
pub trait Resource<T: ResourcePath> {
fn resource_path(&mut self) -> &mut Path<T>;
}
pub trait ResourcePath {
fn path(&self) -> &str;
}
impl ResourcePath for String {
fn path(&self) -> &str {
self.as_str()
}
}
impl<'a> ResourcePath for &'a str {
fn path(&self) -> &str {
self
}
}
impl ResourcePath for bytestring::ByteString {
fn path(&self) -> &str {
&*self
}
}
/// One or many patterns.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Patterns {
Single(String),
List(Vec<String>),
}
impl Patterns {
pub fn is_empty(&self) -> bool {
match self {
Patterns::Single(_) => false,
Patterns::List(pats) => pats.is_empty(),
}
}
}
/// Helper trait for types that can be converted to one or more path patterns.
pub trait IntoPatterns {
fn patterns(&self) -> Patterns;
}
impl IntoPatterns for String {
fn patterns(&self) -> Patterns {
Patterns::Single(self.clone())
}
}
impl<'a> IntoPatterns for &'a String {
fn patterns(&self) -> Patterns {
Patterns::Single((*self).clone())
}
}
impl<'a> IntoPatterns for &'a str {
fn patterns(&self) -> Patterns {
Patterns::Single((*self).to_owned())
}
}
impl IntoPatterns for bytestring::ByteString {
fn patterns(&self) -> Patterns {
Patterns::Single(self.to_string())
}
}
impl IntoPatterns for Patterns {
fn patterns(&self) -> Patterns {
self.clone()
}
}
impl<T: AsRef<str>> IntoPatterns for Vec<T> {
fn patterns(&self) -> Patterns {
let mut patterns = self.iter().map(|v| v.as_ref().to_owned());
match patterns.size_hint() {
(1, _) => Patterns::Single(patterns.next().unwrap()),
_ => Patterns::List(patterns.collect()),
}
}
}
macro_rules! array_patterns_single (($tp:ty) => {
impl IntoPatterns for [$tp; 1] {
fn patterns(&self) -> Patterns {
Patterns::Single(self[0].to_owned())
}
}
});
macro_rules! array_patterns_multiple (($tp:ty, $str_fn:expr, $($num:tt) +) => {
// for each array length specified in $num
$(
impl IntoPatterns for [$tp; $num] {
fn patterns(&self) -> Patterns {
Patterns::List(self.iter().map($str_fn).collect())
}
}
)+
});
array_patterns_single!(&str);
array_patterns_multiple!(&str, |&v| v.to_owned(), 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16);
array_patterns_single!(String);
array_patterns_multiple!(String, |v| v.clone(), 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16);
#[cfg(feature = "http")]
mod url;
#[cfg(feature = "http")]
pub use self::url::{Quoter, Url};
#[cfg(feature = "http")]
mod http_impls {
use http::Uri;
use super::ResourcePath;
impl ResourcePath for Uri {
fn path(&self) -> &str {
self.path()
}
}
}
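Both `ResourceDef::new` and `RouterBuilder::path` accept anything implementing `IntoPatterns`, so a single string or a set of alternative patterns can be registered with the same call. A small sketch of how the conversions above behave, assuming `actix-router` as a dependency; the example paths are illustrative:

use actix_router::{IntoPatterns, Patterns};

fn main() {
    // A single pattern, from any of the string-like impls above.
    assert_eq!(
        "/users/{id}".patterns(),
        Patterns::Single("/users/{id}".to_owned())
    );

    // Arrays collapse to Single or List depending on their length.
    assert_eq!(["/ping"].patterns(), Patterns::Single("/ping".to_owned()));
    assert_eq!(
        ["/ping", "/healthz"].patterns(),
        Patterns::List(vec!["/ping".to_owned(), "/healthz".to_owned()])
    );

    // A one-element Vec also becomes Single via the iterator size hint.
    assert_eq!(
        vec!["/only".to_owned()].patterns(),
        Patterns::Single("/only".to_owned())
    );
}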

actix-router/src/path.rs (new file, 220 lines)

@ -0,0 +1,220 @@
use std::borrow::Cow;
use std::ops::Index;
use firestorm::profile_method;
use serde::de;
use crate::{de::PathDeserializer, Resource, ResourcePath};
#[derive(Debug, Clone)]
pub(crate) enum PathItem {
Static(Cow<'static, str>),
Segment(u16, u16),
}
impl Default for PathItem {
fn default() -> Self {
Self::Static(Cow::Borrowed(""))
}
}
/// Resource path match information.
///
/// If resource path contains variable patterns, `Path` stores them.
#[derive(Debug, Clone, Default)]
pub struct Path<T> {
path: T,
pub(crate) skip: u16,
pub(crate) segments: Vec<(Cow<'static, str>, PathItem)>,
}
impl<T: ResourcePath> Path<T> {
pub fn new(path: T) -> Path<T> {
Path {
path,
skip: 0,
segments: Vec::new(),
}
}
/// Get reference to inner path instance.
#[inline]
pub fn get_ref(&self) -> &T {
&self.path
}
/// Get mutable reference to inner path instance.
#[inline]
pub fn get_mut(&mut self) -> &mut T {
&mut self.path
}
/// Path.
#[inline]
pub fn path(&self) -> &str {
profile_method!(path);
let skip = self.skip as usize;
let path = self.path.path();
if skip <= path.len() {
&path[skip..]
} else {
""
}
}
/// Set new path.
#[inline]
pub fn set(&mut self, path: T) {
self.skip = 0;
self.path = path;
self.segments.clear();
}
/// Reset state.
#[inline]
pub fn reset(&mut self) {
self.skip = 0;
self.segments.clear();
}
/// Skip first `n` chars in path.
#[inline]
pub fn skip(&mut self, n: u16) {
self.skip += n;
}
pub(crate) fn add(&mut self, name: impl Into<Cow<'static, str>>, value: PathItem) {
profile_method!(add);
match value {
PathItem::Static(s) => self.segments.push((name.into(), PathItem::Static(s))),
PathItem::Segment(begin, end) => self.segments.push((
name.into(),
PathItem::Segment(self.skip + begin, self.skip + end),
)),
}
}
#[doc(hidden)]
pub fn add_static(
&mut self,
name: impl Into<Cow<'static, str>>,
value: impl Into<Cow<'static, str>>,
) {
self.segments
.push((name.into(), PathItem::Static(value.into())));
}
/// Check if there are any matched patterns.
#[inline]
pub fn is_empty(&self) -> bool {
self.segments.is_empty()
}
/// Returns number of interpolated segments.
#[inline]
pub fn segment_count(&self) -> usize {
self.segments.len()
}
/// Get matched parameter by name without type conversion
pub fn get(&self, name: &str) -> Option<&str> {
profile_method!(get);
for (seg_name, val) in self.segments.iter() {
if name == seg_name {
return match val {
PathItem::Static(ref s) => Some(s),
PathItem::Segment(s, e) => {
Some(&self.path.path()[(*s as usize)..(*e as usize)])
}
};
}
}
None
}
/// Get unprocessed part of the path
pub fn unprocessed(&self) -> &str {
&self.path.path()[(self.skip as usize)..]
}
/// Get matched parameter by name.
///
/// If the keyed parameter is not available, an empty string is used as the default value.
pub fn query(&self, key: &str) -> &str {
profile_method!(query);
if let Some(s) = self.get(key) {
s
} else {
""
}
}
/// Return iterator to items in parameter container.
pub fn iter(&self) -> PathIter<'_, T> {
PathIter {
idx: 0,
params: self,
}
}
/// Try to deserialize matching parameters to a specified type `U`
pub fn load<'de, U: serde::Deserialize<'de>>(&'de self) -> Result<U, de::value::Error> {
profile_method!(load);
de::Deserialize::deserialize(PathDeserializer::new(self))
}
}
#[derive(Debug)]
pub struct PathIter<'a, T> {
idx: usize,
params: &'a Path<T>,
}
impl<'a, T: ResourcePath> Iterator for PathIter<'a, T> {
type Item = (&'a str, &'a str);
#[inline]
fn next(&mut self) -> Option<(&'a str, &'a str)> {
if self.idx < self.params.segment_count() {
let idx = self.idx;
let res = match self.params.segments[idx].1 {
PathItem::Static(ref s) => s,
PathItem::Segment(s, e) => &self.params.path.path()[(s as usize)..(e as usize)],
};
self.idx += 1;
return Some((&self.params.segments[idx].0, res));
}
None
}
}
impl<'a, T: ResourcePath> Index<&'a str> for Path<T> {
type Output = str;
fn index(&self, name: &'a str) -> &str {
self.get(name)
.expect("Value for parameter is not available")
}
}
impl<T: ResourcePath> Index<usize> for Path<T> {
type Output = str;
fn index(&self, idx: usize) -> &str {
match self.segments[idx].1 {
PathItem::Static(ref s) => s,
PathItem::Segment(s, e) => &self.path.path()[(s as usize)..(e as usize)],
}
}
}
impl<T: ResourcePath> Resource<T> for Path<T> {
fn resource_path(&mut self) -> &mut Self {
self
}
}
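A short sketch of how `Path` is used on its own, assuming `actix-router` as a dependency; the file-name pattern is illustrative:

use actix_router::{Path, ResourceDef};

fn main() {
    // `Path` wraps the raw path and records matched segments by name.
    let resource = ResourceDef::new("/files/{name}.{ext}");
    let mut path = Path::new("/files/report.pdf");
    assert!(resource.capture_match_info(&mut path));

    // Named access, with `query` falling back to "" for unknown names.
    assert_eq!(path.get("name"), Some("report"));
    assert_eq!(path.query("ext"), "pdf");
    assert_eq!(path.query("missing"), "");

    // Indexing by name or by position reads the same captured segments.
    assert_eq!(&path["name"], "report");
    assert_eq!(&path[0], "report");
    assert_eq!(path.segment_count(), 2);
}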

actix-router/src/resource.rs (new file, 1814 lines)

File diff suppressed because it is too large.

actix-router/src/router.rs (new file, 282 lines)

@ -0,0 +1,282 @@
use firestorm::profile_method;
use crate::{IntoPatterns, Resource, ResourceDef, ResourcePath};
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct ResourceId(pub u16);
/// Information about current resource
#[derive(Debug, Clone)]
pub struct ResourceInfo {
#[allow(dead_code)]
resource: ResourceId,
}
/// Resource router.
// T is the resource itself
// U is any other data needed for routing like method guards
pub struct Router<T, U = ()> {
routes: Vec<(ResourceDef, T, Option<U>)>,
}
impl<T, U> Router<T, U> {
pub fn build() -> RouterBuilder<T, U> {
RouterBuilder {
resources: Vec::new(),
}
}
pub fn recognize<R, P>(&self, resource: &mut R) -> Option<(&T, ResourceId)>
where
R: Resource<P>,
P: ResourcePath,
{
profile_method!(recognize);
for item in self.routes.iter() {
if item.0.capture_match_info(resource.resource_path()) {
return Some((&item.1, ResourceId(item.0.id())));
}
}
None
}
pub fn recognize_mut<R, P>(&mut self, resource: &mut R) -> Option<(&mut T, ResourceId)>
where
R: Resource<P>,
P: ResourcePath,
{
profile_method!(recognize_mut);
for item in self.routes.iter_mut() {
if item.0.capture_match_info(resource.resource_path()) {
return Some((&mut item.1, ResourceId(item.0.id())));
}
}
None
}
pub fn recognize_fn<R, P, F>(&self, resource: &mut R, check: F) -> Option<(&T, ResourceId)>
where
F: Fn(&R, &Option<U>) -> bool,
R: Resource<P>,
P: ResourcePath,
{
profile_method!(recognize_checked);
for item in self.routes.iter() {
if item.0.capture_match_info_fn(resource, &check, &item.2) {
return Some((&item.1, ResourceId(item.0.id())));
}
}
None
}
pub fn recognize_mut_fn<R, P, F>(
&mut self,
resource: &mut R,
check: F,
) -> Option<(&mut T, ResourceId)>
where
F: Fn(&R, &Option<U>) -> bool,
R: Resource<P>,
P: ResourcePath,
{
profile_method!(recognize_mut_checked);
for item in self.routes.iter_mut() {
if item.0.capture_match_info_fn(resource, &check, &item.2) {
return Some((&mut item.1, ResourceId(item.0.id())));
}
}
None
}
}
pub struct RouterBuilder<T, U = ()> {
resources: Vec<(ResourceDef, T, Option<U>)>,
}
impl<T, U> RouterBuilder<T, U> {
/// Register resource for specified path.
pub fn path<P: IntoPatterns>(
&mut self,
path: P,
resource: T,
) -> &mut (ResourceDef, T, Option<U>) {
profile_method!(path);
self.resources
.push((ResourceDef::new(path), resource, None));
self.resources.last_mut().unwrap()
}
/// Register resource for specified path prefix.
pub fn prefix(&mut self, prefix: &str, resource: T) -> &mut (ResourceDef, T, Option<U>) {
profile_method!(prefix);
self.resources
.push((ResourceDef::prefix(prefix), resource, None));
self.resources.last_mut().unwrap()
}
/// Register resource for ResourceDef
pub fn rdef(&mut self, rdef: ResourceDef, resource: T) -> &mut (ResourceDef, T, Option<U>) {
profile_method!(rdef);
self.resources.push((rdef, resource, None));
self.resources.last_mut().unwrap()
}
/// Finish configuration and create router instance.
pub fn finish(self) -> Router<T, U> {
Router {
routes: self.resources,
}
}
}
#[cfg(test)]
mod tests {
use crate::path::Path;
use crate::router::{ResourceId, Router};
#[allow(clippy::cognitive_complexity)]
#[test]
fn test_recognizer_1() {
let mut router = Router::<usize>::build();
router.path("/name", 10).0.set_id(0);
router.path("/name/{val}", 11).0.set_id(1);
router.path("/name/{val}/index.html", 12).0.set_id(2);
router.path("/file/{file}.{ext}", 13).0.set_id(3);
router.path("/v{val}/{val2}/index.html", 14).0.set_id(4);
router.path("/v/{tail:.*}", 15).0.set_id(5);
router.path("/test2/{test}.html", 16).0.set_id(6);
router.path("/{test}/index.html", 17).0.set_id(7);
let mut router = router.finish();
let mut path = Path::new("/unknown");
assert!(router.recognize_mut(&mut path).is_none());
let mut path = Path::new("/name");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 10);
assert_eq!(info, ResourceId(0));
assert!(path.is_empty());
let mut path = Path::new("/name/value");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 11);
assert_eq!(info, ResourceId(1));
assert_eq!(path.get("val").unwrap(), "value");
assert_eq!(&path["val"], "value");
let mut path = Path::new("/name/value2/index.html");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 12);
assert_eq!(info, ResourceId(2));
assert_eq!(path.get("val").unwrap(), "value2");
let mut path = Path::new("/file/file.gz");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 13);
assert_eq!(info, ResourceId(3));
assert_eq!(path.get("file").unwrap(), "file");
assert_eq!(path.get("ext").unwrap(), "gz");
let mut path = Path::new("/vtest/ttt/index.html");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 14);
assert_eq!(info, ResourceId(4));
assert_eq!(path.get("val").unwrap(), "test");
assert_eq!(path.get("val2").unwrap(), "ttt");
let mut path = Path::new("/v/blah-blah/index.html");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 15);
assert_eq!(info, ResourceId(5));
assert_eq!(path.get("tail").unwrap(), "blah-blah/index.html");
let mut path = Path::new("/test2/index.html");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 16);
assert_eq!(info, ResourceId(6));
assert_eq!(path.get("test").unwrap(), "index");
let mut path = Path::new("/bbb/index.html");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 17);
assert_eq!(info, ResourceId(7));
assert_eq!(path.get("test").unwrap(), "bbb");
}
#[test]
fn test_recognizer_2() {
let mut router = Router::<usize>::build();
router.path("/index.json", 10);
router.path("/{source}.json", 11);
let mut router = router.finish();
let mut path = Path::new("/index.json");
let (h, _) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 10);
let mut path = Path::new("/test.json");
let (h, _) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 11);
}
#[test]
fn test_recognizer_with_prefix() {
let mut router = Router::<usize>::build();
router.path("/name", 10).0.set_id(0);
router.path("/name/{val}", 11).0.set_id(1);
let mut router = router.finish();
let mut path = Path::new("/name");
path.skip(5);
assert!(router.recognize_mut(&mut path).is_none());
let mut path = Path::new("/test/name");
path.skip(5);
let (h, _) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 10);
let mut path = Path::new("/test/name/value");
path.skip(5);
let (h, id) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 11);
assert_eq!(id, ResourceId(1));
assert_eq!(path.get("val").unwrap(), "value");
assert_eq!(&path["val"], "value");
// same patterns
let mut router = Router::<usize>::build();
router.path("/name", 10);
router.path("/name/{val}", 11);
let mut router = router.finish();
let mut path = Path::new("/name");
path.skip(6);
assert!(router.recognize_mut(&mut path).is_none());
let mut path = Path::new("/test2/name");
path.skip(6);
let (h, _) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 10);
let mut path = Path::new("/test2/name-test");
path.skip(6);
assert!(router.recognize_mut(&mut path).is_none());
let mut path = Path::new("/test2/name/ttt");
path.skip(6);
let (h, _) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 11);
assert_eq!(&path["val"], "ttt");
}
}
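A compact sketch of the builder and matching flow outside the test module, assuming `actix-router` as a dependency; the integer resources stand in for real services, as in the tests above:

use actix_router::{Path, Router};

fn main() {
    // Resources here are plain numbers; in actix-web they are services.
    let mut builder = Router::<usize>::build();
    builder.path("/users/{id}", 1);
    builder.path("/users/{id}/posts/{post_id}", 2);
    builder.prefix("/static", 3);
    let router = builder.finish();

    // `recognize` walks the registered resources in order and captures
    // matched segments into the supplied `Path`.
    let mut path = Path::new("/users/42/posts/7");
    let (value, _id) = router.recognize(&mut path).unwrap();
    assert_eq!(*value, 2);
    assert_eq!(path.get("post_id"), Some("7"));

    // Prefix resources match any path underneath them.
    let mut path = Path::new("/static/css/site.css");
    let (value, _id) = router.recognize(&mut path).unwrap();
    assert_eq!(*value, 3);
}

Registration order matters: `recognize` returns the first registered resource whose pattern matches, which is why the benchmarks above register the full route table before timing lookups.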

actix-router/src/url.rs (new file, 288 lines)

@ -0,0 +1,288 @@
use crate::ResourcePath;
#[allow(dead_code)]
const GEN_DELIMS: &[u8] = b":/?#[]@";
#[allow(dead_code)]
const SUB_DELIMS_WITHOUT_QS: &[u8] = b"!$'()*,";
#[allow(dead_code)]
const SUB_DELIMS: &[u8] = b"!$'()*,+?=;";
#[allow(dead_code)]
const RESERVED: &[u8] = b":/?#[]@!$'()*,+?=;";
#[allow(dead_code)]
const UNRESERVED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
ABCDEFGHIJKLMNOPQRSTUVWXYZ
1234567890
-._~";
const ALLOWED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
ABCDEFGHIJKLMNOPQRSTUVWXYZ
1234567890
-._~
!$'()*,";
const QS: &[u8] = b"+&=;b";
#[inline]
fn bit_at(array: &[u8], ch: u8) -> bool {
array[(ch >> 3) as usize] & (1 << (ch & 7)) != 0
}
#[inline]
fn set_bit(array: &mut [u8], ch: u8) {
array[(ch >> 3) as usize] |= 1 << (ch & 7)
}
thread_local! {
static DEFAULT_QUOTER: Quoter = Quoter::new(b"@:", b"%/+");
}
#[derive(Default, Clone, Debug)]
pub struct Url {
uri: http::Uri,
path: Option<String>,
}
impl Url {
pub fn new(uri: http::Uri) -> Url {
let path = DEFAULT_QUOTER.with(|q| q.requote(uri.path().as_bytes()));
Url { uri, path }
}
pub fn with_quoter(uri: http::Uri, quoter: &Quoter) -> Url {
Url {
path: quoter.requote(uri.path().as_bytes()),
uri,
}
}
pub fn uri(&self) -> &http::Uri {
&self.uri
}
pub fn path(&self) -> &str {
if let Some(ref s) = self.path {
s
} else {
self.uri.path()
}
}
#[inline]
pub fn update(&mut self, uri: &http::Uri) {
self.uri = uri.clone();
self.path = DEFAULT_QUOTER.with(|q| q.requote(uri.path().as_bytes()));
}
#[inline]
pub fn update_with_quoter(&mut self, uri: &http::Uri, quoter: &Quoter) {
self.uri = uri.clone();
self.path = quoter.requote(uri.path().as_bytes());
}
}
impl ResourcePath for Url {
#[inline]
fn path(&self) -> &str {
self.path()
}
}
pub struct Quoter {
safe_table: [u8; 16],
protected_table: [u8; 16],
}
impl Quoter {
pub fn new(safe: &[u8], protected: &[u8]) -> Quoter {
let mut q = Quoter {
safe_table: [0; 16],
protected_table: [0; 16],
};
// prepare safe table
for i in 0..128 {
if ALLOWED.contains(&i) {
set_bit(&mut q.safe_table, i);
}
if QS.contains(&i) {
set_bit(&mut q.safe_table, i);
}
}
for ch in safe {
set_bit(&mut q.safe_table, *ch)
}
// prepare protected table
for ch in protected {
set_bit(&mut q.safe_table, *ch);
set_bit(&mut q.protected_table, *ch);
}
q
}
pub fn requote(&self, val: &[u8]) -> Option<String> {
let mut has_pct = 0;
let mut pct = [b'%', 0, 0];
let mut idx = 0;
let mut cloned: Option<Vec<u8>> = None;
let len = val.len();
while idx < len {
let ch = val[idx];
if has_pct != 0 {
pct[has_pct] = val[idx];
has_pct += 1;
if has_pct == 3 {
has_pct = 0;
let buf = cloned.as_mut().unwrap();
if let Some(ch) = restore_ch(pct[1], pct[2]) {
if ch < 128 {
if bit_at(&self.protected_table, ch) {
buf.extend_from_slice(&pct);
idx += 1;
continue;
}
if bit_at(&self.safe_table, ch) {
buf.push(ch);
idx += 1;
continue;
}
}
buf.push(ch);
} else {
buf.extend_from_slice(&pct[..]);
}
}
} else if ch == b'%' {
has_pct = 1;
if cloned.is_none() {
let mut c = Vec::with_capacity(len);
c.extend_from_slice(&val[..idx]);
cloned = Some(c);
}
} else if let Some(ref mut cloned) = cloned {
cloned.push(ch)
}
idx += 1;
}
cloned.map(|data| String::from_utf8_lossy(&data).into_owned())
}
}
#[inline]
fn from_hex(v: u8) -> Option<u8> {
if (b'0'..=b'9').contains(&v) {
Some(v - 0x30) // ord('0') == 0x30
} else if (b'A'..=b'F').contains(&v) {
Some(v - 0x41 + 10) // ord('A') == 0x41
} else if (b'a'..=b'f').contains(&v) {
Some(v - 0x61 + 10) // ord('a') == 0x61
} else {
None
}
}
#[inline]
fn restore_ch(d1: u8, d2: u8) -> Option<u8> {
from_hex(d1).and_then(|d1| from_hex(d2).map(move |d2| d1 << 4 | d2))
}
#[cfg(test)]
mod tests {
use http::Uri;
use std::convert::TryFrom;
use super::*;
use crate::{Path, ResourceDef};
const PROTECTED: &[u8] = b"%/+";
fn match_url(pattern: &'static str, url: impl AsRef<str>) -> Path<Url> {
let re = ResourceDef::new(pattern);
let uri = Uri::try_from(url.as_ref()).unwrap();
let mut path = Path::new(Url::new(uri));
assert!(re.capture_match_info(&mut path));
path
}
fn percent_encode(data: &[u8]) -> String {
data.iter().map(|c| format!("%{:02X}", c)).collect()
}
#[test]
fn test_parse_url() {
let re = "/user/{id}/test";
let path = match_url(re, "/user/2345/test");
assert_eq!(path.get("id").unwrap(), "2345");
// "%25" should never be decoded into '%' to guarantee the output is a valid
// percent-encoded format
let path = match_url(re, "/user/qwe%25/test");
assert_eq!(path.get("id").unwrap(), "qwe%25");
let path = match_url(re, "/user/qwe%25rty/test");
assert_eq!(path.get("id").unwrap(), "qwe%25rty");
}
#[test]
fn test_protected_chars() {
let encoded = percent_encode(PROTECTED);
let path = match_url("/user/{id}/test", format!("/user/{}/test", encoded));
assert_eq!(path.get("id").unwrap(), &encoded);
}
#[test]
fn test_non_protected_ascii() {
let nonprotected_ascii = ('\u{0}'..='\u{7F}')
.filter(|&c| c.is_ascii() && !PROTECTED.contains(&(c as u8)))
.collect::<String>();
let encoded = percent_encode(nonprotected_ascii.as_bytes());
let path = match_url("/user/{id}/test", format!("/user/{}/test", encoded));
assert_eq!(path.get("id").unwrap(), &nonprotected_ascii);
}
#[test]
fn test_valid_utf8_multibyte() {
let test = ('\u{FF00}'..='\u{FFFF}').collect::<String>();
let encoded = percent_encode(test.as_bytes());
let path = match_url("/a/{id}/b", format!("/a/{}/b", &encoded));
assert_eq!(path.get("id").unwrap(), &test);
}
#[test]
fn test_invalid_utf8() {
let invalid_utf8 = percent_encode((0x80..=0xff).collect::<Vec<_>>().as_slice());
let uri = Uri::try_from(format!("/{}", invalid_utf8)).unwrap();
let path = Path::new(Url::new(uri));
// We should always get a valid utf8 string
assert!(String::from_utf8(path.path().as_bytes().to_owned()).is_ok());
}
#[test]
fn test_from_hex() {
let hex = b"0123456789abcdefABCDEF";
for i in 0..256 {
let c = i as u8;
if hex.contains(&c) {
assert!(from_hex(c).is_some())
} else {
assert!(from_hex(c).is_none())
}
}
let expected = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 10, 11, 12, 13, 14, 15,
];
for i in 0..hex.len() {
assert_eq!(from_hex(hex[i]).unwrap(), expected[i]);
}
}
}
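A brief sketch of the quoting behavior these tests exercise, assuming `actix-router` is built with its `http` feature and that the `http` crate is available; the example paths are illustrative:

// Requires actix-router's "http" feature for `Url` and `Quoter`.
use std::convert::TryFrom;

use actix_router::{Quoter, Url};
use http::Uri;

fn main() {
    // '@' and ':' are marked safe (decoded); '%', '/' and '+' are protected
    // and stay percent-encoded, so the output remains a valid encoded path.
    let quoter = Quoter::new(b"@:", b"%/+");

    let uri = Uri::try_from("/user/john%40example.com/file%2Fname").unwrap();
    let url = Url::with_quoter(uri, &quoter);
    assert_eq!(url.path(), "/user/john@example.com/file%2Fname");

    // `Url::new` applies the same defaults through the thread-local quoter.
    let uri = Uri::try_from("/a%2Bb").unwrap();
    assert_eq!(Url::new(uri).path(), "/a%2Bb");
}

Keeping '%', '/' and '+' protected is exactly what `test_parse_url` above relies on: "%25" must survive requoting so the result is still valid percent-encoded data.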


@ -3,6 +3,27 @@
## Unreleased - 2021-xx-xx
## 0.1.0-beta.7 - 2021-11-22
* Fix compatibility with experimental `io-uring` feature of `actix-rt`. [#2408]
[#2408]: https://github.com/actix/actix-web/pull/2408
## 0.1.0-beta.6 - 2021-11-15
* No significant changes from `0.1.0-beta.5`.
## 0.1.0-beta.5 - 2021-10-20
* Updated rustls to v0.20. [#2414]
* Minimum supported Rust version (MSRV) is now 1.52.
[#2414]: https://github.com/actix/actix-web/pull/2414
## 0.1.0-beta.4 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.
## 0.1.0-beta.3 - 2021-06-20
* No significant changes from `0.1.0-beta.2`.


@ -1,32 +1,41 @@
 [package]
 name = "actix-test"
-version = "0.1.0-beta.3"
+version = "0.1.0-beta.7"
 authors = [
     "Nikolay Kim <fafhrd91@gmail.com>",
     "Rob Ede <robjtede@icloud.com>",
 ]
-edition = "2018"
 description = "Integration testing tools for Actix Web applications"
+keywords = ["http", "web", "framework", "async", "futures"]
+homepage = "https://actix.rs"
+repository = "https://github.com/actix/actix-web.git"
+categories = [
+    "network-programming",
+    "asynchronous",
+    "web-programming::http-server",
+    "web-programming::websocket",
+]
 license = "MIT OR Apache-2.0"
+edition = "2018"
 [features]
 default = []
 # rustls
-rustls = ["tls-rustls", "actix-http/rustls"]
+rustls = ["tls-rustls", "actix-http/rustls", "awc/rustls"]
 # openssl
-openssl = ["tls-openssl", "actix-http/openssl"]
+openssl = ["tls-openssl", "actix-http/openssl", "awc/openssl"]
 [dependencies]
-actix-codec = "0.4.0"
+actix-codec = "0.4.1"
-actix-http = "3.0.0-beta.8"
+actix-http = "3.0.0-beta.13"
-actix-http-test = { version = "3.0.0-beta.4", features = [] }
+actix-http-test = "3.0.0-beta.7"
 actix-service = "2.0.0"
 actix-utils = "3.0.0"
-actix-web = { version = "4.0.0-beta.8", default-features = false, features = ["cookies"] }
+actix-web = { version = "4.0.0-beta.11", default-features = false, features = ["cookies"] }
 actix-rt = "2.1"
-awc = { version = "3.0.0-beta.7", default-features = false, features = ["cookies"] }
+awc = { version = "3.0.0-beta.11", default-features = false, features = ["cookies"] }
 futures-core = { version = "0.3.7", default-features = false, features = ["std"] }
 futures-util = { version = "0.3.7", default-features = false, features = [] }
@ -35,4 +44,5 @@ serde = { version = "1", features = ["derive"] }
 serde_json = "1"
 serde_urlencoded = "0.7"
 tls-openssl = { package = "openssl", version = "0.10.9", optional = true }
-tls-rustls = { package = "rustls", version = "0.19.0", optional = true }
+tls-rustls = { package = "rustls", version = "0.20.0", optional = true }
+tokio = { version = "1.2", features = ["sync"] }


@ -31,7 +31,7 @@ extern crate tls_openssl as openssl;
 #[cfg(feature = "rustls")]
 extern crate tls_rustls as rustls;
-use std::{error::Error as StdError, fmt, net, sync::mpsc, thread, time};
+use std::{error::Error as StdError, fmt, net, thread, time::Duration};
 use actix_codec::{AsyncRead, AsyncWrite, Framed};
 pub use actix_http::test::TestBuffer;
@ -41,8 +41,9 @@ use actix_http::{
 };
 use actix_service::{map_config, IntoServiceFactory, ServiceFactory, ServiceFactoryExt as _};
 use actix_web::{
-    dev::{AppConfig, MessageBody, Server, Service},
-    rt, web, Error,
+    dev::{AppConfig, MessageBody, Server, ServerHandle, Service},
+    rt::{self, System},
+    web, Error,
 };
 use awc::{error::PayloadError, Client, ClientRequest, ClientResponse, Connector};
 use futures_core::Stream;
@ -52,6 +53,7 @@ pub use actix_web::test::{
     call_service, default_service, init_service, load_stream, ok_service, read_body,
     read_body_json, read_response, read_response_json, TestRequest,
 };
+use tokio::sync::mpsc;

 /// Start default [`TestServer`].
 ///
@ -64,7 +66,7 @@ pub use actix_web::test::{
 ///     Ok(HttpResponse::Ok())
 /// }
 ///
-/// #[actix_rt::test]
+/// #[actix_web::test]
 /// async fn test_example() {
 ///     let srv = actix_test::start(||
 ///         App::new().service(my_handler)
@ -104,7 +106,7 @@ where
 ///     Ok(HttpResponse::Ok())
 /// }
 ///
-/// #[actix_rt::test]
+/// #[actix_web::test]
 /// async fn test_example() {
 ///     let srv = actix_test::start_with(actix_test::config().h1(), ||
 ///         App::new().service(my_handler)
@ -128,7 +130,11 @@ where
     B: MessageBody + 'static,
     B::Error: Into<Box<dyn StdError>>,
 {
-    let (tx, rx) = mpsc::channel();
+    // for sending handles and server info back from the spawned thread
+    let (started_tx, started_rx) = std::sync::mpsc::channel();
+    // for signaling the shutdown of spawned server and system
+    let (thread_stop_tx, thread_stop_rx) = mpsc::channel(1);

     let tls = match cfg.stream {
         StreamType::Tcp => false,
@ -138,154 +144,189 @@
         StreamType::Rustls(_) => true,
     };
// run server in separate thread // run server in separate orphaned thread
thread::spawn(move || { thread::spawn(move || {
let sys = rt::System::new(); rt::System::new().block_on(async move {
let tcp = net::TcpListener::bind("127.0.0.1:0").unwrap(); let tcp = net::TcpListener::bind("127.0.0.1:0").unwrap();
let local_addr = tcp.local_addr().unwrap(); let local_addr = tcp.local_addr().unwrap();
let factory = factory.clone(); let factory = factory.clone();
let srv_cfg = cfg.clone(); let srv_cfg = cfg.clone();
let timeout = cfg.client_timeout; let timeout = cfg.client_timeout;
let builder = Server::build().workers(1).disable_signals();
let srv = match srv_cfg.stream { let builder = Server::build().workers(1).disable_signals().system_exit();
StreamType::Tcp => match srv_cfg.tp {
HttpVer::Http1 => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory() let srv = match srv_cfg.stream {
.into_factory() StreamType::Tcp => match srv_cfg.tp {
            HttpVer::Http1 => builder.listen("test", tcp, move || {
                let app_cfg =
                    AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);

                let fac = factory()
                    .into_factory()
                    .map_err(|err| err.into().error_response());

                HttpService::build()
                    .client_timeout(timeout)
                    .h1(map_config(fac, move |_| app_cfg.clone()))
                    .tcp()
            }),

            HttpVer::Http2 => builder.listen("test", tcp, move || {
                let app_cfg =
                    AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);

                let fac = factory()
                    .into_factory()
                    .map_err(|err| err.into().error_response());

                HttpService::build()
                    .client_timeout(timeout)
                    .h2(map_config(fac, move |_| app_cfg.clone()))
                    .tcp()
            }),

            HttpVer::Both => builder.listen("test", tcp, move || {
                let app_cfg =
                    AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);

                let fac = factory()
                    .into_factory()
                    .map_err(|err| err.into().error_response());

                HttpService::build()
                    .client_timeout(timeout)
                    .finish(map_config(fac, move |_| app_cfg.clone()))
                    .tcp()
            }),
        },

        #[cfg(feature = "openssl")]
        StreamType::Openssl(acceptor) => match cfg.tp {
            HttpVer::Http1 => builder.listen("test", tcp, move || {
                let app_cfg =
                    AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);

                let fac = factory()
                    .into_factory()
                    .map_err(|err| err.into().error_response());

                HttpService::build()
                    .client_timeout(timeout)
                    .h1(map_config(fac, move |_| app_cfg.clone()))
                    .openssl(acceptor.clone())
            }),

            HttpVer::Http2 => builder.listen("test", tcp, move || {
                let app_cfg =
                    AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);

                let fac = factory()
                    .into_factory()
                    .map_err(|err| err.into().error_response());

                HttpService::build()
                    .client_timeout(timeout)
                    .h2(map_config(fac, move |_| app_cfg.clone()))
                    .openssl(acceptor.clone())
            }),

            HttpVer::Both => builder.listen("test", tcp, move || {
                let app_cfg =
                    AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);

                let fac = factory()
                    .into_factory()
                    .map_err(|err| err.into().error_response());

                HttpService::build()
                    .client_timeout(timeout)
                    .finish(map_config(fac, move |_| app_cfg.clone()))
                    .openssl(acceptor.clone())
            }),
        },

        #[cfg(feature = "rustls")]
        StreamType::Rustls(config) => match cfg.tp {
            HttpVer::Http1 => builder.listen("test", tcp, move || {
                let app_cfg =
                    AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);

                let fac = factory()
                    .into_factory()
                    .map_err(|err| err.into().error_response());

                HttpService::build()
                    .client_timeout(timeout)
                    .h1(map_config(fac, move |_| app_cfg.clone()))
                    .rustls(config.clone())
            }),

            HttpVer::Http2 => builder.listen("test", tcp, move || {
                let app_cfg =
                    AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);

                let fac = factory()
                    .into_factory()
                    .map_err(|err| err.into().error_response());

                HttpService::build()
                    .client_timeout(timeout)
                    .h2(map_config(fac, move |_| app_cfg.clone()))
                    .rustls(config.clone())
            }),

            HttpVer::Both => builder.listen("test", tcp, move || {
                let app_cfg =
                    AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);

                let fac = factory()
                    .into_factory()
                    .map_err(|err| err.into().error_response());

                HttpService::build()
                    .client_timeout(timeout)
                    .finish(map_config(fac, move |_| app_cfg.clone()))
                    .rustls(config.clone())
            }),
        },
    }
    .expect("test server could not be created");
    sys.block_on(async {
        let srv = srv.run();

        started_tx
            .send((System::current(), srv.handle(), local_addr))
            .unwrap();

        // drive server loop
        srv.await.unwrap();

        // notify TestServer that server and system have shut down
        // all thread managed resources should be dropped at this point
    });

    let _ = thread_stop_tx.send(());
});

let (system, server, addr) = started_rx.recv().unwrap();

let client = {
    let connector = {
@@ -299,15 +340,15 @@ where
                .set_alpn_protos(b"\x02h2\x08http/1.1")
                .map_err(|e| log::error!("Can not set alpn protocol: {:?}", e));

            Connector::new()
                .conn_lifetime(Duration::from_secs(0))
                .timeout(Duration::from_millis(30000))
                .ssl(builder.build())
        }

        #[cfg(not(feature = "openssl"))]
        {
            Connector::new()
                .conn_lifetime(Duration::from_secs(0))
                .timeout(Duration::from_millis(30000))
        }
    };
@@ -315,11 +356,12 @@ where
    };

    TestServer {
        server,
        thread_stop_rx,
        client,
        system,
        addr,
        tls,
    }
}
@@ -405,11 +447,12 @@ impl TestServerConfig {
///
/// See [`start`] for usage example.
pub struct TestServer {
    server: ServerHandle,
    thread_stop_rx: mpsc::Receiver<()>,
    client: awc::Client,
    system: rt::System,
    addr: net::SocketAddr,
    tls: bool,
}

impl TestServer {
@@ -504,16 +547,31 @@ impl TestServer {
        self.client.headers()
    }

    /// Stop HTTP server.
    ///
    /// Waits for spawned `Server` and `System` to (force) shutdown.
    pub async fn stop(mut self) {
        // signal server to stop
        self.server.stop(false).await;

        // also signal system to stop
        // though this is handled by `ServerBuilder::exit_system` too
        self.system.stop();

        // wait for thread to be stopped but don't care about result
        let _ = self.thread_stop_rx.recv().await;
    }
}

impl Drop for TestServer {
    fn drop(&mut self) {
        // calls in this Drop impl should be enough to shut down the server, system, and thread
        // without needing to await anything

        // signal server to stop
        let _ = self.server.stop(true);

        // signal system to stop
        self.system.stop();
    }
}
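For orientation, a test that exercises this `TestServer` end to end might look like the sketch below. The route, handler, and assertions are illustrative and not part of this diff; `actix_test::start`, the bundled `awc` client, and `stop` are the pieces touched above.

use actix_web::{web, App, HttpResponse};

#[actix_web::test]
async fn responds_to_ping() {
    // start the server on a random local port, on its own worker thread
    let srv = actix_test::start(|| {
        App::new().route("/ping", web::get().to(|| async { HttpResponse::Ok().body("pong") }))
    });

    // the bundled awc client is already pointed at the server's address
    let res = srv.get("/ping").send().await.unwrap();
    assert!(res.status().is_success());

    // stop server and system, then wait for the worker thread to finish
    srv.stop().await;
}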


@@ -1,6 +1,11 @@
# Changes

## Unreleased - 2021-xx-xx
* Minimum supported Rust version (MSRV) is now 1.52.

## 4.0.0-beta.7 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.

## 4.0.0-beta.6 - 2021-06-26


@@ -1,6 +1,6 @@
[package]
name = "actix-web-actors"
version = "4.0.0-beta.7"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Actix actors support for Actix Web"
keywords = ["actix", "http", "web", "framework", "async"]

@@ -15,9 +15,9 @@ path = "src/lib.rs"
[dependencies]
actix = { version = "0.12.0", default-features = false }
actix-codec = "0.4.1"
actix-http = "3.0.0-beta.13"
actix-web = { version = "4.0.0-beta.11", default-features = false }

bytes = "1"
bytestring = "1"

@@ -27,8 +27,8 @@ tokio = { version = "1", features = ["sync"] }
[dev-dependencies]
actix-rt = "2.2"
actix-test = "0.1.0-beta.7"
awc = { version = "3.0.0-beta.11", default-features = false }
env_logger = "0.9"
futures-util = { version = "0.3.7", default-features = false }


@@ -3,15 +3,15 @@
> Actix actors support for Actix Web.

[![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors)
[![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.0.0-beta.7)](https://docs.rs/actix-web-actors/4.0.0-beta.7)
[![Version](https://img.shields.io/badge/rustc-1.52+-ab6000.svg)](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
![License](https://img.shields.io/crates/l/actix-web-actors.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-web-actors/4.0.0-beta.7/status.svg)](https://deps.rs/crate/actix-web-actors/4.0.0-beta.7)
[![Download](https://img.shields.io/crates/d/actix-web-actors.svg)](https://crates.io/crates/actix-web-actors)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)

## Documentation & Resources

- [API Documentation](https://docs.rs/actix-web-actors)
- Minimum Supported Rust Version (MSRV): 1.52


@@ -3,6 +3,22 @@
## Unreleased - 2021-xx-xx

## 0.5.0-beta.5 - 2021-10-20
* Improve error recovery potential when macro input is invalid. [#2410]
* Add `#[actix_web::test]` macro for setting up tests with a runtime. [#2409]
* Minimum supported Rust version (MSRV) is now 1.52.

[#2410]: https://github.com/actix/actix-web/pull/2410
[#2409]: https://github.com/actix/actix-web/pull/2409

## 0.5.0-beta.4 - 2021-09-09
* In routing macros, paths are now validated at compile time. [#2350]
* Minimum supported Rust version (MSRV) is now 1.51.

[#2350]: https://github.com/actix/actix-web/pull/2350

## 0.5.0-beta.3 - 2021-06-17
* No notable changes.


@@ -1,12 +1,13 @@
[package]
name = "actix-web-codegen"
version = "0.5.0-beta.5"
description = "Routing and runtime macros for Actix Web"
readme = "README.md"
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git"
authors = [
    "Nikolay Kim <fafhrd91@gmail.com>",
    "Rob Ede <robjtede@icloud.com>",
]
license = "MIT OR Apache-2.0"
edition = "2018"

@@ -17,12 +18,14 @@ proc-macro = true
quote = "1"
syn = { version = "1", features = ["full", "parsing"] }
proc-macro2 = "1"
actix-router = "0.5.0-beta.2"

[dev-dependencies]
actix-rt = "2.2"
actix-macros = "0.2.3"
actix-test = "0.1.0-beta.7"
actix-utils = "3.0.0"
actix-web = "4.0.0-beta.11"
futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] }
trybuild = "1"


@@ -3,18 +3,18 @@
> Routing and runtime macros for Actix Web.

[![crates.io](https://img.shields.io/crates/v/actix-web-codegen?label=latest)](https://crates.io/crates/actix-web-codegen)
[![Documentation](https://docs.rs/actix-web-codegen/badge.svg?version=0.5.0-beta.5)](https://docs.rs/actix-web-codegen/0.5.0-beta.5)
[![Version](https://img.shields.io/badge/rustc-1.52+-ab6000.svg)](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
![License](https://img.shields.io/crates/l/actix-web-codegen.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.5/status.svg)](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.5)
[![Download](https://img.shields.io/crates/d/actix-web-codegen.svg)](https://crates.io/crates/actix-web-codegen)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)

## Documentation & Resources

- [API Documentation](https://docs.rs/actix-web-codegen)
- Minimum Supported Rust Version (MSRV): 1.52

## Compile Testing


@@ -59,6 +59,7 @@
#![recursion_limit = "512"]

use proc_macro::TokenStream;
use quote::quote;

mod route;

@@ -157,24 +158,41 @@ method_macro! {
}

/// Marks async main function as the actix system entry-point.
///
/// # Actix Web Re-export
/// This macro can be applied with `#[actix_web::main]` when used in Actix Web applications.
///
/// # Examples
/// ```
/// #[actix_web::main]
/// async fn main() {
///     async { println!("Hello world"); }.await
/// }
/// ```
#[proc_macro_attribute]
pub fn main(_: TokenStream, item: TokenStream) -> TokenStream {
    let mut output: TokenStream = (quote! {
        #[::actix_web::rt::main(system = "::actix_web::rt::System")]
    })
    .into();

    output.extend(item);
    output
}
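For reference, the expansion targets `actix_web::rt::System`, so a typical entry-point built on this macro looks like the following sketch; the bind address and handler are illustrative, not taken from this diff.

use actix_web::{web, App, HttpServer, Responder};

async fn index() -> impl Responder {
    "Hello world"
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // the attribute wraps the body in an actix_web::rt::System and blocks on it
    HttpServer::new(|| App::new().route("/", web::get().to(index)))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}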
/// Marks async test functions to use the actix system entry-point.
///
/// # Examples
/// ```
/// #[actix_web::test]
/// async fn test() {
///     assert_eq!(async { "Hello world" }.await, "Hello world");
/// }
/// ```
#[proc_macro_attribute]
pub fn test(_: TokenStream, item: TokenStream) -> TokenStream {
    let mut output: TokenStream = (quote! {
        #[::actix_web::rt::test(system = "::actix_web::rt::System")]
    })
    .into();

    output.extend(item);
    output
}
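As a usage sketch (handler, route, and test name are illustrative, not part of this diff), the new attribute pairs naturally with actix-web's in-process test helpers:

use actix_web::{test, web, App, HttpResponse};

#[actix_web::test]
async fn index_returns_200() {
    // build the App as an in-process service; no TCP listener is involved
    let app = test::init_service(
        App::new().route("/", web::get().to(|| async { HttpResponse::Ok().finish() })),
    )
    .await;

    let req = test::TestRequest::get().uri("/").to_request();
    let res = test::call_service(&app, req).await;

    assert!(res.status().is_success());
}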


@@ -3,6 +3,7 @@ extern crate proc_macro;
use std::collections::HashSet;
use std::convert::TryFrom;

use actix_router::ResourceDef;
use proc_macro::TokenStream;
use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::{format_ident, quote, ToTokens, TokenStreamExt};

@@ -101,6 +102,7 @@ impl Args {
            match arg {
                NestedMeta::Lit(syn::Lit::Str(lit)) => match path {
                    None => {
                        let _ = ResourceDef::new(lit.value());
                        path = Some(lit);
                    }
                    _ => {

@@ -218,7 +220,7 @@ fn guess_resource_type(typ: &syn::Type) -> ResourceType {
impl Route {
    pub fn new(
        args: AttributeArgs,
        ast: syn::ItemFn,
        method: Option<MethodType>,
    ) -> syn::Result<Self> {
        if args.is_empty() {

@@ -232,14 +234,11 @@ impl Route {
                ),
            ));
        }

        let name = ast.sig.ident.clone();

        // Try and pull out the doc comments so that we can reapply them to the generated struct.
        // Note that multi line doc comments are converted to multiple doc attributes.
        let doc_attributes = ast
            .attrs
            .iter()

@@ -347,8 +346,28 @@ pub(crate) fn with_method(
    input: TokenStream,
) -> TokenStream {
    let args = parse_macro_input!(args as syn::AttributeArgs);

    let ast = match syn::parse::<syn::ItemFn>(input.clone()) {
        Ok(ast) => ast,
        // on parse error, make IDEs happy; see fn docs
        Err(err) => return input_and_compile_error(input, err),
    };

    match Route::new(args, ast, method) {
        Ok(route) => route.into_token_stream().into(),
        // on macro related error, make IDEs happy; see fn docs
        Err(err) => input_and_compile_error(input, err),
    }
}

/// Converts the error to a token stream and appends it to the original input.
///
/// Returning the original input in addition to the error is good for IDEs which can gracefully
/// recover and show more precise errors within the macro body.
///
/// See <https://github.com/rust-analyzer/rust-analyzer/issues/10468> for more info.
fn input_and_compile_error(mut item: TokenStream, err: syn::Error) -> TokenStream {
    let compile_err = TokenStream::from(err.to_compile_error());
    item.extend(compile_err);
    item
}
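To illustrate the technique outside a proc-macro crate, the following sketch mirrors `input_and_compile_error` using `proc_macro2` types; the item, error message, and function name here are invented for demonstration and are not part of this diff.

use proc_macro2::{Span, TokenStream};
use quote::quote;

// same append-the-error pattern, but on proc_macro2::TokenStream so it can run in a normal binary
fn append_compile_error(mut item: TokenStream, err: syn::Error) -> TokenStream {
    item.extend(err.to_compile_error());
    item
}

fn main() {
    let item: TokenStream = quote!(async fn index() {});
    let err = syn::Error::new(Span::call_site(), "invalid service definition");

    // prints the untouched item followed by a compile_error!("...") invocation
    println!("{}", append_compile_error(item, err));
}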

Some files were not shown because too many files have changed in this diff.