Mirror of https://github.com/fafhrd91/actix-web (synced 2025-07-04 18:06:23 +02:00)

Compare commits: files-v0.6...web-v4.0.0 (67 commits)
Commits:
9abe166d52, c09ec6af4c, 37f2bf5625, 4f6f0b0137, 591abc37c3, ad22cc4e7f, efdf3ab1c3,
6b3ea4fc61, 99985fc4ec, a6707fb7ee, a3806cde19, efefa0d0ce, 450ff5fa1d, 8ae278cb68,
46699e3429, ba88d3b4bf, 8dd30611fa, 1383c7d701, d8a0f46f26, 53ec66caf4, 93112644d3,
ddc8c16cb3, 373b3f91df, 7d01ece355, c50eef6166, dade818eba, ae35e69382, 5128b1bdfc,
168b2f227d, 4bb32fb19b, f9da6e48e0, ff07816b65, 5f412c67db, a0c0bff944, 384164cc14,
e965d8298f, f6e69919ed, 293c52c3ef, 5a14ffeef2, 7ae132cb68, d8deed0475, 2504c2ecb0,
604be5495f, 262c6bc828, 5eba95b731, 09afd033fc, 539697292a, 5a480d1d78, 9a26393375,
2eacb735a4, 767e4efe22, e559a197cc, 93aa86e30b, 2d8d2f5ab0, 083ee05d50, ed0516d724,
7535a1ade8, 8846808804, 3b6333e65f, b1148fd735, 12f7720309, 2d8530feb3, 7faeffc5ab,
f81d4bdae7, 6893773280, 73a655544e, baa5a663c4
@@ -1,8 +1,14 @@
[alias]
chk = "check --workspace --all-features --tests --examples --bins"
lint = "clippy --workspace --tests --examples"
lint = "clippy --workspace --all-features --tests --examples --bins"
ci-min = "hack check --workspace --no-default-features"
ci-min-test = "hack check --workspace --no-default-features --tests --examples"
ci-default = "hack check --workspace"
ci-full = "check --workspace --bins --examples --tests"
ci-test = "test --workspace --all-features --no-fail-fast"
ci-default = "check --workspace --bins --tests --examples"
ci-full = "check --workspace --all-features --bins --tests --examples"
ci-test = "test --workspace --all-features --lib --tests --no-fail-fast -- --nocapture"
ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture"

ci-feature-powerset-check-no-tls="hack --workspace --feature-powerset --skip=__compress,rustls,openssl check"
ci-feature-powerset-check-rustls="hack --workspace --feature-powerset --features=rustls --skip=__compress,openssl check"
ci-feature-powerset-check-openssl="hack --workspace --feature-powerset --features=openssl --skip=__compress,rustls check"
ci-feature-powerset-check-all="hack --workspace --feature-powerset --skip=__compress check"
.github/ISSUE_TEMPLATE/config.yml (13 changed lines)
@@ -1,15 +1,8 @@
blank_issues_enabled: true
contact_links:
- name: GitHub Discussions
url: https://github.com/actix/actix-web/discussions
about: Actix Web Q&A
- name: Gitter chat (actix-web)
url: https://gitter.im/actix/actix-web
about: Actix Web Q&A
- name: Gitter chat (actix)
url: https://gitter.im/actix/actix
about: Actix (actor framework) Q&A
- name: Actix Discord
url: https://discord.gg/NWpN5mmg3x
about: Actix developer discussion and community chat

- name: GitHub Discussions
url: https://github.com/actix/actix-web/discussions
about: Actix Web Q&A
.github/PULL_REQUEST_TEMPLATE.md (2 changed lines)
@@ -8,7 +8,7 @@ PR_TYPE

## PR Checklist
<!-- Check your PR fulfills the following items. ->>
<!-- Check your PR fulfills the following items. -->
<!-- For draft PRs check the boxes as you complete them. -->

- [ ] Tests for the changes have been added / updated.
.github/workflows/ci.yml (149 changed lines)
@@ -14,9 +14,9 @@ jobs:
target:
- { name: Linux, os: ubuntu-latest, triple: x86_64-unknown-linux-gnu }
- { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
- { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc }
- { name: Windows, os: windows-2022, triple: x86_64-pc-windows-msvc }
version:
- 1.46.0 # MSRV
- 1.52.0 # MSRV
- stable
- nightly

@@ -24,12 +24,16 @@
runs-on: ${{ matrix.target.os }}

env:
CI: 1
CARGO_INCREMENTAL: 0
VCPKGRS_DYNAMIC: 1

steps:
- uses: actions/checkout@v2

# install OpenSSL on Windows
# TODO: GitHub actions docs state that OpenSSL is
# already installed on these Windows machines somewhere
- name: Set vcpkg root
if: matrix.target.triple == 'x86_64-pc-windows-msvc'
run: echo "VCPKG_ROOT=$env:VCPKG_INSTALLATION_ROOT" | Out-File -FilePath $env:GITHUB_ENV -Append

@@ -44,17 +48,9 @@
profile: minimal
override: true

- name: Install ${{ matrix.version }}
uses: actions-rs/toolchain@v1
with:
toolchain: ${{ matrix.version }}-${{ matrix.target.triple }}
profile: minimal
override: true

- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with:
command: generate-lockfile
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.2.0

@@ -66,62 +62,117 @@

- name: check minimal
uses: actions-rs/cargo@v1
with:
command: hack
args: check --workspace --no-default-features
with: { command: ci-min }

- name: check minimal + tests
uses: actions-rs/cargo@v1
with:
command: hack
args: check --workspace --no-default-features --tests --examples
with: { command: ci-min-test }

- name: check default
uses: actions-rs/cargo@v1
with: { command: ci-default }

- name: check full
uses: actions-rs/cargo@v1
with:
command: check
args: --workspace --bins --examples --tests
with: { command: ci-full }

- name: tests
uses: actions-rs/cargo@v1
with:
command: test
args: --workspace --all-features --no-fail-fast -- --nocapture
--skip=test_h2_content_length
--skip=test_reading_deflate_encoding_large_random_rustls

- name: tests (actix-http)
uses: actions-rs/cargo@v1
timeout-minutes: 40
with:
command: test
args: --package=actix-http --no-default-features --features=rustls -- --nocapture
command: ci-test
args: --skip=test_reading_deflate_encoding_large_random_rustls

- name: tests (awc)
uses: actions-rs/cargo@v1
timeout-minutes: 40
- name: Clear the cargo caches
run: |
cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
cargo-cache

ci_feature_powerset_check:
name: Verify Feature Combinations
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2

- name: Install stable
uses: actions-rs/toolchain@v1
with:
command: test
args: --package=awc --no-default-features --features=rustls -- --nocapture
toolchain: stable-x86_64-unknown-linux-gnu
profile: minimal
override: true

- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.2.0

- name: Install cargo-hack
uses: actions-rs/cargo@v1
with:
command: install
args: cargo-hack

- name: check feature combinations
# if: github.ref == 'refs/heads/master'
uses: actions-rs/cargo@v1
with: { command: ci-feature-powerset-check-all }

coverage:
name: coverage
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2

- name: Install stable
uses: actions-rs/toolchain@v1
with:
toolchain: stable-x86_64-unknown-linux-gnu
profile: minimal
override: true

- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.2.0

- name: Generate coverage file
if: >
matrix.target.os == 'ubuntu-latest'
&& matrix.version == 'stable'
&& github.ref == 'refs/heads/master'
if: github.ref == 'refs/heads/master'
run: |
cargo install cargo-tarpaulin --vers "^0.13"
cargo tarpaulin --out Xml --verbose
- name: Upload to Codecov
if: >
matrix.target.os == 'ubuntu-latest'
&& matrix.version == 'stable'
&& github.ref == 'refs/heads/master'
if: github.ref == 'refs/heads/master'
uses: codecov/codecov-action@v1
with:
file: cobertura.xml
with: { file: cobertura.xml }

- name: Clear the cargo caches
run: |
cargo install cargo-cache --version 0.6.2 --no-default-features --features ci-autoclean
cargo-cache
rustdoc:
name: rustdoc
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2

- name: Install Rust (nightly)
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-x86_64-unknown-linux-gnu
profile: minimal
override: true

- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.3.0

- name: Install cargo-hack
uses: actions-rs/cargo@v1
with:
command: install
args: cargo-hack

- name: doc tests
uses: actions-rs/cargo@v1
timeout-minutes: 40
with: { command: ci-doctest }
.github/workflows/clippy-fmt.yml (2 changed lines)
@@ -36,4 +36,4 @@ jobs:
uses: actions-rs/clippy-check@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
args: --workspace --tests --all-features
args: --workspace --all-features --tests
.gitignore (3 changed lines)
@@ -16,3 +16,6 @@ guide/build/

# Configuration directory generated by CLion
.idea

# Configuration directory generated by VSCode
.vscode
CHANGES.md (68 changed lines)
@@ -3,12 +3,78 @@

## Unreleased - 2021-xx-xx


## 4.0.0-beta.10 - 2021-10-20
### Added
* Option to allow `Json` extractor to work without a `Content-Type` header present. [#2362]
* `#[actix_web::test]` macro for setting up tests with a runtime. [#2409] (see the sketch after this section)

### Changed
* Associated type `FromRequest::Config` was removed. [#2233]
* Inner field made private on `web::Payload`. [#2384]
* `Data::into_inner` and `Data::get_ref` no longer require `T: Sized`. [#2403]
* Updated rustls to v0.20. [#2414]
* Minimum supported Rust version (MSRV) is now 1.52.

### Removed
* `ServiceResponse::checked_expr`, a legacy method, was removed. [#2401]

[#2233]: https://github.com/actix/actix-web/pull/2233
[#2362]: https://github.com/actix/actix-web/pull/2362
[#2384]: https://github.com/actix/actix-web/pull/2384
[#2401]: https://github.com/actix/actix-web/pull/2401
[#2409]: https://github.com/actix/actix-web/pull/2409
[#2414]: https://github.com/actix/actix-web/pull/2414
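A minimal sketch (not part of the diff) of how the `#[actix_web::test]` attribute from #2409 is intended to be used; the handler, route, and test names are invented for illustration:

```rust
use actix_web::{test, web, App, HttpResponse, Responder};

// hypothetical handler used only for this example
async fn ping() -> impl Responder {
    HttpResponse::Ok().body("pong")
}

// #[actix_web::test] sets up the async test runtime, taking the place of #[actix_rt::test]
#[actix_web::test]
async fn ping_returns_200() {
    let app = test::init_service(App::new().route("/ping", web::get().to(ping))).await;
    let req = test::TestRequest::get().uri("/ping").to_request();
    let res = test::call_service(&app, req).await;
    assert!(res.status().is_success());
}
```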
## 4.0.0-beta.9 - 2021-09-09
### Added
* Re-export actix-service `ServiceFactory` in `dev` module. [#2325]

### Changed
* Compress middleware will return 406 Not Acceptable when no content encoding is acceptable to the client. [#2344]
* Move `BaseHttpResponse` to `dev::Response`. [#2379]
* Enable `TestRequest::param` to accept more than just static strings. [#2172]
* Minimum supported Rust version (MSRV) is now 1.51.

### Fixed
* Fix quality parse error in Accept-Encoding header. [#2344]
* Re-export correct type at `web::HttpResponse`. [#2379]

[#2172]: https://github.com/actix/actix-web/pull/2172
[#2325]: https://github.com/actix/actix-web/pull/2325
[#2344]: https://github.com/actix/actix-web/pull/2344
[#2379]: https://github.com/actix/actix-web/pull/2379


## 4.0.0-beta.8 - 2021-06-26
### Added
* Add `ServiceRequest::parts_mut`. [#2177]
* Add extractors for `Uri` and `Method`. [#2263]
* Add extractors for `ConnectionInfo` and `PeerAddr`. [#2263] (see the sketch after this section)
* Add `Route::service` for using hand-written services as handlers. [#2262]

### Changed
* Change compression algorithm features flags. [#2250]
* Deprecate `App::data` and `App::data_factory`. [#2271]
* Smarter extraction of `ConnectionInfo` parts. [#2282]

### Fixed
* Scope and Resource middleware can access data items set on their own layer. [#2288]

[#2177]: https://github.com/actix/actix-web/pull/2177
[#2250]: https://github.com/actix/actix-web/pull/2250
[#2271]: https://github.com/actix/actix-web/pull/2271
[#2262]: https://github.com/actix/actix-web/pull/2262
[#2263]: https://github.com/actix/actix-web/pull/2263
[#2282]: https://github.com/actix/actix-web/pull/2282
[#2288]: https://github.com/actix/actix-web/pull/2288
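A hedged sketch (not from the diff) of the `Uri`, `Method`, and `ConnectionInfo` extractors added in #2263; the handler name, route, and bind address are invented for illustration:

```rust
use actix_web::{
    dev::ConnectionInfo,
    http::{Method, Uri},
    web, App, HttpResponse, HttpServer, Responder,
};

// hypothetical handler: each parameter is filled in by its FromRequest impl
async fn inspect(method: Method, uri: Uri, conn: ConnectionInfo) -> impl Responder {
    HttpResponse::Ok().body(format!("{} {} (host: {})", method, uri, conn.host()))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().route("/inspect", web::get().to(inspect)))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```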
## 4.0.0-beta.7 - 2021-06-17
### Added
* `HttpServer::worker_max_blocking_threads` for setting block thread pool. [#2200]

### Changed
* Adjusted default JSON payload limit to 2MB (from 32kb) and included size and limits in the `JsonPayloadError::Overflow` error variant. [#2162]
* `ServiceResponse::error_response` now uses body type of `Body`. [#2201]

[#2162]: https://github.com/actix/actix-web/pull/2162
Cargo.toml (64 changed lines)
@@ -1,6 +1,6 @@
[package]
name = "actix-web"
version = "4.0.0-beta.7"
version = "4.0.0-beta.10"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
keywords = ["actix", "http", "web", "framework", "async"]

@@ -11,19 +11,21 @@ categories = [
"web-programming::websocket"
]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
repository = "https://github.com/actix/actix-web.git"
license = "MIT OR Apache-2.0"
edition = "2018"

[package.metadata.docs.rs]
# features that docs.rs will build with
features = ["openssl", "rustls", "compress", "cookies", "secure-cookies"]
features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies", "secure-cookies"]
rustdoc-args = ["--cfg", "docsrs"]

[lib]
name = "actix_web"
path = "src/lib.rs"

[workspace]
resolver = "2"
members = [
".",
"awc",

@@ -34,15 +36,18 @@ members = [
"actix-web-codegen",
"actix-http-test",
"actix-test",
"actix-router",
]
# enable when MSRV is 1.51+
# resolver = "2"

[features]
default = ["compress", "cookies"]
default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]

# content-encoding support
compress = ["actix-http/compress"]
# Brotli algorithm content-encoding support
compress-brotli = ["actix-http/compress-brotli", "__compress"]
# Gzip and deflate algorithms content-encoding support
compress-gzip = ["actix-http/compress-gzip", "__compress"]
# Zstd algorithm content-encoding support
compress-zstd = ["actix-http/compress-zstd", "__compress"]

# support for cookies
cookies = ["cookie"]

@@ -56,21 +61,26 @@ openssl = ["actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]
# rustls
rustls = ["actix-http/rustls", "actix-tls/accept", "actix-tls/rustls"]

# Internal (PRIVATE!) features used to aid testing and checking feature status.
# Don't rely on these whatsoever. They may disappear at anytime.
__compress = []

[dependencies]
actix-codec = "0.4.0"
actix-macros = "0.2.1"
actix-router = "0.2.7"
actix-macros = "0.2.3"
actix-rt = "2.2"
actix-server = "2.0.0-beta.3"
actix-service = "2.0.0"
actix-utils = "3.0.0"
actix-tls = { version = "3.0.0-beta.5", default-features = false, optional = true }
actix-tls = { version = "3.0.0-beta.7", default-features = false, optional = true }

actix-web-codegen = "0.5.0-beta.2"
actix-http = "3.0.0-beta.7"
actix-http = "3.0.0-beta.11"
actix-router = "0.5.0-beta.2"
actix-web-codegen = "0.5.0-beta.5"

ahash = "0.7"
bytes = "1"
cfg-if = "1"
cookie = { version = "0.15", features = ["percent-encode"], optional = true }
derive_more = "0.99.5"
either = "1.5.3"

@@ -88,25 +98,30 @@ regex = "1.4"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_urlencoded = "0.7"
smallvec = "1.6"
smallvec = "1.6.1"
socket2 = "0.4.0"
time = { version = "0.2.23", default-features = false, features = ["std"] }
time = { version = "0.3", default-features = false, features = ["formatting"] }
url = "2.1"

[dev-dependencies]
actix-test = { version = "0.1.0-beta.2", features = ["openssl", "rustls"] }
awc = { version = "3.0.0-beta.6", features = ["openssl"] }
actix-test = { version = "0.1.0-beta.3", features = ["openssl", "rustls"] }
awc = { version = "3.0.0-beta.9", features = ["openssl"] }

brotli2 = "0.3.2"
criterion = "0.3"
criterion = { version = "0.3", features = ["html_reports"] }
env_logger = "0.8"
flate2 = "1.0.13"
zstd = "0.7"
futures-util = { version = "0.3.7", default-features = false, features = ["std"] }
rand = "0.8"
rcgen = "0.8"
serde_derive = "1.0"
rustls-pemfile = "0.2"
tls-openssl = { package = "openssl", version = "0.10.9" }
tls-rustls = { package = "rustls", version = "0.19.0" }
tls-rustls = { package = "rustls", version = "0.20.0" }
zstd = "0.7"

[profile.dev]
# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.
debug = 0

[profile.release]
lto = true

@@ -118,6 +133,7 @@ actix-files = { path = "actix-files" }
actix-http = { path = "actix-http" }
actix-http-test = { path = "actix-http-test" }
actix-multipart = { path = "actix-multipart" }
actix-router = { path = "actix-router" }
actix-test = { path = "actix-test" }
actix-web = { path = "." }
actix-web-actors = { path = "actix-web-actors" }

@@ -126,15 +142,15 @@ awc = { path = "awc" }

[[test]]
name = "test_server"
required-features = ["compress", "cookies"]
required-features = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]

[[example]]
name = "basic"
required-features = ["compress"]
required-features = ["compress-gzip"]

[[example]]
name = "uds"
required-features = ["compress"]
required-features = ["compress-gzip"]

[[example]]
name = "on_connect"
MIGRATION.md (21 changed lines)
@@ -3,13 +3,28 @@

* The default `NormalizePath` behavior now strips trailing slashes by default. This was
  previously documented to be the case in v3 but the behavior now matches. The effect is that
  routes defined with trailing slashes will become inaccessible when
  using `NormalizePath::default()`.
  using `NormalizePath::default()`. As such, calling `NormalizePath::default()` will log a warning.
  It is advised that the `new` method be used instead (see the sketch after these notes).

  Before: `#[get("/test/")`
  After: `#[get("/test")`
  Before: `#[get("/test/")]`
  After: `#[get("/test")]`

  Alternatively, explicitly require trailing slashes: `NormalizePath::new(TrailingSlash::Always)`.

* The `type Config` of `FromRequest` was removed.

* Feature flag `compress` has been split into its supported algorithms (brotli, gzip, zstd).
  By default all compression algorithms are enabled.
  To select the algorithms you want to include with `middleware::Compress`, use the following flags:
  - `compress-brotli`
  - `compress-gzip`
  - `compress-zstd`
  If you have set dedicated `actix-web` features in your `Cargo.toml` and still want
  compression enabled, change the feature selection as below:

  Before: `"compress"`
  After: `"compress-brotli", "compress-gzip", "compress-zstd"`
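A minimal sketch of opting in explicitly, as the first note above advises; it assumes actix-web v4's `middleware::{NormalizePath, TrailingSlash}` API and an otherwise empty app:

```rust
use actix_web::{middleware::{NormalizePath, TrailingSlash}, App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // NormalizePath::new(TrailingSlash::Trim) makes the trailing-slash behavior
    // explicit instead of relying on NormalizePath::default(), which now logs a warning.
    HttpServer::new(|| App::new().wrap(NormalizePath::new(TrailingSlash::Trim)))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```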
## 3.0.0

README.md (10 changed lines)
@@ -6,10 +6,10 @@
<p>

[](https://crates.io/crates/actix-web)
[](https://docs.rs/actix-web/4.0.0-beta.7)
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
[](https://docs.rs/actix-web/4.0.0-beta.10)
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)

[](https://deps.rs/crate/actix-web/4.0.0-beta.7)
[](https://deps.rs/crate/actix-web/4.0.0-beta.10)
<br />
[](https://github.com/actix/actix-web/actions)
[](https://codecov.io/gh/actix/actix-web)

@@ -25,14 +25,14 @@
* Streaming and pipelining
* Keep-alive and slow requests handling
* Client/server [WebSockets](https://actix.rs/docs/websockets/) support
* Transparent content compression/decompression (br, gzip, deflate)
* Transparent content compression/decompression (br, gzip, deflate, zstd)
* Powerful [request routing](https://actix.rs/docs/url-dispatch/)
* Multipart streams
* Static assets
* SSL support using OpenSSL or Rustls
* Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/))
* Includes an async [HTTP client](https://docs.rs/awc/)
* Runs on stable Rust 1.46+
* Runs on stable Rust 1.52+

## Documentation
@@ -1,6 +1,19 @@
# Changes

## Unreleased - 2021-xx-xx
* Minimum supported Rust version (MSRV) is now 1.52.


## 0.6.0-beta.7 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.


## 0.6.0-beta.6 - 2021-06-26
* Added `Files::path_filter()`. [#2274]
* `Files::show_files_listing()` can now be used with `Files::index_file()` to show files listing as a fallback when the index file is not found. [#2228] (see the sketch after this changelog excerpt)

[#2274]: https://github.com/actix/actix-web/pull/2274
[#2228]: https://github.com/actix/actix-web/pull/2228


## 0.6.0-beta.5 - 2021-06-17

@@ -16,12 +29,11 @@

## 0.6.0-beta.4 - 2021-04-02
* No notable changes.

* Add support for `.guard` in `Files` to selectively filter `Files` services. [#2046]

[#2046]: https://github.com/actix/actix-web/pull/2046


## 0.6.0-beta.3 - 2021-03-09
* No notable changes.
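A brief sketch (not from the diff) combining the features listed above — `Files::index_file` with `Files::show_files_listing` as a fallback, plus a `.guard`; the mount path, directory, and bind address are invented for illustration:

```rust
use actix_files::Files;
use actix_web::{guard, App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new().service(
            // serves ./static under /assets; shows a directory listing whenever
            // index.html is missing, and only answers GET requests
            Files::new("/assets", "./static")
                .index_file("index.html")
                .show_files_listing()
                .guard(guard::Get()),
        )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```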
@@ -1,13 +1,11 @@
[package]
name = "actix-files"
version = "0.6.0-beta.5"
version = "0.6.0-beta.7"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Static file serving for Actix Web"
readme = "README.md"
keywords = ["actix", "http", "async", "futures"]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git"
documentation = "https://docs.rs/actix-files/"
repository = "https://github.com/actix/actix-web"
categories = ["asynchronous", "web-programming::http-server"]
license = "MIT OR Apache-2.0"
edition = "2018"

@@ -17,8 +15,8 @@ name = "actix_files"
path = "src/lib.rs"

[dependencies]
actix-web = { version = "4.0.0-beta.7", default-features = false }
actix-http = "3.0.0-beta.7"
actix-web = { version = "4.0.0-beta.10", default-features = false }
actix-http = "3.0.0-beta.11"
actix-service = "2.0.0"
actix-utils = "3.0.0"

@@ -35,5 +33,5 @@ percent-encoding = "2.1"

[dev-dependencies]
actix-rt = "2.2"
actix-web = "4.0.0-beta.7"
actix-test = "0.1.0-beta.2"
actix-web = "4.0.0-beta.10"
actix-test = "0.1.0-beta.5"
@@ -3,17 +3,16 @@
> Static file serving for Actix Web

[](https://crates.io/crates/actix-files)
[](https://docs.rs/actix-files/0.6.0-beta.5)
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
[](https://docs.rs/actix-files/0.6.0-beta.7)
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)

<br />
[](https://deps.rs/crate/actix-files/0.6.0-beta.5)
[](https://deps.rs/crate/actix-files/0.6.0-beta.7)
[](https://crates.io/crates/actix-files)
[](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[](https://discord.gg/NWpN5mmg3x)

## Documentation & Resources

- [API Documentation](https://docs.rs/actix-files/)
- [Example Project](https://github.com/actix/examples/tree/master/basics/static_index)
- [Chat on Gitter](https://gitter.im/actix/actix-web)
- Minimum supported Rust version: 1.46 or later
- Minimum Supported Rust Version (MSRV): 1.52
@@ -21,6 +21,7 @@ impl ResponseError for FilesError {
}
}

#[allow(clippy::enum_variant_names)]
#[derive(Display, Debug, PartialEq)]
pub enum UriSegmentError {
/// The segment started with the wrapped invalid character.
@@ -1,9 +1,17 @@
use std::{cell::RefCell, fmt, io, path::PathBuf, rc::Rc};
use std::{
cell::RefCell,
fmt, io,
path::{Path, PathBuf},
rc::Rc,
};

use actix_service::{boxed, IntoServiceFactory, ServiceFactory, ServiceFactoryExt};
use actix_utils::future::ok;
use actix_web::{
dev::{AppService, HttpServiceFactory, ResourceDef, ServiceRequest, ServiceResponse},
dev::{
AppService, HttpServiceFactory, RequestHead, ResourceDef, ServiceRequest,
ServiceResponse,
},
error::Error,
guard::Guard,
http::header::DispositionType,

@@ -13,7 +21,7 @@ use futures_core::future::LocalBoxFuture;

use crate::{
directory_listing, named, Directory, DirectoryRenderer, FilesService, HttpNewService,
MimeOverride,
MimeOverride, PathFilter,
};

/// Static files handling service.

@@ -36,6 +44,7 @@ pub struct Files {
default: Rc<RefCell<Option<Rc<HttpNewService>>>>,
renderer: Rc<DirectoryRenderer>,
mime_override: Option<Rc<MimeOverride>>,
path_filter: Option<Rc<PathFilter>>,
file_flags: named::Flags,
use_guards: Option<Rc<dyn Guard>>,
guards: Vec<Rc<dyn Guard>>,

@@ -60,6 +69,7 @@ impl Clone for Files {
file_flags: self.file_flags,
path: self.path.clone(),
mime_override: self.mime_override.clone(),
path_filter: self.path_filter.clone(),
use_guards: self.use_guards.clone(),
guards: self.guards.clone(),
hidden_files: self.hidden_files,

@@ -96,7 +106,7 @@ impl Files {
};

Files {
path: mount_path.to_owned(),
path: mount_path.trim_end_matches('/').to_owned(),
directory: dir,
index: None,
show_index: false,

@@ -104,6 +114,7 @@ impl Files {
default: Rc::new(RefCell::new(None)),
renderer: Rc::new(directory_listing),
mime_override: None,
path_filter: None,
file_flags: named::Flags::default(),
use_guards: None,
guards: Vec::new(),

@@ -114,6 +125,9 @@ impl Files {
/// Show files listing for directories.
///
/// By default show files listing is disabled.
///
/// When used with [`Files::index_file()`], files listing is shown as a fallback
/// when the index file is not found.
pub fn show_files_listing(mut self) -> Self {
self.show_index = true;
self

@@ -146,10 +160,45 @@ impl Files {
self
}

/// Sets path filtering closure.
///
/// The path provided to the closure is relative to `serve_from` path.
/// You can safely join this path with the `serve_from` path to get the real path.
/// However, the real path may not exist since the filter is called before checking path existence.
///
/// When a path doesn't pass the filter, [`Files::default_handler`] is called if set, otherwise,
/// `404 Not Found` is returned.
///
/// # Examples
/// ```
/// use std::path::Path;
/// use actix_files::Files;
///
/// // prevent searching subdirectories and following symlinks
/// let files_service = Files::new("/", "./static").path_filter(|path, _| {
/// path.components().count() == 1
/// && Path::new("./static")
/// .join(path)
/// .symlink_metadata()
/// .map(|m| !m.file_type().is_symlink())
/// .unwrap_or(false)
/// });
/// ```
pub fn path_filter<F>(mut self, f: F) -> Self
where
F: Fn(&Path, &RequestHead) -> bool + 'static,
{
self.path_filter = Some(Rc::new(f));
self
}

/// Set index file
///
/// Shows specific index file for directory "/" instead of
/// Shows specific index file for directories instead of
/// showing files listing.
///
/// If the index file is not found, files listing is shown as a fallback if
/// [`Files::show_files_listing()`] is set.
pub fn index_file<T: Into<String>>(mut self, index: T) -> Self {
self.index = Some(index.into());
self

@@ -312,6 +361,7 @@ impl ServiceFactory<ServiceRequest> for Files {
default: None,
renderer: self.renderer.clone(),
mime_override: self.mime_override.clone(),
path_filter: self.path_filter.clone(),
file_flags: self.file_flags,
guards: self.use_guards.clone(),
hidden_files: self.hidden_files,
@@ -16,11 +16,12 @@

use actix_service::boxed::{BoxService, BoxServiceFactory};
use actix_web::{
dev::{ServiceRequest, ServiceResponse},
dev::{RequestHead, ServiceRequest, ServiceResponse},
error::Error,
http::header::DispositionType,
};
use mime_guess::from_ext;
use std::path::Path;

mod chunked;
mod directory;

@@ -56,6 +57,8 @@ pub fn file_extension_to_mime(ext: &str) -> mime::Mime {

type MimeOverride = dyn Fn(&mime::Name<'_>) -> DispositionType;

type PathFilter = dyn Fn(&Path, &RequestHead) -> bool;

#[cfg(test)]
mod tests {
use std::{

@@ -80,7 +83,7 @@ mod tests {

use super::*;

#[actix_rt::test]
#[actix_web::test]
async fn test_file_extension_to_mime() {
let m = file_extension_to_mime("");
assert_eq!(m, mime::APPLICATION_OCTET_STREAM);

@@ -872,4 +875,69 @@ mod tests {
"inline; filename=\"symlink-test.png\""
);
}

#[actix_rt::test]
async fn test_index_with_show_files_listing() {
let service = Files::new(".", ".")
.index_file("lib.rs")
.show_files_listing()
.new_service(())
.await
.unwrap();

// Serve the index if exists
let req = TestRequest::default().uri("/src").to_srv_request();
let resp = test::call_service(&service, req).await;
assert_eq!(resp.status(), StatusCode::OK);
assert_eq!(
resp.headers().get(header::CONTENT_TYPE).unwrap(),
"text/x-rust"
);

// Show files listing, otherwise.
let req = TestRequest::default().uri("/tests").to_srv_request();
let resp = test::call_service(&service, req).await;
assert_eq!(
resp.headers().get(header::CONTENT_TYPE).unwrap(),
"text/html; charset=utf-8"
);
let bytes = test::read_body(resp).await;
assert!(format!("{:?}", bytes).contains("/tests/test.png"));
}

#[actix_rt::test]
async fn test_path_filter() {
// prevent searching subdirectories
let st = Files::new("/", ".")
.path_filter(|path, _| path.components().count() == 1)
.new_service(())
.await
.unwrap();

let req = TestRequest::with_uri("/Cargo.toml").to_srv_request();
let resp = test::call_service(&st, req).await;
assert_eq!(resp.status(), StatusCode::OK);

let req = TestRequest::with_uri("/src/lib.rs").to_srv_request();
let resp = test::call_service(&st, req).await;
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
}

#[actix_rt::test]
async fn test_default_handler_filter() {
let st = Files::new("/", ".")
.default_handler(|req: ServiceRequest| {
ok(req.into_response(HttpResponse::Ok().body("default content")))
})
.path_filter(|path, _| path.extension() == Some("png".as_ref()))
.new_service(())
.await
.unwrap();
let req = TestRequest::with_uri("/Cargo.toml").to_srv_request();
let resp = test::call_service(&st, req).await;

assert_eq!(resp.status(), StatusCode::OK);
let bytes = test::read_body(resp).await;
assert_eq!(bytes, web::Bytes::from_static(b"default content"));
}
}
@@ -355,8 +355,8 @@ impl NamedFile {
} else if let (Some(ref m), Some(header::IfUnmodifiedSince(ref since))) =
(last_modified, req.get_header())
{
let t1: SystemTime = m.clone().into();
let t2: SystemTime = since.clone().into();
let t1: SystemTime = (*m).into();
let t2: SystemTime = (*since).into();

match (t1.duration_since(UNIX_EPOCH), t2.duration_since(UNIX_EPOCH)) {
(Ok(t1), Ok(t2)) => t1.as_secs() > t2.as_secs(),

@@ -374,8 +374,8 @@ impl NamedFile {
} else if let (Some(ref m), Some(header::IfModifiedSince(ref since))) =
(last_modified, req.get_header())
{
let t1: SystemTime = m.clone().into();
let t2: SystemTime = since.clone().into();
let t1: SystemTime = (*m).into();
let t2: SystemTime = (*since).into();

match (t1.duration_since(UNIX_EPOCH), t2.duration_since(UNIX_EPOCH)) {
(Ok(t1), Ok(t2)) => t1.as_secs() <= t2.as_secs(),
@@ -59,7 +59,6 @@ impl AsRef<Path> for PathBufWrap {
impl FromRequest for PathBufWrap {
type Error = UriSegmentError;
type Future = Ready<Result<Self, Self::Error>>;
type Config = ();

fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
ready(req.match_info().path().parse())
@@ -13,7 +13,7 @@ use futures_core::future::LocalBoxFuture;

use crate::{
named, Directory, DirectoryRenderer, FilesError, HttpService, MimeOverride, NamedFile,
PathBufWrap,
PathBufWrap, PathFilter,
};

/// Assembled file serving service.

@@ -25,6 +25,7 @@ pub struct FilesService {
pub(crate) default: Option<HttpService>,
pub(crate) renderer: Rc<DirectoryRenderer>,
pub(crate) mime_override: Option<Rc<MimeOverride>>,
pub(crate) path_filter: Option<Rc<PathFilter>>,
pub(crate) file_flags: named::Flags,
pub(crate) guards: Option<Rc<dyn Guard>>,
pub(crate) hidden_files: bool,

@@ -82,6 +83,18 @@ impl Service<ServiceRequest> for FilesService {
Err(e) => return Box::pin(ok(req.error_response(e))),
};

if let Some(filter) = &self.path_filter {
if !filter(real_path.as_ref(), req.head()) {
if let Some(ref default) = self.default {
return Box::pin(default.call(req));
} else {
return Box::pin(ok(
req.into_response(actix_web::HttpResponse::NotFound().finish())
));
}
}
}

// full file path
let path = self.directory.join(&real_path);
if let Err(err) = path.canonicalize() {

@@ -102,26 +115,20 @@ impl Service<ServiceRequest> for FilesService {
)));
}

if let Some(ref redir_index) = self.index {
let path = path.join(redir_index);

match NamedFile::open(path) {
Ok(mut named_file) => {
if let Some(ref mime_override) = self.mime_override {
let new_disposition =
mime_override(&named_file.content_type.type_());
named_file.content_disposition.disposition = new_disposition;
}
named_file.flags = self.file_flags;

let (req, _) = req.into_parts();
let res = named_file.into_response(&req);
Box::pin(ok(ServiceResponse::new(req, res)))
}
Err(err) => self.handle_err(err, req),
let serve_named_file = |req: ServiceRequest, mut named_file: NamedFile| {
if let Some(ref mime_override) = self.mime_override {
let new_disposition = mime_override(&named_file.content_type.type_());
named_file.content_disposition.disposition = new_disposition;
}
} else if self.show_index {
let dir = Directory::new(self.directory.clone(), path);
named_file.flags = self.file_flags;

let (req, _) = req.into_parts();
let res = named_file.into_response(&req);
Box::pin(ok(ServiceResponse::new(req, res)))
};

let show_index = |req: ServiceRequest| {
let dir = Directory::new(self.directory.clone(), path.clone());

let (req, _) = req.into_parts();
let x = (self.renderer)(&dir, &req);

@@ -130,11 +137,19 @@ impl Service<ServiceRequest> for FilesService {
Ok(resp) => ok(resp),
Err(err) => ok(ServiceResponse::from_err(err, req)),
})
} else {
Box::pin(ok(ServiceResponse::from_err(
};

match self.index {
Some(ref index) => match NamedFile::open(path.join(index)) {
Ok(named_file) => serve_named_file(req, named_file),
Err(_) if self.show_index => show_index(req),
Err(err) => self.handle_err(err, req),
},
None if self.show_index => show_index(req),
_ => Box::pin(ok(ServiceResponse::from_err(
FilesError::IsDirectory,
req.into_parts().0,
)))
))),
}
} else {
match NamedFile::open(path) {
@@ -8,7 +8,7 @@ use actix_web::{
App,
};

#[actix_rt::test]
#[actix_web::test]
async fn test_utf8_file_contents() {
// use default ISO-8859-1 encoding
let srv = test::init_service(App::new().service(Files::new("/", "./tests"))).await;
@@ -7,7 +7,7 @@ use actix_web::{
};
use bytes::Bytes;

#[actix_rt::test]
#[actix_web::test]
async fn test_guard_filter() {
let srv = test::init_service(
App::new()
@@ -1,6 +1,11 @@
# Changes

## Unreleased - 2021-xx-xx
* Minimum supported Rust version (MSRV) is now 1.52.


## 3.0.0-beta.5 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.


## 3.0.0-beta.4 - 2021-04-02
@@ -1,18 +1,18 @@
[package]
name = "actix-http-test"
version = "3.0.0-beta.4"
version = "3.0.0-beta.5"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Various helpers for Actix applications to use during testing"
readme = "README.md"
keywords = ["http", "web", "framework", "async", "futures"]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git"
documentation = "https://docs.rs/actix-http-test/"
categories = ["network-programming", "asynchronous",
"web-programming::http-server",
"web-programming::websocket"]
categories = [
"network-programming",
"asynchronous",
"web-programming::http-server",
"web-programming::websocket",
]
license = "MIT OR Apache-2.0"
exclude = [".gitignore", ".cargo/config"]
edition = "2018"

[package.metadata.docs.rs]

@@ -31,11 +31,11 @@ openssl = ["tls-openssl", "awc/openssl"]
[dependencies]
actix-service = "2.0.0"
actix-codec = "0.4.0"
actix-tls = "3.0.0-beta.5"
actix-tls = "3.0.0-beta.7"
actix-utils = "3.0.0"
actix-rt = "2.2"
actix-server = "2.0.0-beta.3"
awc = { version = "3.0.0-beta.6", default-features = false }
awc = { version = "3.0.0-beta.9", default-features = false }

base64 = "0.13"
bytes = "1"

@@ -47,9 +47,8 @@ serde = "1.0"
serde_json = "1.0"
slab = "0.4"
serde_urlencoded = "0.7"
time = { version = "0.2.23", default-features = false, features = ["std"] }
tls-openssl = { version = "0.10.9", package = "openssl", optional = true }

[dev-dependencies]
actix-web = { version = "4.0.0-beta.7", default-features = false, features = ["cookies"] }
actix-http = "3.0.0-beta.7"
actix-web = { version = "4.0.0-beta.10", default-features = false, features = ["cookies"] }
actix-http = "3.0.0-beta.11"
@@ -3,13 +3,15 @@
> Various helpers for Actix applications to use during testing.

[](https://crates.io/crates/actix-http-test)
[](https://docs.rs/actix-http-test/3.0.0-beta.4)
[](https://docs.rs/actix-http-test/3.0.0-beta.5)
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)

[](https://deps.rs/crate/actix-http-test/3.0.0-beta.4)
[](https://gitter.im/actix/actix-web?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
<br>
[](https://deps.rs/crate/actix-http-test/3.0.0-beta.5)
[](https://crates.io/crates/actix-http-test)
[](https://discord.gg/NWpN5mmg3x)

## Documentation & Resources

- [API Documentation](https://docs.rs/actix-http-test)
- [Chat on Gitter](https://gitter.im/actix/actix-web)
- Minimum Supported Rust Version (MSRV): 1.46.0
- Minimum Supported Rust Version (MSRV): 1.52
@@ -7,8 +7,7 @@
#[cfg(feature = "openssl")]
extern crate tls_openssl as openssl;

use std::sync::mpsc;
use std::{net, thread, time};
use std::{net, sync::mpsc, thread, time::Duration};

use actix_codec::{AsyncRead, AsyncWrite, Framed};
use actix_rt::{net::TcpStream, System};

@@ -37,7 +36,7 @@ use socket2::{Domain, Protocol, Socket, Type};
/// Ok(HttpResponse::Ok().into())
/// }
///
/// #[actix_rt::test]
/// #[actix_web::test]
/// async fn test_example() {
/// let mut srv = TestServer::start(
/// || HttpService::new(

@@ -95,15 +94,15 @@ pub async fn test_server_with_addr<F: ServiceFactory<TcpStream>>(
.set_alpn_protos(b"\x02h2\x08http/1.1")
.map_err(|e| log::error!("Can not set alpn protocol: {:?}", e));
Connector::new()
.conn_lifetime(time::Duration::from_secs(0))
.timeout(time::Duration::from_millis(30000))
.conn_lifetime(Duration::from_secs(0))
.timeout(Duration::from_millis(30000))
.ssl(builder.build())
}
#[cfg(not(feature = "openssl"))]
{
Connector::new()
.conn_lifetime(time::Duration::from_secs(0))
.timeout(time::Duration::from_millis(30000))
.conn_lifetime(Duration::from_secs(0))
.timeout(Duration::from_millis(30000))
}
};
@@ -3,6 +3,46 @@

## Unreleased - 2021-xx-xx


## 3.0.0-beta.11 - 2021-10-20
### Changed
* Updated rustls to v0.20. [#2414]
* Minimum supported Rust version (MSRV) is now 1.52.

[#2414]: https://github.com/actix/actix-web/pull/2414


## 3.0.0-beta.10 - 2021-09-09
### Changed
* `ContentEncoding` is now marked `#[non_exhaustive]`. [#2377]
* Minimum supported Rust version (MSRV) is now 1.51.

### Fixed
* Remove slice creation pointing to potential uninitialized data on h1 encoder. [#2364]
* Remove `Into<Error>` bound on `Encoder` body types. [#2375]
* Fix quality parse error in Accept-Encoding header. [#2344]

[#2364]: https://github.com/actix/actix-web/pull/2364
[#2375]: https://github.com/actix/actix-web/pull/2375
[#2344]: https://github.com/actix/actix-web/pull/2344
[#2377]: https://github.com/actix/actix-web/pull/2377


## 3.0.0-beta.9 - 2021-08-09
### Fixed
* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977)


## 3.0.0-beta.8 - 2021-06-26
### Changed
* Change compression algorithm features flags. [#2250]

### Removed
* `downcast` and `downcast_get_type_id` macros. [#2291]

[#2291]: https://github.com/actix/actix-web/pull/2291
[#2250]: https://github.com/actix/actix-web/pull/2250


## 3.0.0-beta.7 - 2021-06-17
### Added
* Alias `body::Body` as `body::AnyBody`. [#2215]

@@ -199,6 +239,11 @@
[#1878]: https://github.com/actix/actix-web/pull/1878


## 2.2.1 - 2021-08-09
### Fixed
* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977)


## 2.2.0 - 2020-11-25
### Added
* HttpResponse builders for 1xx status codes. [#1768]
@@ -1,22 +1,23 @@
[package]
name = "actix-http"
version = "3.0.0-beta.7"
version = "3.0.0-beta.11"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "HTTP primitives for the Actix ecosystem"
readme = "README.md"
keywords = ["actix", "http", "framework", "async", "futures"]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git"
documentation = "https://docs.rs/actix-http/"
categories = ["network-programming", "asynchronous",
"web-programming::http-server",
"web-programming::websocket"]
categories = [
"network-programming",
"asynchronous",
"web-programming::http-server",
"web-programming::websocket",
]
license = "MIT OR Apache-2.0"
edition = "2018"

[package.metadata.docs.rs]
# features that docs.rs will build with
features = ["openssl", "rustls", "compress"]
features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"]

[lib]
name = "actix_http"

@@ -32,17 +33,23 @@ openssl = ["actix-tls/openssl"]
rustls = ["actix-tls/rustls"]

# enable compression support
compress = ["flate2", "brotli2", "zstd"]
compress-brotli = ["brotli2", "__compress"]
compress-gzip = ["flate2", "__compress"]
compress-zstd = ["zstd", "__compress"]

# trust-dns as client dns resolver
trust-dns = ["trust-dns-resolver"]

# Internal (PRIVATE!) features used to aid testing and checking feature status.
# Don't rely on these whatsoever. They may disappear at anytime.
__compress = []

[dependencies]
actix-service = "2.0.0"
actix-codec = "0.4.0"
actix-utils = "3.0.0"
actix-rt = "2.2"
actix-tls = { version = "3.0.0-beta.5", features = ["accept", "connect"] }
actix-tls = { version = "3.0.0-beta.7", features = ["accept", "connect"] }

ahash = "0.7"
base64 = "0.13"

@@ -55,7 +62,8 @@ futures-core = { version = "0.3.7", default-features = false, features = ["alloc
futures-util = { version = "0.3.7", default-features = false, features = ["alloc", "sink"] }
h2 = "0.3.1"
http = "0.2.2"
httparse = "1.3"
httparse = "1.5.1"
httpdate = "1.0.1"
itoa = "0.4"
language-tags = "0.3"
local-channel = "0.1"

@@ -66,11 +74,8 @@ percent-encoding = "2.1"
pin-project = "1.0.0"
pin-project-lite = "0.2"
rand = "0.8"
regex = "1.3"
serde = "1.0"
sha-1 = "0.9"
smallvec = "1.6"
time = { version = "0.2.23", default-features = false, features = ["std"] }
smallvec = "1.6.1"
tokio = { version = "1.2", features = ["sync"] }

# compression

@@ -82,17 +87,18 @@ trust-dns-resolver = { version = "0.20.0", optional = true }

[dev-dependencies]
actix-server = "2.0.0-beta.3"
actix-http-test = { version = "3.0.0-beta.4", features = ["openssl"] }
actix-tls = { version = "3.0.0-beta.5", features = ["openssl"] }
actix-http-test = { version = "3.0.0-beta.5", features = ["openssl"] }
actix-tls = { version = "3.0.0-beta.7", features = ["openssl"] }
async-stream = "0.3"
criterion = { version = "0.3", features = ["html_reports"] }
env_logger = "0.8"
rcgen = "0.8"
regex = "1.3"
rustls-pemfile = "0.2"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
tls-openssl = { version = "0.10", package = "openssl" }
tls-rustls = { version = "0.19", package = "rustls" }
webpki = { version = "0.21.0" }
tls-openssl = { package = "openssl", version = "0.10.9" }
tls-rustls = { package = "rustls", version = "0.20.0" }

[[example]]
name = "ws"
@@ -3,19 +3,18 @@
> HTTP primitives for the Actix ecosystem.

[](https://crates.io/crates/actix-http)
[](https://docs.rs/actix-http/3.0.0-beta.7)
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
[](https://docs.rs/actix-http/3.0.0-beta.11)
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)

<br />
[](https://deps.rs/crate/actix-http/3.0.0-beta.7)
[](https://deps.rs/crate/actix-http/3.0.0-beta.11)
[](https://crates.io/crates/actix-http)
[](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[](https://discord.gg/NWpN5mmg3x)

## Documentation & Resources

- [API Documentation](https://docs.rs/actix-http)
- [Chat on Gitter](https://gitter.im/actix/actix-web)
- Minimum Supported Rust Version (MSRV): 1.46.0
- Minimum Supported Rust Version (MSRV): 1.52

## Example
@@ -78,12 +78,12 @@ impl HeaderIndex {
// test cases taken from:
// https://github.com/seanmonstar/httparse/blob/master/benches/parse.rs

const REQ_SHORT: &'static [u8] = b"\
const REQ_SHORT: &[u8] = b"\
GET / HTTP/1.0\r\n\
Host: example.com\r\n\
Cookie: session=60; user_id=1\r\n\r\n";

const REQ: &'static [u8] = b"\
const REQ: &[u8] = b"\
GET /wp-content/uploads/2010/03/hello-kitty-darth-vader-pink.jpg HTTP/1.1\r\n\
Host: www.kittyhell.com\r\n\
User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; ja-JP-mac; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 Pathtraq/0.9\r\n\

@@ -119,6 +119,8 @@ mod _original {
use std::mem::MaybeUninit;

pub fn parse_headers(src: &mut BytesMut) -> usize {
#![allow(clippy::uninit_assumed_init)]

let mut headers: [HeaderIndex; MAX_HEADERS] =
unsafe { MaybeUninit::uninit().assume_init() };
@@ -18,7 +18,8 @@ fn bench_write_camel_case(c: &mut Criterion) {
group.bench_with_input(BenchmarkId::new("New", i), bts, |b, bts| {
b.iter(|| {
let mut buf = black_box([0; 24]);
_new::write_camel_case(black_box(bts), &mut buf)
let len = black_box(bts.len());
_new::write_camel_case(black_box(bts), buf.as_mut_ptr(), len)
});
});
}

@@ -30,9 +31,12 @@ criterion_group!(benches, bench_write_camel_case);
criterion_main!(benches);

mod _new {
pub fn write_camel_case(value: &[u8], buffer: &mut [u8]) {
pub fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
// first copy entire (potentially wrong) slice to output
buffer[..value.len()].copy_from_slice(value);
let buffer = unsafe {
std::ptr::copy_nonoverlapping(value.as_ptr(), buf, len);
std::slice::from_raw_parts_mut(buf, len)
};

let mut iter = value.iter();
@@ -85,22 +85,31 @@ impl Stream for Heartbeat {
fn tls_config() -> rustls::ServerConfig {
use std::io::BufReader;

use rustls::{
internal::pemfile::{certs, pkcs8_private_keys},
NoClientAuth, ServerConfig,
};
use rustls::{Certificate, PrivateKey};
use rustls_pemfile::{certs, pkcs8_private_keys};

let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap();
let cert_file = cert.serialize_pem().unwrap();
let key_file = cert.serialize_private_key_pem();

let mut config = ServerConfig::new(NoClientAuth::new());
let cert_file = &mut BufReader::new(cert_file.as_bytes());
let key_file = &mut BufReader::new(key_file.as_bytes());

let cert_chain = certs(cert_file).unwrap();
let cert_chain = certs(cert_file)
.unwrap()
.into_iter()
.map(Certificate)
.collect();
let mut keys = pkcs8_private_keys(key_file).unwrap();
config.set_single_cert(cert_chain, keys.remove(0)).unwrap();

let mut config = rustls::ServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth()
.with_single_cert(cert_chain, PrivateKey(keys.remove(0)))
.unwrap();

config.alpn_protocols.push(b"http/1.1".to_vec());
config.alpn_protocols.push(b"h2".to_vec());

config
}
@@ -7,7 +7,7 @@ use std::{
};

use bytes::{Bytes, BytesMut};
use futures_core::{ready, Stream};
use futures_core::Stream;

use crate::error::Error;

@@ -74,14 +74,10 @@ impl MessageBody for AnyBody {
}
}

// TODO: MSRV 1.51: poll_map_err
AnyBody::Message(body) => match ready!(body.as_pin_mut().poll_next(cx)) {
Some(Err(err)) => {
Poll::Ready(Some(Err(Error::new_body().with_cause(err))))
}
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
None => Poll::Ready(None),
},
AnyBody::Message(body) => body
.as_pin_mut()
.poll_next(cx)
.map_err(|err| Error::new_body().with_cause(err)),
}
}
}

@@ -223,11 +219,9 @@ impl MessageBody for BoxAnyBody {
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<Result<Bytes, Self::Error>>> {
// TODO: MSRV 1.51: poll_map_err
match ready!(self.0.as_mut().poll_next(cx)) {
Some(Err(err)) => Poll::Ready(Some(Err(Error::new_body().with_cause(err)))),
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
None => Poll::Ready(None),
}
self.0
.as_mut()
.poll_next(cx)
.map_err(|err| Error::new_body().with_cause(err))
}
}
@@ -11,8 +11,6 @@ use bytes::{Bytes, BytesMut};
use futures_core::ready;
use pin_project_lite::pin_project;

use crate::error::Error;

use super::BodySize;

/// An interface for response bodies.

@@ -47,7 +45,6 @@ impl MessageBody for () {
impl<B> MessageBody for Box<B>
where
B: MessageBody + Unpin,
B::Error: Into<Error>,
{
type Error = B::Error;

@@ -66,7 +63,6 @@ where
impl<B> MessageBody for Pin<Box<B>>
where
B: MessageBody,
B::Error: Into<Error>,
{
type Error = B::Error;
@ -80,7 +80,7 @@ mod tests {
|
||||
impl Body {
|
||||
pub(crate) fn get_ref(&self) -> &[u8] {
|
||||
match *self {
|
||||
Body::Bytes(ref bin) => &bin,
|
||||
Body::Bytes(ref bin) => bin,
|
||||
_ => panic!(),
|
||||
}
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ use std::{
|
||||
};
|
||||
|
||||
use bytes::Bytes;
|
||||
use futures_core::{ready, Stream};
|
||||
use futures_core::Stream;
|
||||
use pin_project::pin_project;
|
||||
|
||||
use crate::error::Error;
|
||||
@ -77,12 +77,7 @@ where
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<Option<Self::Item>> {
|
||||
match self.project() {
|
||||
// TODO: MSRV 1.51: poll_map_err
|
||||
ResponseBodyProj::Body(body) => match ready!(body.poll_next(cx)) {
|
||||
Some(Err(err)) => Poll::Ready(Some(Err(err.into()))),
|
||||
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
|
||||
None => Poll::Ready(None),
|
||||
},
|
||||
ResponseBodyProj::Body(body) => body.poll_next(cx).map_err(Into::into),
|
||||
ResponseBodyProj::Other(body) => Pin::new(body).poll_next(cx),
|
||||
}
|
||||
}
|
||||
|
@ -28,18 +28,13 @@ use super::pool::ConnectionPool;
|
||||
use super::Connect;
|
||||
use super::Protocol;
|
||||
|
||||
#[cfg(feature = "openssl")]
|
||||
use actix_tls::connect::ssl::openssl::SslConnector as OpensslConnector;
|
||||
#[cfg(feature = "rustls")]
|
||||
use actix_tls::connect::ssl::rustls::ClientConfig;
|
||||
|
||||
enum SslConnector {
|
||||
#[allow(dead_code)]
|
||||
None,
|
||||
#[cfg(feature = "openssl")]
|
||||
Openssl(OpensslConnector),
|
||||
Openssl(actix_tls::connect::ssl::openssl::SslConnector),
|
||||
#[cfg(feature = "rustls")]
|
||||
Rustls(std::sync::Arc<ClientConfig>),
|
||||
Rustls(std::sync::Arc<actix_tls::connect::ssl::rustls::ClientConfig>),
|
||||
}
|
||||
|
||||
/// Manages HTTP client network connectivity.
|
||||
@ -78,41 +73,50 @@ impl Connector<()> {
|
||||
}
|
||||
}
|
||||
|
||||
// Build Ssl connector with openssl, based on supplied alpn protocols
|
||||
#[cfg(feature = "openssl")]
|
||||
/// Provides an empty TLS connector when no TLS feature is enabled.
|
||||
#[cfg(not(any(feature = "openssl", feature = "rustls")))]
|
||||
fn build_ssl(_: Vec<Vec<u8>>) -> SslConnector {
|
||||
SslConnector::None
|
||||
}
|
||||
|
||||
/// Build TLS connector with rustls, based on supplied ALPN protocols
|
||||
///
|
||||
/// Note that if both `openssl` and `rustls` features are enabled, rustls will be used.
|
||||
#[cfg(feature = "rustls")]
|
||||
fn build_ssl(protocols: Vec<Vec<u8>>) -> SslConnector {
|
||||
use actix_tls::connect::ssl::openssl::SslMethod;
|
||||
use actix_tls::connect::tls::rustls::{webpki_roots_cert_store, ClientConfig};
|
||||
|
||||
let mut config = ClientConfig::builder()
|
||||
.with_safe_defaults()
|
||||
.with_root_certificates(webpki_roots_cert_store())
|
||||
.with_no_client_auth();
|
||||
|
||||
config.alpn_protocols = protocols;
|
||||
|
||||
SslConnector::Rustls(std::sync::Arc::new(config))
|
||||
}
|
||||
|
||||
/// Build TLS connector with openssl, based on supplied ALPN protocols
|
||||
#[cfg(all(feature = "openssl", not(feature = "rustls")))]
|
||||
fn build_ssl(protocols: Vec<Vec<u8>>) -> SslConnector {
|
||||
use actix_tls::connect::tls::openssl::{
|
||||
SslConnector as OpensslConnector, SslMethod,
|
||||
};
|
||||
use bytes::{BufMut, BytesMut};
|
||||
|
||||
let mut alpn = BytesMut::with_capacity(20);
|
||||
for proto in protocols.iter() {
|
||||
for proto in &protocols {
|
||||
alpn.put_u8(proto.len() as u8);
|
||||
alpn.put(proto.as_slice());
|
||||
}
|
||||
|
||||
let mut ssl = OpensslConnector::builder(SslMethod::tls()).unwrap();
|
||||
let _ = ssl
|
||||
.set_alpn_protos(&alpn)
|
||||
.map_err(|e| error!("Can not set alpn protocol: {:?}", e));
|
||||
if let Err(err) = ssl.set_alpn_protos(&alpn) {
|
||||
error!("Can not set ALPN protocol: {:?}", err);
|
||||
}
|
||||
|
||||
SslConnector::Openssl(ssl.build())
|
||||
}
|
||||
|
||||
// Build Ssl connector with rustls, based on supplied alpn protocols
|
||||
#[cfg(all(not(feature = "openssl"), feature = "rustls"))]
|
||||
fn build_ssl(protocols: Vec<Vec<u8>>) -> SslConnector {
|
||||
let mut config = ClientConfig::new();
|
||||
config.set_protocols(&protocols);
|
||||
config.root_store.add_server_trust_anchors(
|
||||
&actix_tls::connect::ssl::rustls::TLS_SERVER_ROOTS,
|
||||
);
|
||||
SslConnector::Rustls(std::sync::Arc::new(config))
|
||||
}
|
||||
|
||||
// ssl turned off, provides empty ssl connector
|
||||
#[cfg(not(any(feature = "openssl", feature = "rustls")))]
|
||||
fn build_ssl(_: Vec<Vec<u8>>) -> SslConnector {
|
||||
SslConnector::None
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> Connector<S> {
|
||||
@ -167,14 +171,20 @@ where
|
||||
|
||||
#[cfg(feature = "openssl")]
|
||||
/// Use custom `SslConnector` instance.
|
||||
pub fn ssl(mut self, connector: OpensslConnector) -> Self {
|
||||
pub fn ssl(
|
||||
mut self,
|
||||
connector: actix_tls::connect::ssl::openssl::SslConnector,
|
||||
) -> Self {
|
||||
self.ssl = SslConnector::Openssl(connector);
|
||||
self
|
||||
}
|
||||
|
||||
#[cfg(feature = "rustls")]
|
||||
/// Use custom `SslConnector` instance.
|
||||
pub fn rustls(mut self, connector: std::sync::Arc<ClientConfig>) -> Self {
|
||||
pub fn rustls(
|
||||
mut self,
|
||||
connector: std::sync::Arc<actix_tls::connect::ssl::rustls::ClientConfig>,
|
||||
) -> Self {
|
||||
self.ssl = SslConnector::Rustls(connector);
|
||||
self
|
||||
}
|
||||
@ -290,8 +300,7 @@ where
|
||||
let h2 = sock
|
||||
.ssl()
|
||||
.selected_alpn_protocol()
|
||||
.map(|protos| protos.windows(2).any(|w| w == H2))
|
||||
.unwrap_or(false);
|
||||
.map_or(false, |protos| protos.windows(2).any(|w| w == H2));
|
||||
if h2 {
|
||||
(Box::new(sock), Protocol::Http2)
|
||||
} else {
|
||||
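The ALPN check above (and the matching rustls one in the next hunk) replaces `.map(..).unwrap_or(false)` with `.map_or(false, ..)`. The two are equivalent; `map_or` just skips the intermediate `Option<bool>` and satisfies clippy's `map_unwrap_or` lint. A minimal illustration:

```rust
fn main() {
    const H2: &[u8] = b"h2";
    let selected: Option<&[u8]> = Some(b"h2");

    let old_style = selected.map(|protos| protos.windows(2).any(|w| w == H2)).unwrap_or(false);
    let new_style = selected.map_or(false, |protos| protos.windows(2).any(|w| w == H2));

    assert_eq!(old_style, new_style);
    assert!(new_style);
}
```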
@ -314,19 +323,15 @@ where
|
||||
SslConnector::Rustls(tls) => {
|
||||
const H2: &[u8] = b"h2";
|
||||
|
||||
use actix_tls::connect::ssl::rustls::{
|
||||
RustlsConnector, Session, TlsStream,
|
||||
};
|
||||
use actix_tls::connect::ssl::rustls::{RustlsConnector, TlsStream};
|
||||
|
||||
impl<Io: ConnectionIo> IntoConnectionIo for TcpConnection<Uri, TlsStream<Io>> {
|
||||
fn into_connection_io(self) -> (Box<dyn ConnectionIo>, Protocol) {
|
||||
let sock = self.into_parts().0;
|
||||
let h2 = sock
|
||||
.get_ref()
|
||||
.1
|
||||
.get_alpn_protocol()
|
||||
.map(|protos| protos.windows(2).any(|w| w == H2))
|
||||
.unwrap_or(false);
|
||||
let h2 =
|
||||
sock.get_ref().1.alpn_protocol().map_or(false, |protos| {
|
||||
protos.windows(2).any(|w| w == H2)
|
||||
});
|
||||
if h2 {
|
||||
(Box::new(sock), Protocol::Http2)
|
||||
} else {
|
||||
|
@ -168,14 +168,13 @@ where
|
||||
|
||||
if let Err(e) = send.send_data(bytes, false) {
|
||||
return Err(e.into());
|
||||
} else {
|
||||
if !b.is_empty() {
|
||||
send.reserve_capacity(b.len());
|
||||
} else {
|
||||
buf = None;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if !b.is_empty() {
|
||||
send.reserve_capacity(b.len());
|
||||
} else {
|
||||
buf = None;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
Some(Err(e)) => return Err(e.into()),
|
||||
}
|
||||
|
@ -1,18 +1,19 @@
|
||||
use std::cell::Cell;
|
||||
use std::fmt::Write;
|
||||
use std::rc::Rc;
|
||||
use std::time::Duration;
|
||||
use std::{fmt, net};
|
||||
use std::{
|
||||
cell::Cell,
|
||||
fmt::{self, Write},
|
||||
net,
|
||||
rc::Rc,
|
||||
time::{Duration, SystemTime},
|
||||
};
|
||||
|
||||
use actix_rt::{
|
||||
task::JoinHandle,
|
||||
time::{interval, sleep_until, Instant, Sleep},
|
||||
};
|
||||
use bytes::BytesMut;
|
||||
use time::OffsetDateTime;
|
||||
|
||||
/// "Sun, 06 Nov 1994 08:49:37 GMT".len()
|
||||
const DATE_VALUE_LENGTH: usize = 29;
|
||||
pub(crate) const DATE_VALUE_LENGTH: usize = 29;
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Copy)]
|
||||
/// Server keep-alive setting
|
||||
@ -104,6 +105,8 @@ impl ServiceConfig {
|
||||
}
|
||||
|
||||
/// Returns the local address that this server is bound to.
|
||||
///
|
||||
/// Returns `None` for connections via UDS (Unix Domain Socket).
|
||||
#[inline]
|
||||
pub fn local_addr(&self) -> Option<net::SocketAddr> {
|
||||
self.0.local_addr
|
||||
@ -152,8 +155,8 @@ impl ServiceConfig {
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
/// Returns the keep-alive timer if keep-alive is configured.
|
||||
#[inline]
|
||||
pub fn keep_alive_timer(&self) -> Option<Sleep> {
|
||||
self.keep_alive().map(|ka| sleep_until(self.now() + ka))
|
||||
}
|
||||
@ -204,12 +207,7 @@ impl Date {
|
||||
|
||||
fn update(&mut self) {
|
||||
self.pos = 0;
|
||||
write!(
|
||||
self,
|
||||
"{}",
|
||||
OffsetDateTime::now_utc().format("%a, %d %b %Y %H:%M:%S GMT")
|
||||
)
|
||||
.unwrap();
|
||||
write!(self, "{}", httpdate::fmt_http_date(SystemTime::now())).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
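The `Date::update` change above swaps the `time` crate's custom format string for `httpdate::fmt_http_date`, which always emits the fixed-width IMF-fixdate form that `DATE_VALUE_LENGTH` (29) assumes. A quick standalone check of that invariant, using the same `httpdate` call:

```rust
use std::time::SystemTime;

fn main() {
    // IMF-fixdate is fixed-width, so the Unix epoch is a representative sample
    let date = httpdate::fmt_http_date(SystemTime::UNIX_EPOCH);
    assert_eq!(date, "Thu, 01 Jan 1970 00:00:00 GMT");
    assert_eq!(date.len(), 29); // matches DATE_VALUE_LENGTH
}
```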
@ -267,11 +265,11 @@ impl DateService {
|
||||
}
|
||||
|
||||
// TODO: move to a util module for testing all spawn handle drop style tasks.
|
||||
#[cfg(test)]
|
||||
/// Test Module for checking the drop state of certain async tasks that are spawned
|
||||
/// with `actix_rt::spawn`
|
||||
///
|
||||
/// The target task must explicitly generate `NotifyOnDrop` when spawning the task.
|
||||
#[cfg(test)]
|
||||
mod notify_on_drop {
|
||||
use std::cell::RefCell;
|
||||
|
||||
@ -281,9 +279,8 @@ mod notify_on_drop {
|
||||
|
||||
/// Check if the spawned task is dropped.
|
||||
///
|
||||
/// # Panic:
|
||||
///
|
||||
/// When there was no `NotifyOnDrop` instance on current thread
|
||||
/// # Panics
|
||||
/// Panics when there was no `NotifyOnDrop` instance on current thread.
|
||||
pub(crate) fn is_dropped() -> bool {
|
||||
NOTIFY_DROPPED.with(|bool| {
|
||||
bool.borrow()
|
||||
@ -326,7 +323,7 @@ mod notify_on_drop {
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
use actix_rt::task::yield_now;
|
||||
use actix_rt::{task::yield_now, time::sleep};
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_date_service_update() {
|
||||
@ -350,7 +347,14 @@ mod tests {
|
||||
assert_ne!(buf1, buf2);
|
||||
|
||||
drop(settings);
|
||||
assert!(notify_on_drop::is_dropped());
|
||||
|
||||
// Ensure the task is eventually dropped
|
||||
let mut times = 0;
|
||||
while !notify_on_drop::is_dropped() {
|
||||
sleep(Duration::from_millis(100)).await;
|
||||
times += 1;
|
||||
assert!(times < 10, "Timeout waiting for task drop");
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -365,14 +369,21 @@ mod tests {
|
||||
let clone3 = service.clone();
|
||||
|
||||
drop(clone1);
|
||||
assert_eq!(false, notify_on_drop::is_dropped());
|
||||
assert!(!notify_on_drop::is_dropped());
|
||||
drop(clone2);
|
||||
assert_eq!(false, notify_on_drop::is_dropped());
|
||||
assert!(!notify_on_drop::is_dropped());
|
||||
drop(clone3);
|
||||
assert_eq!(false, notify_on_drop::is_dropped());
|
||||
assert!(!notify_on_drop::is_dropped());
|
||||
|
||||
drop(service);
|
||||
assert!(notify_on_drop::is_dropped());
|
||||
|
||||
// Ensure the task is eventually dropped
|
||||
let mut times = 0;
|
||||
while !notify_on_drop::is_dropped() {
|
||||
sleep(Duration::from_millis(100)).await;
|
||||
times += 1;
|
||||
assert!(times < 10, "Timeout waiting for task drop");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -8,10 +8,16 @@ use std::{
|
||||
};
|
||||
|
||||
use actix_rt::task::{spawn_blocking, JoinHandle};
|
||||
use brotli2::write::BrotliDecoder;
|
||||
use bytes::Bytes;
|
||||
use flate2::write::{GzDecoder, ZlibDecoder};
|
||||
use futures_core::{ready, Stream};
|
||||
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
use brotli2::write::BrotliDecoder;
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
use flate2::write::{GzDecoder, ZlibDecoder};
|
||||
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
use zstd::stream::write::Decoder as ZstdDecoder;
|
||||
|
||||
use crate::{
|
||||
@ -37,15 +43,19 @@ where
|
||||
#[inline]
|
||||
pub fn new(stream: S, encoding: ContentEncoding) -> Decoder<S> {
|
||||
let decoder = match encoding {
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentEncoding::Br => Some(ContentDecoder::Br(Box::new(
|
||||
BrotliDecoder::new(Writer::new()),
|
||||
))),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoding::Deflate => Some(ContentDecoder::Deflate(Box::new(
|
||||
ZlibDecoder::new(Writer::new()),
|
||||
))),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoding::Gzip => Some(ContentDecoder::Gzip(Box::new(
|
||||
GzDecoder::new(Writer::new()),
|
||||
))),
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentEncoding::Zstd => Some(ContentDecoder::Zstd(Box::new(
|
||||
ZstdDecoder::new(Writer::new()).expect(
|
||||
"Failed to create zstd decoder. This is a bug. \
|
||||
@ -70,7 +80,7 @@ where
|
||||
let encoding = headers
|
||||
.get(&CONTENT_ENCODING)
|
||||
.and_then(|val| val.to_str().ok())
|
||||
.map(ContentEncoding::from)
|
||||
.and_then(|x| x.parse().ok())
|
||||
.unwrap_or(ContentEncoding::Identity);
|
||||
|
||||
Self::new(stream, encoding)
|
||||
@ -148,17 +158,22 @@ where
|
||||
}
|
||||
|
||||
enum ContentDecoder {
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
Deflate(Box<ZlibDecoder<Writer>>),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
Gzip(Box<GzDecoder<Writer>>),
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
Br(Box<BrotliDecoder<Writer>>),
|
||||
// We need an explicit 'static lifetime here because ZstdDecoder needs a lifetime
// argument and we use `spawn_blocking` in `Decoder::poll_next`, which requires `FnOnce() -> R + Send + 'static`
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
Zstd(Box<ZstdDecoder<'static, Writer>>),
|
||||
}
|
||||
|
||||
impl ContentDecoder {
|
||||
fn feed_eof(&mut self) -> io::Result<Option<Bytes>> {
|
||||
match self {
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentDecoder::Br(ref mut decoder) => match decoder.flush() {
|
||||
Ok(()) => {
|
||||
let b = decoder.get_mut().take();
|
||||
@ -172,6 +187,7 @@ impl ContentDecoder {
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentDecoder::Gzip(ref mut decoder) => match decoder.try_finish() {
|
||||
Ok(_) => {
|
||||
let b = decoder.get_mut().take();
|
||||
@ -185,6 +201,7 @@ impl ContentDecoder {
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentDecoder::Deflate(ref mut decoder) => match decoder.try_finish() {
|
||||
Ok(_) => {
|
||||
let b = decoder.get_mut().take();
|
||||
@ -197,6 +214,7 @@ impl ContentDecoder {
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentDecoder::Zstd(ref mut decoder) => match decoder.flush() {
|
||||
Ok(_) => {
|
||||
let b = decoder.get_mut().take();
|
||||
@ -213,6 +231,7 @@ impl ContentDecoder {
|
||||
|
||||
fn feed_data(&mut self, data: Bytes) -> io::Result<Option<Bytes>> {
|
||||
match self {
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentDecoder::Br(ref mut decoder) => match decoder.write_all(&data) {
|
||||
Ok(_) => {
|
||||
decoder.flush()?;
|
||||
@ -227,6 +246,7 @@ impl ContentDecoder {
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentDecoder::Gzip(ref mut decoder) => match decoder.write_all(&data) {
|
||||
Ok(_) => {
|
||||
decoder.flush()?;
|
||||
@ -241,6 +261,7 @@ impl ContentDecoder {
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentDecoder::Deflate(ref mut decoder) => match decoder.write_all(&data) {
|
||||
Ok(_) => {
|
||||
decoder.flush()?;
|
||||
@ -255,6 +276,7 @@ impl ContentDecoder {
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentDecoder::Zstd(ref mut decoder) => match decoder.write_all(&data) {
|
||||
Ok(_) => {
|
||||
decoder.flush()?;
|
||||
|
@ -9,12 +9,18 @@ use std::{
|
||||
};
|
||||
|
||||
use actix_rt::task::{spawn_blocking, JoinHandle};
|
||||
use brotli2::write::BrotliEncoder;
|
||||
use bytes::Bytes;
|
||||
use derive_more::Display;
|
||||
use flate2::write::{GzEncoder, ZlibEncoder};
|
||||
use futures_core::ready;
|
||||
use pin_project::pin_project;
|
||||
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
use brotli2::write::BrotliEncoder;
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
use flate2::write::{GzEncoder, ZlibEncoder};
|
||||
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
use zstd::stream::write::Encoder as ZstdEncoder;
|
||||
|
||||
use crate::{
|
||||
@ -23,7 +29,7 @@ use crate::{
|
||||
header::{ContentEncoding, CONTENT_ENCODING},
|
||||
HeaderValue, StatusCode,
|
||||
},
|
||||
Error, ResponseHead,
|
||||
ResponseHead,
|
||||
};
|
||||
|
||||
use super::Writer;
|
||||
@ -101,7 +107,6 @@ enum EncoderBody<B> {
|
||||
impl<B> MessageBody for EncoderBody<B>
|
||||
where
|
||||
B: MessageBody,
|
||||
B::Error: Into<Error>,
|
||||
{
|
||||
type Error = EncoderError<B::Error>;
|
||||
|
||||
@ -125,18 +130,9 @@ where
|
||||
Poll::Ready(Some(Ok(std::mem::take(b))))
|
||||
}
|
||||
}
|
||||
// TODO: MSRV 1.51: poll_map_err
|
||||
EncoderBodyProj::Stream(b) => match ready!(b.poll_next(cx)) {
|
||||
Some(Err(err)) => Poll::Ready(Some(Err(EncoderError::Body(err)))),
|
||||
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
|
||||
None => Poll::Ready(None),
|
||||
},
|
||||
EncoderBodyProj::Stream(b) => b.poll_next(cx).map_err(EncoderError::Body),
|
||||
EncoderBodyProj::BoxedStream(ref mut b) => {
|
||||
match ready!(b.as_pin_mut().poll_next(cx)) {
|
||||
Some(Err(err)) => Poll::Ready(Some(Err(EncoderError::Boxed(err)))),
|
||||
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
|
||||
None => Poll::Ready(None),
|
||||
}
|
||||
b.as_pin_mut().poll_next(cx).map_err(EncoderError::Boxed)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -145,7 +141,6 @@ where
|
||||
impl<B> MessageBody for Encoder<B>
|
||||
where
|
||||
B: MessageBody,
|
||||
B::Error: Into<Error>,
|
||||
{
|
||||
type Error = EncoderError<B::Error>;
|
||||
|
||||
@ -233,28 +228,36 @@ fn update_head(encoding: ContentEncoding, head: &mut ResponseHead) {
|
||||
}
|
||||
|
||||
enum ContentEncoder {
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
Deflate(ZlibEncoder<Writer>),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
Gzip(GzEncoder<Writer>),
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
Br(BrotliEncoder<Writer>),
|
||||
// We need an explicit 'static lifetime here because ZstdEncoder needs a lifetime
// argument and we use `spawn_blocking` in `Encoder::poll_next`, which requires `FnOnce() -> R + Send + 'static`
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
Zstd(ZstdEncoder<'static, Writer>),
|
||||
}
|
||||
|
||||
impl ContentEncoder {
|
||||
fn encoder(encoding: ContentEncoding) -> Option<Self> {
|
||||
match encoding {
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoding::Deflate => Some(ContentEncoder::Deflate(ZlibEncoder::new(
|
||||
Writer::new(),
|
||||
flate2::Compression::fast(),
|
||||
))),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoding::Gzip => Some(ContentEncoder::Gzip(GzEncoder::new(
|
||||
Writer::new(),
|
||||
flate2::Compression::fast(),
|
||||
))),
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentEncoding::Br => {
|
||||
Some(ContentEncoder::Br(BrotliEncoder::new(Writer::new(), 3)))
|
||||
}
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentEncoding::Zstd => {
|
||||
let encoder = ZstdEncoder::new(Writer::new(), 3).ok()?;
|
||||
Some(ContentEncoder::Zstd(encoder))
|
||||
@ -266,27 +269,35 @@ impl ContentEncoder {
|
||||
#[inline]
|
||||
pub(crate) fn take(&mut self) -> Bytes {
|
||||
match *self {
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentEncoder::Br(ref mut encoder) => encoder.get_mut().take(),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoder::Deflate(ref mut encoder) => encoder.get_mut().take(),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoder::Gzip(ref mut encoder) => encoder.get_mut().take(),
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentEncoder::Zstd(ref mut encoder) => encoder.get_mut().take(),
|
||||
}
|
||||
}
|
||||
|
||||
fn finish(self) -> Result<Bytes, io::Error> {
|
||||
match self {
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentEncoder::Br(encoder) => match encoder.finish() {
|
||||
Ok(writer) => Ok(writer.buf.freeze()),
|
||||
Err(err) => Err(err),
|
||||
},
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoder::Gzip(encoder) => match encoder.finish() {
|
||||
Ok(writer) => Ok(writer.buf.freeze()),
|
||||
Err(err) => Err(err),
|
||||
},
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoder::Deflate(encoder) => match encoder.finish() {
|
||||
Ok(writer) => Ok(writer.buf.freeze()),
|
||||
Err(err) => Err(err),
|
||||
},
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentEncoder::Zstd(encoder) => match encoder.finish() {
|
||||
Ok(writer) => Ok(writer.buf.freeze()),
|
||||
Err(err) => Err(err),
|
||||
@ -296,6 +307,7 @@ impl ContentEncoder {
|
||||
|
||||
fn write(&mut self, data: &[u8]) -> Result<(), io::Error> {
|
||||
match *self {
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentEncoder::Br(ref mut encoder) => match encoder.write_all(data) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
@ -303,6 +315,7 @@ impl ContentEncoder {
|
||||
Err(err)
|
||||
}
|
||||
},
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoder::Gzip(ref mut encoder) => match encoder.write_all(data) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
@ -310,6 +323,7 @@ impl ContentEncoder {
|
||||
Err(err)
|
||||
}
|
||||
},
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoder::Deflate(ref mut encoder) => match encoder.write_all(data) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
@ -317,6 +331,7 @@ impl ContentEncoder {
|
||||
Err(err)
|
||||
}
|
||||
},
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentEncoder::Zstd(ref mut encoder) => match encoder.write_all(data) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
|
@ -55,6 +55,8 @@ impl Error {
|
||||
Self::new(Kind::Io)
|
||||
}
|
||||
|
||||
// used in encoder behind feature flag so ignore unused warning
|
||||
#[allow(unused)]
|
||||
pub(crate) fn new_encoder() -> Self {
|
||||
Self::new(Kind::Encoder)
|
||||
}
|
||||
@ -125,7 +127,7 @@ impl fmt::Display for Error {
|
||||
|
||||
impl StdError for Error {
|
||||
fn source(&self) -> Option<&(dyn StdError + 'static)> {
|
||||
self.inner.cause.as_ref().map(|err| err.as_ref())
|
||||
self.inner.cause.as_ref().map(Box::as_ref)
|
||||
}
|
||||
}
|
||||
|
||||
@ -194,7 +196,7 @@ pub enum ParseError {
|
||||
#[display(fmt = "IO error: {}", _0)]
|
||||
Io(io::Error),
|
||||
|
||||
/// Parsing a field as string failed
|
||||
/// Parsing a field as string failed.
|
||||
#[display(fmt = "UTF8 error: {}", _0)]
|
||||
Utf8(Utf8Error),
|
||||
}
|
||||
|
actix-http/src/h1/chunked.rs (new file, 432 lines)
@ -0,0 +1,432 @@
|
||||
use std::{io, task::Poll};
|
||||
|
||||
use bytes::{Buf as _, Bytes, BytesMut};
|
||||
|
||||
macro_rules! byte (
|
||||
($rdr:ident) => ({
|
||||
if $rdr.len() > 0 {
|
||||
let b = $rdr[0];
|
||||
$rdr.advance(1);
|
||||
b
|
||||
} else {
|
||||
return Poll::Pending
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub(super) enum ChunkedState {
|
||||
Size,
|
||||
SizeLws,
|
||||
Extension,
|
||||
SizeLf,
|
||||
Body,
|
||||
BodyCr,
|
||||
BodyLf,
|
||||
EndCr,
|
||||
EndLf,
|
||||
End,
|
||||
}
|
||||
|
||||
impl ChunkedState {
|
||||
pub(super) fn step(
|
||||
&self,
|
||||
body: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
use self::ChunkedState::*;
|
||||
match *self {
|
||||
Size => ChunkedState::read_size(body, size),
|
||||
SizeLws => ChunkedState::read_size_lws(body),
|
||||
Extension => ChunkedState::read_extension(body),
|
||||
SizeLf => ChunkedState::read_size_lf(body, *size),
|
||||
Body => ChunkedState::read_body(body, size, buf),
|
||||
BodyCr => ChunkedState::read_body_cr(body),
|
||||
BodyLf => ChunkedState::read_body_lf(body),
|
||||
EndCr => ChunkedState::read_end_cr(body),
|
||||
EndLf => ChunkedState::read_end_lf(body),
|
||||
End => Poll::Ready(Ok(ChunkedState::End)),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_size(
|
||||
rdr: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
let radix = 16;
|
||||
|
||||
let rem = match byte!(rdr) {
|
||||
b @ b'0'..=b'9' => b - b'0',
|
||||
b @ b'a'..=b'f' => b + 10 - b'a',
|
||||
b @ b'A'..=b'F' => b + 10 - b'A',
|
||||
b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)),
|
||||
b';' => return Poll::Ready(Ok(ChunkedState::Extension)),
|
||||
b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => {
|
||||
return Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size line: Invalid Size",
|
||||
)));
|
||||
}
|
||||
};
|
||||
|
||||
match size.checked_mul(radix) {
|
||||
Some(n) => {
|
||||
*size = n as u64;
|
||||
*size += rem as u64;
|
||||
|
||||
Poll::Ready(Ok(ChunkedState::Size))
|
||||
}
|
||||
None => {
|
||||
log::debug!("chunk size would overflow u64");
|
||||
Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size line: Size is too big",
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_size_lws(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
// LWS can follow the chunk size, but no more digits can come
|
||||
b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)),
|
||||
b';' => Poll::Ready(Ok(ChunkedState::Extension)),
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size linear white space",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_extension(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
// strictly 0x20 (space) should be disallowed but we don't parse quoted strings here
|
||||
0x00..=0x08 | 0x0a..=0x1f | 0x7f => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid character in chunk extension",
|
||||
))),
|
||||
_ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
|
||||
}
|
||||
}
|
||||
fn read_size_lf(
|
||||
rdr: &mut BytesMut,
|
||||
size: u64,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' if size > 0 => Poll::Ready(Ok(ChunkedState::Body)),
|
||||
b'\n' if size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_body(
|
||||
rdr: &mut BytesMut,
|
||||
rem: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
log::trace!("Chunked read, remaining={:?}", rem);
|
||||
|
||||
let len = rdr.len() as u64;
|
||||
if len == 0 {
|
||||
Poll::Ready(Ok(ChunkedState::Body))
|
||||
} else {
|
||||
let slice;
|
||||
if *rem > len {
|
||||
slice = rdr.split().freeze();
|
||||
*rem -= len;
|
||||
} else {
|
||||
slice = rdr.split_to(*rem as usize).freeze();
|
||||
*rem = 0;
|
||||
}
|
||||
*buf = Some(slice);
|
||||
if *rem > 0 {
|
||||
Poll::Ready(Ok(ChunkedState::Body))
|
||||
} else {
|
||||
Poll::Ready(Ok(ChunkedState::BodyCr))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_body_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk body CR",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_body_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' => Poll::Ready(Ok(ChunkedState::Size)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk body LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_end_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk end CR",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_end_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' => Poll::Ready(Ok(ChunkedState::End)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk end LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use actix_codec::Decoder as _;
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use http::Method;
|
||||
|
||||
use crate::{
|
||||
error::ParseError,
|
||||
h1::decoder::{MessageDecoder, PayloadItem},
|
||||
HttpMessage as _, Request,
|
||||
};
|
||||
|
||||
macro_rules! parse_ready {
|
||||
($e:expr) => {{
|
||||
match MessageDecoder::<Request>::default().decode($e) {
|
||||
Ok(Some((msg, _))) => msg,
|
||||
Ok(_) => unreachable!("Eof during parsing http request"),
|
||||
Err(err) => unreachable!("Error during parsing http request: {:?}", err),
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! expect_parse_err {
|
||||
($e:expr) => {{
|
||||
match MessageDecoder::<Request>::default().decode($e) {
|
||||
Err(err) => match err {
|
||||
ParseError::Io(_) => unreachable!("Parse error expected"),
|
||||
_ => {}
|
||||
},
|
||||
_ => unreachable!("Error expected"),
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_chunked_payload_chunk_extension() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\
|
||||
\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(msg.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n")
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
|
||||
assert_eq!(chunk, Bytes::from_static(b"data"));
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
|
||||
assert_eq!(chunk, Bytes::from_static(b"line"));
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(msg.eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_request_chunked() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
if let Ok(val) = req.chunked() {
|
||||
assert!(val);
|
||||
} else {
|
||||
unreachable!("Error");
|
||||
}
|
||||
|
||||
// intentional typo in "chunked"
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chnked\r\n\r\n",
|
||||
);
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n");
|
||||
assert_eq!(
|
||||
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
|
||||
b"data"
|
||||
);
|
||||
assert_eq!(
|
||||
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
|
||||
b"line"
|
||||
);
|
||||
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload_and_next_message() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(
|
||||
b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\
|
||||
POST /test2 HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n"
|
||||
.iter(),
|
||||
);
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"data");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"line");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(msg.eof());
|
||||
|
||||
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
assert_eq!(*req.method(), Method::POST);
|
||||
assert!(req.chunked().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload_chunks() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4\r\n1111\r\n");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"1111");
|
||||
|
||||
buf.extend(b"4\r\ndata\r");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"data");
|
||||
|
||||
buf.extend(b"\n4");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"\r");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
buf.extend(b"\n");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"li");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"li");
|
||||
|
||||
//trailers
|
||||
//buf.feed_data("test: test\r\n");
|
||||
//not_ready!(reader.parse(&mut buf, &mut readbuf));
|
||||
|
||||
buf.extend(b"ne\r\n0\r\n");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"ne");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"\r\n");
|
||||
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn chunk_extension_quoted() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
Host: localhost:8080\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
2;hello=b;one=\"1 2 3\"\r\n\
|
||||
xx",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"xx")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_chunk_extension_invalid() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: localhost:8080\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
2;x\nx\r\n\
|
||||
4c\r\n\
|
||||
0\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
|
||||
let err = pl.decode(&mut buf).unwrap_err();
|
||||
assert!(err
|
||||
.to_string()
|
||||
.contains("Invalid character in chunk extension"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_chunk_size_overflow() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
f0000000000000003\r\n\
|
||||
abc\r\n\
|
||||
0\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
|
||||
let err = pl.decode(&mut buf).unwrap_err();
|
||||
assert!(err
|
||||
.to_string()
|
||||
.contains("Invalid chunk size line: Size is too big"));
|
||||
}
|
||||
}
|
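The new `chunked.rs` above guards `read_size` with `checked_mul`, so a chunk-size line that does not fit in a `u64` is rejected instead of silently wrapping; that is what `hrs_chunk_size_overflow` exercises with the 17-digit size `f0000000000000003`. A standalone sketch of that guard (the real code accumulates digit by digit inside the state machine):

```rust
fn parse_chunk_size(hex: &[u8]) -> Option<u64> {
    let mut size: u64 = 0;
    for &b in hex {
        let digit = match b {
            b'0'..=b'9' => b - b'0',
            b'a'..=b'f' => b + 10 - b'a',
            b'A'..=b'F' => b + 10 - b'A',
            _ => return None,
        };
        // reject sizes that would overflow instead of wrapping
        size = size.checked_mul(16)?.checked_add(u64::from(digit))?;
    }
    Some(size)
}

fn main() {
    assert_eq!(parse_chunk_size(b"4"), Some(4));
    assert_eq!(parse_chunk_size(b"ffffffffffffffff"), Some(u64::MAX));
    assert_eq!(parse_chunk_size(b"f0000000000000003"), None); // 17 hex digits: overflow
}
```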
@ -1,18 +1,18 @@
|
||||
use std::convert::TryFrom;
|
||||
use std::io;
|
||||
use std::marker::PhantomData;
|
||||
use std::task::Poll;
|
||||
use std::{convert::TryFrom, io, marker::PhantomData, mem::MaybeUninit, task::Poll};
|
||||
|
||||
use actix_codec::Decoder;
|
||||
use bytes::{Buf, Bytes, BytesMut};
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use http::header::{HeaderName, HeaderValue};
|
||||
use http::{header, Method, StatusCode, Uri, Version};
|
||||
use log::{debug, error, trace};
|
||||
|
||||
use crate::error::ParseError;
|
||||
use crate::header::HeaderMap;
|
||||
use crate::message::{ConnectionType, ResponseHead};
|
||||
use crate::request::Request;
|
||||
use super::chunked::ChunkedState;
|
||||
use crate::{
|
||||
error::ParseError,
|
||||
header::HeaderMap,
|
||||
message::{ConnectionType, ResponseHead},
|
||||
request::Request,
|
||||
};
|
||||
|
||||
pub(crate) const MAX_BUFFER_SIZE: usize = 131_072;
|
||||
const MAX_HEADERS: usize = 96;
|
||||
@ -67,6 +67,7 @@ pub(crate) trait MessageType: Sized {
|
||||
let mut has_upgrade_websocket = false;
|
||||
let mut expect = false;
|
||||
let mut chunked = false;
|
||||
let mut seen_te = false;
|
||||
let mut content_length = None;
|
||||
|
||||
{
|
||||
@ -85,8 +86,17 @@ pub(crate) trait MessageType: Sized {
|
||||
};
|
||||
|
||||
match name {
|
||||
header::CONTENT_LENGTH => {
|
||||
if let Ok(s) = value.to_str() {
|
||||
header::CONTENT_LENGTH if content_length.is_some() => {
|
||||
debug!("multiple Content-Length");
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
header::CONTENT_LENGTH => match value.to_str() {
|
||||
Ok(s) if s.trim().starts_with('+') => {
|
||||
debug!("illegal Content-Length: {:?}", s);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
Ok(s) => {
|
||||
if let Ok(len) = s.parse::<u64>() {
|
||||
if len != 0 {
|
||||
content_length = Some(len);
|
||||
@ -95,22 +105,38 @@ pub(crate) trait MessageType: Sized {
|
||||
debug!("illegal Content-Length: {:?}", s);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
} else {
|
||||
}
|
||||
Err(_) => {
|
||||
debug!("illegal Content-Length: {:?}", value);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// transfer-encoding
|
||||
header::TRANSFER_ENCODING if seen_te => {
|
||||
debug!("multiple Transfer-Encoding not allowed");
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
header::TRANSFER_ENCODING => {
|
||||
if let Ok(s) = value.to_str().map(|s| s.trim()) {
|
||||
chunked = s.eq_ignore_ascii_case("chunked");
|
||||
seen_te = true;
|
||||
|
||||
if let Ok(s) = value.to_str().map(str::trim) {
|
||||
if s.eq_ignore_ascii_case("chunked") {
|
||||
chunked = true;
|
||||
} else if s.eq_ignore_ascii_case("identity") {
|
||||
// allow silently since multiple TE headers are already checked
|
||||
} else {
|
||||
debug!("illegal Transfer-Encoding: {:?}", s);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
} else {
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
}
|
||||
// connection keep-alive state
|
||||
header::CONNECTION => {
|
||||
ka = if let Ok(conn) = value.to_str().map(|conn| conn.trim()) {
|
||||
ka = if let Ok(conn) = value.to_str().map(str::trim) {
|
||||
if conn.eq_ignore_ascii_case("keep-alive") {
|
||||
Some(ConnectionType::KeepAlive)
|
||||
} else if conn.eq_ignore_ascii_case("close") {
|
||||
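The Content-Length handling above gains an explicit rejection for values with a leading `+`. That branch is needed because Rust's integer `FromStr` accepts an optional plus sign, so `"+3"` would otherwise parse as a valid length even though it is not a valid Content-Length field value. A one-line demonstration:

```rust
fn main() {
    // `parse::<u64>()` tolerates a leading '+', hence the explicit check in the parser
    assert_eq!("+3".parse::<u64>(), Ok(3));
    assert_eq!("3".parse::<u64>(), Ok(3));
}
```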
@ -125,7 +151,7 @@ pub(crate) trait MessageType: Sized {
|
||||
};
|
||||
}
|
||||
header::UPGRADE => {
|
||||
if let Ok(val) = value.to_str().map(|val| val.trim()) {
|
||||
if let Ok(val) = value.to_str().map(str::trim) {
|
||||
if val.eq_ignore_ascii_case("websocket") {
|
||||
has_upgrade_websocket = true;
|
||||
}
|
||||
@ -186,10 +212,17 @@ impl MessageType for Request {
|
||||
let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;
|
||||
|
||||
let (len, method, uri, ver, h_len) = {
|
||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;
|
||||
// SAFETY:
|
||||
// Create an uninitialized array of `MaybeUninit`. The `assume_init` is
|
||||
// safe because the type we are claiming to have initialized here is a
|
||||
// bunch of `MaybeUninit`s, which do not require initialization.
|
||||
let mut parsed = unsafe {
|
||||
MaybeUninit::<[MaybeUninit<httparse::Header<'_>>; MAX_HEADERS]>::uninit()
|
||||
.assume_init()
|
||||
};
|
||||
|
||||
let mut req = httparse::Request::new(&mut parsed);
|
||||
match req.parse(src)? {
|
||||
let mut req = httparse::Request::new(&mut []);
|
||||
match req.parse_with_uninit_headers(src, &mut parsed)? {
|
||||
httparse::Status::Complete(len) => {
|
||||
let method = Method::from_bytes(req.method.unwrap().as_bytes())
|
||||
.map_err(|_| ParseError::Method)?;
|
||||
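The header-parsing hunk above replaces a fully initialized `[httparse::Header; MAX_HEADERS]` with an uninitialized `[MaybeUninit<Header>; MAX_HEADERS]`, as described by its SAFETY comment: an array of `MaybeUninit<T>` needs no initialization, so `assume_init` on the outer `MaybeUninit` is sound, while each element must still be written before it is read. A standalone sketch of that pattern with a plain `u64` element type:

```rust
use std::mem::MaybeUninit;

fn uninit_array<T, const N: usize>() -> [MaybeUninit<T>; N] {
    // SAFETY: the elements are `MaybeUninit`, which never requires initialization
    unsafe { MaybeUninit::<[MaybeUninit<T>; N]>::uninit().assume_init() }
}

fn main() {
    let mut slots: [MaybeUninit<u64>; 4] = uninit_array();
    for (i, slot) in slots.iter_mut().enumerate() {
        slot.write(i as u64); // initialize before reading
    }
    // SAFETY: every element was initialized in the loop above
    let last = unsafe { slots[3].assume_init() };
    assert_eq!(last, 3);
}
```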
@ -408,20 +441,6 @@ enum Kind {
|
||||
Eof,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
enum ChunkedState {
|
||||
Size,
|
||||
SizeLws,
|
||||
Extension,
|
||||
SizeLf,
|
||||
Body,
|
||||
BodyCr,
|
||||
BodyLf,
|
||||
EndCr,
|
||||
EndLf,
|
||||
End,
|
||||
}
|
||||
|
||||
impl Decoder for PayloadDecoder {
|
||||
type Item = PayloadItem;
|
||||
type Error = io::Error;
|
||||
@ -451,19 +470,23 @@ impl Decoder for PayloadDecoder {
|
||||
Kind::Chunked(ref mut state, ref mut size) => {
|
||||
loop {
|
||||
let mut buf = None;
|
||||
|
||||
// advances the chunked state
|
||||
*state = match state.step(src, size, &mut buf) {
|
||||
Poll::Pending => return Ok(None),
|
||||
Poll::Ready(Ok(state)) => state,
|
||||
Poll::Ready(Err(e)) => return Err(e),
|
||||
};
|
||||
|
||||
if *state == ChunkedState::End {
|
||||
trace!("End of chunked stream");
|
||||
return Ok(Some(PayloadItem::Eof));
|
||||
}
|
||||
|
||||
if let Some(buf) = buf {
|
||||
return Ok(Some(PayloadItem::Chunk(buf)));
|
||||
}
|
||||
|
||||
if src.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
@ -480,201 +503,40 @@ impl Decoder for PayloadDecoder {
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! byte (
|
||||
($rdr:ident) => ({
|
||||
if $rdr.len() > 0 {
|
||||
let b = $rdr[0];
|
||||
$rdr.advance(1);
|
||||
b
|
||||
} else {
|
||||
return Poll::Pending
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
impl ChunkedState {
|
||||
fn step(
|
||||
&self,
|
||||
body: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
use self::ChunkedState::*;
|
||||
match *self {
|
||||
Size => ChunkedState::read_size(body, size),
|
||||
SizeLws => ChunkedState::read_size_lws(body),
|
||||
Extension => ChunkedState::read_extension(body),
|
||||
SizeLf => ChunkedState::read_size_lf(body, size),
|
||||
Body => ChunkedState::read_body(body, size, buf),
|
||||
BodyCr => ChunkedState::read_body_cr(body),
|
||||
BodyLf => ChunkedState::read_body_lf(body),
|
||||
EndCr => ChunkedState::read_end_cr(body),
|
||||
EndLf => ChunkedState::read_end_lf(body),
|
||||
End => Poll::Ready(Ok(ChunkedState::End)),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_size(
|
||||
rdr: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
let radix = 16;
|
||||
match byte!(rdr) {
|
||||
b @ b'0'..=b'9' => {
|
||||
*size *= radix;
|
||||
*size += u64::from(b - b'0');
|
||||
}
|
||||
b @ b'a'..=b'f' => {
|
||||
*size *= radix;
|
||||
*size += u64::from(b + 10 - b'a');
|
||||
}
|
||||
b @ b'A'..=b'F' => {
|
||||
*size *= radix;
|
||||
*size += u64::from(b + 10 - b'A');
|
||||
}
|
||||
b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)),
|
||||
b';' => return Poll::Ready(Ok(ChunkedState::Extension)),
|
||||
b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => {
|
||||
return Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size line: Invalid Size",
|
||||
)));
|
||||
}
|
||||
}
|
||||
Poll::Ready(Ok(ChunkedState::Size))
|
||||
}
|
||||
|
||||
fn read_size_lws(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
trace!("read_size_lws");
|
||||
match byte!(rdr) {
|
||||
// LWS can follow the chunk size, but no more digits can come
|
||||
b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)),
|
||||
b';' => Poll::Ready(Ok(ChunkedState::Extension)),
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size linear white space",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_extension(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
|
||||
}
|
||||
}
|
||||
fn read_size_lf(
|
||||
rdr: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' if *size > 0 => Poll::Ready(Ok(ChunkedState::Body)),
|
||||
b'\n' if *size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_body(
|
||||
rdr: &mut BytesMut,
|
||||
rem: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
trace!("Chunked read, remaining={:?}", rem);
|
||||
|
||||
let len = rdr.len() as u64;
|
||||
if len == 0 {
|
||||
Poll::Ready(Ok(ChunkedState::Body))
|
||||
} else {
|
||||
let slice;
|
||||
if *rem > len {
|
||||
slice = rdr.split().freeze();
|
||||
*rem -= len;
|
||||
} else {
|
||||
slice = rdr.split_to(*rem as usize).freeze();
|
||||
*rem = 0;
|
||||
}
|
||||
*buf = Some(slice);
|
||||
if *rem > 0 {
|
||||
Poll::Ready(Ok(ChunkedState::Body))
|
||||
} else {
|
||||
Poll::Ready(Ok(ChunkedState::BodyCr))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_body_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk body CR",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_body_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' => Poll::Ready(Ok(ChunkedState::Size)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk body LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_end_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk end CR",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_end_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' => Poll::Ready(Ok(ChunkedState::End)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk end LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use http::{Method, Version};
|
||||
|
||||
use super::*;
|
||||
use crate::error::ParseError;
|
||||
use crate::http::header::{HeaderName, SET_COOKIE};
|
||||
use crate::HttpMessage;
|
||||
use crate::{
|
||||
error::ParseError,
|
||||
http::header::{HeaderName, SET_COOKIE},
|
||||
HttpMessage as _,
|
||||
};
|
||||
|
||||
impl PayloadType {
|
||||
fn unwrap(self) -> PayloadDecoder {
|
||||
pub(crate) fn unwrap(self) -> PayloadDecoder {
|
||||
match self {
|
||||
PayloadType::Payload(pl) => pl,
|
||||
_ => panic!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_unhandled(&self) -> bool {
|
||||
pub(crate) fn is_unhandled(&self) -> bool {
|
||||
matches!(self, PayloadType::Stream(_))
|
||||
}
|
||||
}
|
||||
|
||||
impl PayloadItem {
|
||||
fn chunk(self) -> Bytes {
|
||||
pub(crate) fn chunk(self) -> Bytes {
|
||||
match self {
|
||||
PayloadItem::Chunk(chunk) => chunk,
|
||||
_ => panic!("error"),
|
||||
}
|
||||
}
|
||||
fn eof(&self) -> bool {
|
||||
|
||||
pub(crate) fn eof(&self) -> bool {
|
||||
matches!(*self, PayloadItem::Eof)
|
||||
}
|
||||
}
|
||||
@ -967,34 +829,6 @@ mod tests {
|
||||
assert!(req.upgrade());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_request_chunked() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
if let Ok(val) = req.chunked() {
|
||||
assert!(val);
|
||||
} else {
|
||||
unreachable!("Error");
|
||||
}
|
||||
|
||||
// intentional typo in "chunked"
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chnked\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
if let Ok(val) = req.chunked() {
|
||||
assert!(!val);
|
||||
} else {
|
||||
unreachable!("Error");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_headers_content_length_err_1() {
|
||||
let mut buf = BytesMut::from(
|
||||
@ -1112,126 +946,6 @@ mod tests {
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n");
|
||||
assert_eq!(
|
||||
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
|
||||
b"data"
|
||||
);
|
||||
assert_eq!(
|
||||
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
|
||||
b"line"
|
||||
);
|
||||
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload_and_next_message() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(
|
||||
b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\
|
||||
POST /test2 HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n"
|
||||
.iter(),
|
||||
);
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"data");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"line");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(msg.eof());
|
||||
|
||||
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
assert_eq!(*req.method(), Method::POST);
|
||||
assert!(req.chunked().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload_chunks() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4\r\n1111\r\n");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"1111");
|
||||
|
||||
buf.extend(b"4\r\ndata\r");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"data");
|
||||
|
||||
buf.extend(b"\n4");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"\r");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
buf.extend(b"\n");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"li");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"li");
|
||||
|
||||
//trailers
|
||||
//buf.feed_data("test: test\r\n");
|
||||
//not_ready!(reader.parse(&mut buf, &mut readbuf));
|
||||
|
||||
buf.extend(b"ne\r\n0\r\n");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"ne");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"\r\n");
|
||||
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_chunked_payload_chunk_extension() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\
|
||||
\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(msg.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n")
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
|
||||
assert_eq!(chunk, Bytes::from_static(b"data"));
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
|
||||
assert_eq!(chunk, Bytes::from_static(b"line"));
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(msg.eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_response_http10_read_until_eof() {
|
||||
let mut buf = BytesMut::from("HTTP/1.0 200 Ok\r\n\r\ntest data");
|
||||
@ -1243,4 +957,84 @@ mod tests {
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"test data")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_multiple_content_length() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: 4\r\n\
|
||||
Content-Length: 2\r\n\
|
||||
\r\n\
|
||||
abcd",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_content_length_plus() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: +3\r\n\
|
||||
\r\n\
|
||||
000",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_unknown_transfer_encoding() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Transfer-Encoding: JUNK\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
5\r\n\
|
||||
hello\r\n\
|
||||
0",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_multiple_transfer_encoding() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: 51\r\n\
|
||||
Transfer-Encoding: identity\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
0\r\n\
|
||||
\r\n\
|
||||
GET /forbidden HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\r\n",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn transfer_encoding_agrees() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: 3\r\n\
|
||||
Transfer-Encoding: identity\r\n\
|
||||
\r\n\
|
||||
0\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"0\r\n")));
|
||||
}
|
||||
}
|
||||
|
@ -303,9 +303,9 @@ where
|
||||
body: &impl MessageBody,
|
||||
) -> Result<BodySize, DispatchError> {
|
||||
let size = body.size();
|
||||
let mut this = self.project();
|
||||
let this = self.project();
|
||||
this.codec
|
||||
.encode(Message::Item((message, size)), &mut this.write_buf)
|
||||
.encode(Message::Item((message, size)), this.write_buf)
|
||||
.map_err(|err| {
|
||||
if let Some(mut payload) = this.payload.take() {
|
||||
payload.set_error(PayloadError::Incomplete(None));
|
||||
@ -425,13 +425,13 @@ where
|
||||
Poll::Ready(Some(Ok(item))) => {
|
||||
this.codec.encode(
|
||||
Message::Chunk(Some(item)),
|
||||
&mut this.write_buf,
|
||||
this.write_buf,
|
||||
)?;
|
||||
}
|
||||
|
||||
Poll::Ready(None) => {
|
||||
this.codec
|
||||
.encode(Message::Chunk(None), &mut this.write_buf)?;
|
||||
.encode(Message::Chunk(None), this.write_buf)?;
|
||||
// payload stream finished.
|
||||
// set state to None and handle next message
|
||||
this.state.set(State::None);
|
||||
@ -460,13 +460,13 @@ where
|
||||
Poll::Ready(Some(Ok(item))) => {
|
||||
this.codec.encode(
|
||||
Message::Chunk(Some(item)),
|
||||
&mut this.write_buf,
|
||||
this.write_buf,
|
||||
)?;
|
||||
}
|
||||
|
||||
Poll::Ready(None) => {
|
||||
this.codec
|
||||
.encode(Message::Chunk(None), &mut this.write_buf)?;
|
||||
.encode(Message::Chunk(None), this.write_buf)?;
|
||||
// payload stream finished.
|
||||
// set state to None and handle next message
|
||||
this.state.set(State::None);
|
||||
@ -515,14 +515,13 @@ where
|
||||
cx: &mut Context<'_>,
|
||||
) -> Result<(), DispatchError> {
|
||||
// Handle `EXPECT: 100-Continue` header
|
||||
let mut this = self.as_mut().project();
|
||||
if req.head().expect() {
|
||||
// set dispatcher state so the future is pinned.
|
||||
let mut this = self.as_mut().project();
|
||||
let task = this.flow.expect.call(req);
|
||||
this.state.set(State::ExpectCall(task));
|
||||
} else {
|
||||
// the same as above.
|
||||
let mut this = self.as_mut().project();
|
||||
let task = this.flow.service.call(req);
|
||||
this.state.set(State::ServiceCall(task));
|
||||
};
|
||||
@ -593,7 +592,7 @@ where
|
||||
let mut updated = false;
|
||||
let mut this = self.as_mut().project();
|
||||
loop {
|
||||
match this.codec.decode(&mut this.read_buf) {
|
||||
match this.codec.decode(this.read_buf) {
|
||||
Ok(Some(msg)) => {
|
||||
updated = true;
|
||||
this.flags.insert(Flags::STARTED);
|
||||
@ -1061,7 +1060,7 @@ mod tests {
|
||||
fn stabilize_date_header(payload: &mut [u8]) {
|
||||
let mut from = 0;
|
||||
|
||||
while let Some(pos) = find_slice(&payload, b"date", from) {
|
||||
while let Some(pos) = find_slice(payload, b"date", from) {
|
||||
payload[(from + pos)..(from + pos + 35)]
|
||||
.copy_from_slice(b"date: Thu, 01 Jan 1970 12:34:56 UTC");
|
||||
from += 35;
|
||||
|
@@ -20,6 +20,7 @@ const AVERAGE_HEADER_SIZE: usize = 30;

#[derive(Debug)]
pub(crate) struct MessageEncoder<T: MessageType> {
    #[allow(dead_code)]
    pub length: BodySize,
    pub te: TransferEncoding,
    _phantom: PhantomData<T>,
@@ -81,6 +82,7 @@ pub(crate) trait MessageType: Sized {
        match length {
            BodySize::Stream => {
                if chunked {
                    skip_len = true;
                    if camel_case {
                        dst.put_slice(b"\r\nTransfer-Encoding: chunked\r\n")
                    } else {
@@ -174,7 +176,7 @@ pub(crate) trait MessageType: Sized {
                unsafe {
                    if camel_case {
                        // use Camel-Case headers
                        write_camel_case(k, from_raw_parts_mut(buf, k_len));
                        write_camel_case(k, buf, k_len);
                    } else {
                        write_data(k, buf, k_len);
                    }
@@ -472,15 +474,22 @@ impl TransferEncoding {
}

/// # Safety
/// Callers must ensure that the given length matches given value length.
/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is
/// valid for writes of at least `len` bytes.
unsafe fn write_data(value: &[u8], buf: *mut u8, len: usize) {
    debug_assert_eq!(value.len(), len);
    copy_nonoverlapping(value.as_ptr(), buf, len);
}

fn write_camel_case(value: &[u8], buffer: &mut [u8]) {
/// # Safety
/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is
/// valid for writes of at least `len` bytes.
unsafe fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
    // first copy entire (potentially wrong) slice to output
    buffer[..value.len()].copy_from_slice(value);
    write_data(value, buf, len);

    // SAFETY: We just initialized the buffer with `value`
    let buffer = from_raw_parts_mut(buf, len);

    let mut iter = value.iter();

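The encoder change above only moves the slice reconstruction into the unsafe helper; the Camel-Case logic itself is unchanged. For readers unfamiliar with it, here is a safe, simplified sketch of the same idea. This is not the crate's internal API; `camel_case` is a hypothetical free function used purely for illustration.

```rust
/// Uppercase the first letter and any letter following a `-`,
/// e.g. "content-length" -> "Content-Length".
fn camel_case(value: &[u8], buf: &mut [u8]) {
    // start from a copy of the input, then fix casing in place
    buf[..value.len()].copy_from_slice(value);

    let mut upcase_next = true;
    for byte in buf[..value.len()].iter_mut() {
        if upcase_next {
            byte.make_ascii_uppercase();
        }
        upcase_next = *byte == b'-';
    }
}

fn main() {
    let mut out = [0u8; 14];
    camel_case(b"content-length", &mut out);
    assert_eq!(&out, b"Content-Length");
}
```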
@@ -1,6 +1,8 @@
//! HTTP/1 protocol implementation.

use bytes::{Bytes, BytesMut};

mod chunked;
mod client;
mod codec;
mod decoder;
@@ -186,8 +186,7 @@ impl Inner {
        if self
            .task
            .as_ref()
            .map(|w| !cx.waker().will_wake(w))
            .unwrap_or(true)
            .map_or(true, |w| !cx.waker().will_wake(w))
        {
            self.task = Some(cx.waker().clone());
        }
@@ -199,8 +198,7 @@ impl Inner {
        if self
            .io_task
            .as_ref()
            .map(|w| !cx.waker().will_wake(w))
            .unwrap_or(true)
            .map_or(true, |w| !cx.waker().will_wake(w))
        {
            self.io_task = Some(cx.waker().clone());
        }
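The `map_or` change above keeps the existing behaviour: a waker is only cloned and stored when the already-registered one would not wake the current task. A minimal sketch of that idiom using only std types (the surrounding `Inner` struct belongs to the crate and is not reproduced here):

```rust
use std::task::{Context, Waker};

struct Slot {
    task: Option<Waker>,
}

impl Slot {
    /// Store the current task's waker, but skip the clone when the
    /// already-registered waker would wake the same task.
    fn register(&mut self, cx: &mut Context<'_>) {
        if self
            .task
            .as_ref()
            .map_or(true, |w| !cx.waker().will_wake(w))
        {
            self.task = Some(cx.waker().clone());
        }
    }
}
```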
@@ -63,7 +63,6 @@ where
                .is_write_buf_full()
            {
                let next =
                    // TODO: MSRV 1.51: poll_map_err
                    match this.body.as_mut().as_pin_mut().unwrap().poll_next(cx) {
                        Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)),
                        Poll::Ready(Some(Err(err))) => {
@@ -177,7 +177,7 @@ mod rustls {
        > {
            let mut protos = vec![b"h2".to_vec()];
            protos.extend_from_slice(&config.alpn_protocols);
            config.set_protocols(&protos);
            config.alpn_protocols = protos;

            Acceptor::new(config)
                .map_err(TlsError::Tls)
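With rustls 0.20 the ALPN list is a plain `alpn_protocols` field rather than a `set_protocols` setter. A sketch of building an HTTP/2-capable server config this way, assuming the rustls 0.20 builder API shown elsewhere in this changeset and pre-loaded `cert_chain`/`key` values:

```rust
use rustls::{Certificate, PrivateKey, ServerConfig};

fn h2_server_config(cert_chain: Vec<Certificate>, key: PrivateKey) -> ServerConfig {
    let mut config = ServerConfig::builder()
        .with_safe_defaults()
        .with_no_client_auth()
        .with_single_cert(cert_chain, key)
        .expect("invalid certificate/key");

    // advertise h2 first, then fall back to HTTP/1.1
    config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];

    config
}
```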
@@ -249,7 +249,7 @@ impl HeaderMap {
    /// assert!(map.get("INVALID HEADER NAME").is_none());
    /// ```
    pub fn get(&self, key: impl AsHeaderName) -> Option<&HeaderValue> {
        self.get_value(key).map(|val| val.first())
        self.get_value(key).map(Value::first)
    }

    /// Returns a mutable reference to the _first_ value associated a header name.
@@ -280,8 +280,8 @@ impl HeaderMap {
    /// ```
    pub fn get_mut(&mut self, key: impl AsHeaderName) -> Option<&mut HeaderValue> {
        match key.try_as_name(super::as_name::Seal).ok()? {
            Cow::Borrowed(name) => self.inner.get_mut(name).map(|v| v.first_mut()),
            Cow::Owned(name) => self.inner.get_mut(&name).map(|v| v.first_mut()),
            Cow::Borrowed(name) => self.inner.get_mut(name).map(Value::first_mut),
            Cow::Owned(name) => self.inner.get_mut(&name).map(Value::first_mut),
        }
    }

@@ -684,7 +684,7 @@ impl<'a> Iterator for Iter<'a> {

    fn next(&mut self) -> Option<Self::Item> {
        // handle in-progress multi value lists first
        if let Some((ref name, ref mut vals)) = self.multi_inner {
        if let Some((name, ref mut vals)) = self.multi_inner {
            match vals.get(self.multi_idx) {
                Some(val) => {
                    self.multi_idx += 1;
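As the docs above note, `get` and `get_mut` return only the first value stored under a name. A usage sketch, assuming this `HeaderMap` keeps its `insert`/`append`/`get`/`get_all` methods from earlier betas:

```rust
use actix_http::header::{self, HeaderMap, HeaderValue};

fn main() {
    let mut map = HeaderMap::new();

    map.insert(header::HOST, HeaderValue::from_static("example.com"));
    map.append(header::SET_COOKIE, HeaderValue::from_static("a=1"));
    map.append(header::SET_COOKIE, HeaderValue::from_static("b=2"));

    // `get` yields only the first value stored for a name.
    assert_eq!(
        map.get(header::SET_COOKIE),
        Some(&HeaderValue::from_static("a=1"))
    );

    // the full multi-value list is reachable through `get_all`
    assert_eq!(map.get_all(header::SET_COOKIE).count(), 2);
}
```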
@@ -1,5 +1,6 @@
use std::{convert::Infallible, str::FromStr};
use std::{convert::TryFrom, str::FromStr};

use derive_more::{Display, Error};
use http::header::InvalidHeaderValue;

use crate::{
@@ -8,8 +9,16 @@ use crate::{
    HttpMessage,
};

/// Error return when a content encoding is unknown.
///
/// Example: 'compress'
#[derive(Debug, Display, Error)]
#[display(fmt = "unsupported content encoding")]
pub struct ContentEncodingParseError;

/// Represents a supported content encoding.
#[derive(Copy, Clone, PartialEq, Debug)]
#[derive(Debug, Clone, Copy, PartialEq)]
#[non_exhaustive]
pub enum ContentEncoding {
    /// Automatically select encoding based on encoding negotiation.
    Auto,
@@ -37,7 +46,7 @@ impl ContentEncoding {
        matches!(self, ContentEncoding::Identity | ContentEncoding::Auto)
    }

    /// Convert content encoding to string
    /// Convert content encoding to string.
    #[inline]
    pub fn as_str(self) -> &'static str {
        match self {
@@ -48,18 +57,6 @@ impl ContentEncoding {
            ContentEncoding::Identity | ContentEncoding::Auto => "identity",
        }
    }

    /// Default Q-factor (quality) value.
    #[inline]
    pub fn quality(self) -> f64 {
        match self {
            ContentEncoding::Br => 1.1,
            ContentEncoding::Gzip => 1.0,
            ContentEncoding::Deflate => 0.9,
            ContentEncoding::Identity | ContentEncoding::Auto => 0.1,
            ContentEncoding::Zstd => 0.0,
        }
    }
}

impl Default for ContentEncoding {
@@ -69,31 +66,33 @@ impl Default for ContentEncoding {
}

impl FromStr for ContentEncoding {
    type Err = Infallible;
    type Err = ContentEncodingParseError;

    fn from_str(val: &str) -> Result<Self, Self::Err> {
        Ok(Self::from(val))
    }
}

impl From<&str> for ContentEncoding {
    fn from(val: &str) -> ContentEncoding {
        let val = val.trim();

        if val.eq_ignore_ascii_case("br") {
            ContentEncoding::Br
            Ok(ContentEncoding::Br)
        } else if val.eq_ignore_ascii_case("gzip") {
            ContentEncoding::Gzip
            Ok(ContentEncoding::Gzip)
        } else if val.eq_ignore_ascii_case("deflate") {
            ContentEncoding::Deflate
            Ok(ContentEncoding::Deflate)
        } else if val.eq_ignore_ascii_case("zstd") {
            ContentEncoding::Zstd
            Ok(ContentEncoding::Zstd)
        } else {
            ContentEncoding::default()
            Err(ContentEncodingParseError)
        }
    }
}

impl TryFrom<&str> for ContentEncoding {
    type Error = ContentEncodingParseError;

    fn try_from(val: &str) -> Result<Self, Self::Error> {
        val.parse()
    }
}

impl IntoHeaderValue for ContentEncoding {
    type Error = InvalidHeaderValue;

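After this change an unrecognised token is a parse error instead of silently falling back to the default encoding. A short usage sketch, assuming `ContentEncoding` stays re-exported from `actix_http::header` as before:

```rust
use std::convert::TryFrom;

use actix_http::header::ContentEncoding;

fn main() {
    // FromStr: known tokens parse case-insensitively...
    let enc: ContentEncoding = "GZIP".parse().unwrap();
    assert_eq!(enc.as_str(), "gzip");

    // ...and unknown tokens now surface an error instead of ContentEncoding::Auto.
    assert!("compress".parse::<ContentEncoding>().is_err());

    // TryFrom<&str> goes through the same parser.
    assert!(ContentEncoding::try_from("br").is_ok());
}
```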
actix-http/src/header/shared/http_date.rs (new file, 82 lines)
@@ -0,0 +1,82 @@
use std::{fmt, io::Write, str::FromStr, time::SystemTime};

use bytes::BytesMut;
use http::header::{HeaderValue, InvalidHeaderValue};

use crate::{
    config::DATE_VALUE_LENGTH, error::ParseError, header::IntoHeaderValue,
    helpers::MutWriter,
};

/// A timestamp with HTTP formatting and parsing.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct HttpDate(SystemTime);

impl FromStr for HttpDate {
    type Err = ParseError;

    fn from_str(s: &str) -> Result<HttpDate, ParseError> {
        match httpdate::parse_http_date(s) {
            Ok(sys_time) => Ok(HttpDate(sys_time)),
            Err(_) => Err(ParseError::Header),
        }
    }
}

impl fmt::Display for HttpDate {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let date_str = httpdate::fmt_http_date(self.0);
        f.write_str(&date_str)
    }
}

impl IntoHeaderValue for HttpDate {
    type Error = InvalidHeaderValue;

    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        let mut buf = BytesMut::with_capacity(DATE_VALUE_LENGTH);
        let mut wrt = MutWriter(&mut buf);

        // unwrap: date output is known to be well formed and of known length
        write!(wrt, "{}", httpdate::fmt_http_date(self.0)).unwrap();

        HeaderValue::from_maybe_shared(buf.split().freeze())
    }
}

impl From<SystemTime> for HttpDate {
    fn from(sys_time: SystemTime) -> HttpDate {
        HttpDate(sys_time)
    }
}

impl From<HttpDate> for SystemTime {
    fn from(HttpDate(sys_time): HttpDate) -> SystemTime {
        sys_time
    }
}

#[cfg(test)]
mod tests {
    use std::time::Duration;

    use super::*;

    #[test]
    fn date_header() {
        macro_rules! assert_parsed_date {
            ($case:expr, $exp:expr) => {
                assert_eq!($case.parse::<HttpDate>().unwrap(), $exp);
            };
        }

        // 784198117 = SystemTime::from(datetime!(1994-11-07 08:48:37).assume_utc()).duration_since(SystemTime::UNIX_EPOCH));
        let nov_07 = HttpDate(SystemTime::UNIX_EPOCH + Duration::from_secs(784198117));

        assert_parsed_date!("Mon, 07 Nov 1994 08:48:37 GMT", nov_07);
        assert_parsed_date!("Monday, 07-Nov-94 08:48:37 GMT", nov_07);
        assert_parsed_date!("Mon Nov 7 08:48:37 1994", nov_07);

        assert!("this-is-no-date".parse::<HttpDate>().is_err());
    }
}
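The new implementation delegates parsing and formatting to the `httpdate` crate instead of `time`. A small sketch of round-tripping a date through `HttpDate`, assuming it stays exported as `actix_http::header::HttpDate`:

```rust
use std::time::{Duration, SystemTime};

use actix_http::header::HttpDate;

fn main() {
    // All three formats from RFC 7231 (IMF-fixdate, RFC 850, asctime) are accepted.
    let parsed: HttpDate = "Mon, 07 Nov 1994 08:48:37 GMT".parse().unwrap();

    let expected = HttpDate::from(SystemTime::UNIX_EPOCH + Duration::from_secs(784_198_117));
    assert_eq!(parsed, expected);

    // Display renders the canonical IMF-fixdate form.
    assert_eq!(parsed.to_string(), "Mon, 07 Nov 1994 08:48:37 GMT");
}
```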
@ -1,97 +0,0 @@
|
||||
use std::{
|
||||
fmt,
|
||||
io::Write,
|
||||
str::FromStr,
|
||||
time::{SystemTime, UNIX_EPOCH},
|
||||
};
|
||||
|
||||
use bytes::buf::BufMut;
|
||||
use bytes::BytesMut;
|
||||
use http::header::{HeaderValue, InvalidHeaderValue};
|
||||
use time::{OffsetDateTime, PrimitiveDateTime, UtcOffset};
|
||||
|
||||
use crate::error::ParseError;
|
||||
use crate::header::IntoHeaderValue;
|
||||
use crate::time_parser;
|
||||
|
||||
/// A timestamp with HTTP formatting and parsing.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct HttpDate(OffsetDateTime);
|
||||
|
||||
impl FromStr for HttpDate {
|
||||
type Err = ParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<HttpDate, ParseError> {
|
||||
match time_parser::parse_http_date(s) {
|
||||
Some(t) => Ok(HttpDate(t.assume_utc())),
|
||||
None => Err(ParseError::Header),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for HttpDate {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Display::fmt(&self.0.format("%a, %d %b %Y %H:%M:%S GMT"), f)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SystemTime> for HttpDate {
|
||||
fn from(sys: SystemTime) -> HttpDate {
|
||||
HttpDate(PrimitiveDateTime::from(sys).assume_utc())
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoHeaderValue for HttpDate {
|
||||
type Error = InvalidHeaderValue;
|
||||
|
||||
fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
|
||||
let mut wrt = BytesMut::with_capacity(29).writer();
|
||||
write!(
|
||||
wrt,
|
||||
"{}",
|
||||
self.0
|
||||
.to_offset(UtcOffset::UTC)
|
||||
.format("%a, %d %b %Y %H:%M:%S GMT")
|
||||
)
|
||||
.unwrap();
|
||||
HeaderValue::from_maybe_shared(wrt.get_mut().split().freeze())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<HttpDate> for SystemTime {
|
||||
fn from(date: HttpDate) -> SystemTime {
|
||||
let dt = date.0;
|
||||
let epoch = OffsetDateTime::unix_epoch();
|
||||
|
||||
UNIX_EPOCH + (dt - epoch)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::HttpDate;
|
||||
use time::{date, time, PrimitiveDateTime};
|
||||
|
||||
#[test]
|
||||
fn test_date() {
|
||||
let nov_07 = HttpDate(
|
||||
PrimitiveDateTime::new(date!(1994 - 11 - 07), time!(8:48:37)).assume_utc(),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
"Sun, 07 Nov 1994 08:48:37 GMT".parse::<HttpDate>().unwrap(),
|
||||
nov_07
|
||||
);
|
||||
assert_eq!(
|
||||
"Sunday, 07-Nov-94 08:48:37 GMT"
|
||||
.parse::<HttpDate>()
|
||||
.unwrap(),
|
||||
nov_07
|
||||
);
|
||||
assert_eq!(
|
||||
"Sun Nov 7 08:48:37 1994".parse::<HttpDate>().unwrap(),
|
||||
nov_07
|
||||
);
|
||||
assert!("this-is-no-date".parse::<HttpDate>().is_err());
|
||||
}
|
||||
}
|
@@ -3,12 +3,12 @@
mod charset;
mod content_encoding;
mod extended;
mod httpdate;
mod http_date;
mod quality_item;

pub use self::charset::Charset;
pub use self::content_encoding::ContentEncoding;
pub use self::extended::{parse_extended_value, ExtendedValue};
pub use self::httpdate::HttpDate;
pub use self::http_date::HttpDate;
pub use self::quality_item::{q, qitem, Quality, QualityItem};
pub use language_tags::LanguageTag;
@ -1,11 +1,14 @@
|
||||
use std::{
|
||||
cmp,
|
||||
convert::{TryFrom, TryInto},
|
||||
fmt, str,
|
||||
fmt,
|
||||
str::{self, FromStr},
|
||||
};
|
||||
|
||||
use derive_more::{Display, Error};
|
||||
|
||||
use crate::error::ParseError;
|
||||
|
||||
const MAX_QUALITY: u16 = 1000;
|
||||
const MAX_FLOAT_QUALITY: f32 = 1.0;
|
||||
|
||||
@ -113,12 +116,12 @@ impl<T: fmt::Display> fmt::Display for QualityItem<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: str::FromStr> str::FromStr for QualityItem<T> {
|
||||
type Err = crate::error::ParseError;
|
||||
impl<T: FromStr> FromStr for QualityItem<T> {
|
||||
type Err = ParseError;
|
||||
|
||||
fn from_str(qitem_str: &str) -> Result<QualityItem<T>, crate::error::ParseError> {
|
||||
fn from_str(qitem_str: &str) -> Result<Self, Self::Err> {
|
||||
if !qitem_str.is_ascii() {
|
||||
return Err(crate::error::ParseError::Header);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
// Set defaults used if parsing fails.
|
||||
@ -139,7 +142,7 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T> {
|
||||
if parts[0].len() < 2 {
|
||||
// Can't possibly be an attribute since an attribute needs at least a name followed
|
||||
// by an equals sign. And bare identifiers are forbidden.
|
||||
return Err(crate::error::ParseError::Header);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
let start = &parts[0][0..2];
|
||||
@ -148,25 +151,21 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T> {
|
||||
let q_val = &parts[0][2..];
|
||||
if q_val.len() > 5 {
|
||||
// longer than 5 indicates an over-precise q-factor
|
||||
return Err(crate::error::ParseError::Header);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
let q_value = q_val
|
||||
.parse::<f32>()
|
||||
.map_err(|_| crate::error::ParseError::Header)?;
|
||||
let q_value = q_val.parse::<f32>().map_err(|_| ParseError::Header)?;
|
||||
|
||||
if (0f32..=1f32).contains(&q_value) {
|
||||
quality = q_value;
|
||||
raw_item = parts[1];
|
||||
} else {
|
||||
return Err(crate::error::ParseError::Header);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let item = raw_item
|
||||
.parse::<T>()
|
||||
.map_err(|_| crate::error::ParseError::Header)?;
|
||||
let item = raw_item.parse::<T>().map_err(|_| ParseError::Header)?;
|
||||
|
||||
// we already checked above that the quality is within range
|
||||
Ok(QualityItem::new(item, Quality::from_f32(quality)))
|
||||
@ -224,7 +223,7 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
impl str::FromStr for Encoding {
|
||||
impl FromStr for Encoding {
|
||||
type Err = crate::error::ParseError;
|
||||
fn from_str(s: &str) -> Result<Encoding, crate::error::ParseError> {
|
||||
use Encoding::*;
|
||||
|
@ -1,18 +1,20 @@
|
||||
//! HTTP primitives for the Actix ecosystem.
|
||||
//!
|
||||
//! ## Crate Features
|
||||
//! | Feature | Functionality |
|
||||
//! | ---------------- | ----------------------------------------------------- |
|
||||
//! | `openssl` | TLS support via [OpenSSL]. |
|
||||
//! | `rustls` | TLS support via [rustls]. |
|
||||
//! | `compress` | Payload compression support. (Deflate, Gzip & Brotli) |
|
||||
//! | `trust-dns` | Use [trust-dns] as the client DNS resolver. |
|
||||
//! | Feature | Functionality |
|
||||
//! | ------------------- | ------------------------------------------- |
|
||||
//! | `openssl` | TLS support via [OpenSSL]. |
|
||||
//! | `rustls` | TLS support via [rustls]. |
|
||||
//! | `compress-brotli` | Payload compression support: Brotli. |
|
||||
//! | `compress-gzip` | Payload compression support: Deflate, Gzip. |
|
||||
//! | `compress-zstd` | Payload compression support: Zstd. |
|
||||
//! | `trust-dns` | Use [trust-dns] as the client DNS resolver. |
|
||||
//!
|
||||
//! [OpenSSL]: https://crates.io/crates/openssl
|
||||
//! [rustls]: https://crates.io/crates/rustls
|
||||
//! [trust-dns]: https://crates.io/crates/trust-dns
|
||||
|
||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
||||
#![deny(rust_2018_idioms, nonstandard_style, clippy::uninit_assumed_init)]
|
||||
#![allow(
|
||||
clippy::type_complexity,
|
||||
clippy::too_many_arguments,
|
||||
@ -25,14 +27,12 @@
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
pub mod body;
|
||||
mod builder;
|
||||
pub mod client;
|
||||
mod config;
|
||||
#[cfg(feature = "compress")]
|
||||
|
||||
#[cfg(feature = "__compress")]
|
||||
pub mod encoding;
|
||||
mod extensions;
|
||||
pub mod header;
|
||||
@ -44,7 +44,6 @@ mod request;
|
||||
mod response;
|
||||
mod response_builder;
|
||||
mod service;
|
||||
mod time_parser;
|
||||
|
||||
pub mod error;
|
||||
pub mod h1;
|
||||
@ -104,14 +103,9 @@ type ConnectCallback<IO> = dyn Fn(&IO, &mut Extensions);
|
||||
///
|
||||
/// # Implementation Details
|
||||
/// Uses Option to reduce necessary allocations when merging with request extensions.
|
||||
#[derive(Default)]
|
||||
pub(crate) struct OnConnectData(Option<Extensions>);
|
||||
|
||||
impl Default for OnConnectData {
|
||||
fn default() -> Self {
|
||||
Self(None)
|
||||
}
|
||||
}
|
||||
|
||||
impl OnConnectData {
|
||||
/// Construct by calling the on-connect callback with the underlying transport I/O.
|
||||
pub(crate) fn from_io<T>(
|
||||
|
@ -1,110 +0,0 @@
|
||||
#[macro_export]
|
||||
#[doc(hidden)]
|
||||
macro_rules! downcast_get_type_id {
|
||||
() => {
|
||||
/// A helper method to get the type ID of the type
|
||||
/// this trait is implemented on.
|
||||
/// This method is unsafe to *implement*, since `downcast_ref` relies
|
||||
/// on the returned `TypeId` to perform a cast.
|
||||
///
|
||||
/// Unfortunately, Rust has no notion of a trait method that is
|
||||
/// unsafe to implement (marking it as `unsafe` makes it unsafe
|
||||
/// to *call*). As a workaround, we require this method
|
||||
/// to return a private type along with the `TypeId`. This
|
||||
/// private type (`PrivateHelper`) has a private constructor,
|
||||
/// making it impossible for safe code to construct outside of
|
||||
/// this module. This ensures that safe code cannot violate
|
||||
/// type-safety by implementing this method.
|
||||
///
|
||||
/// We also take `PrivateHelper` as a parameter, to ensure that
|
||||
/// safe code cannot obtain a `PrivateHelper` instance by
|
||||
/// delegating to an existing implementation of `__private_get_type_id__`
|
||||
#[doc(hidden)]
|
||||
fn __private_get_type_id__(
|
||||
&self,
|
||||
_: PrivateHelper,
|
||||
) -> (std::any::TypeId, PrivateHelper)
|
||||
where
|
||||
Self: 'static,
|
||||
{
|
||||
(std::any::TypeId::of::<Self>(), PrivateHelper(()))
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
//Generate implementation for dyn $name
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! downcast {
|
||||
($name:ident) => {
|
||||
/// A struct with a private constructor, for use with
|
||||
/// `__private_get_type_id__`. Its single field is private,
|
||||
/// ensuring that it can only be constructed from this module
|
||||
#[doc(hidden)]
|
||||
pub struct PrivateHelper(());
|
||||
|
||||
impl dyn $name + 'static {
|
||||
/// Downcasts generic body to a specific type.
|
||||
pub fn downcast_ref<T: $name + 'static>(&self) -> Option<&T> {
|
||||
if self.__private_get_type_id__(PrivateHelper(())).0
|
||||
== std::any::TypeId::of::<T>()
|
||||
{
|
||||
// SAFETY: external crates cannot override the default
|
||||
// implementation of `__private_get_type_id__`, since
|
||||
// it requires returning a private type. We can therefore
|
||||
// rely on the returned `TypeId`, which ensures that this
|
||||
// case is correct.
|
||||
unsafe { Some(&*(self as *const dyn $name as *const T)) }
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Downcasts a generic body to a mutable specific type.
|
||||
pub fn downcast_mut<T: $name + 'static>(&mut self) -> Option<&mut T> {
|
||||
if self.__private_get_type_id__(PrivateHelper(())).0
|
||||
== std::any::TypeId::of::<T>()
|
||||
{
|
||||
// SAFETY: external crates cannot override the default
|
||||
// implementation of `__private_get_type_id__`, since
|
||||
// it requires returning a private type. We can therefore
|
||||
// rely on the returned `TypeId`, which ensures that this
|
||||
// case is correct.
|
||||
unsafe {
|
||||
Some(&mut *(self as *const dyn $name as *const T as *mut T))
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#![allow(clippy::upper_case_acronyms)]
|
||||
|
||||
trait MB {
|
||||
downcast_get_type_id!();
|
||||
}
|
||||
|
||||
downcast!(MB);
|
||||
|
||||
impl MB for String {}
|
||||
impl MB for () {}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_any_casting() {
|
||||
let mut body = String::from("hello cast");
|
||||
let resp_body: &mut dyn MB = &mut body;
|
||||
let body = resp_body.downcast_ref::<String>().unwrap();
|
||||
assert_eq!(body, "hello cast");
|
||||
let body = &mut resp_body.downcast_mut::<String>().unwrap();
|
||||
body.push('!');
|
||||
let body = resp_body.downcast_ref::<String>().unwrap();
|
||||
assert_eq!(body, "hello cast!");
|
||||
let not_body = resp_body.downcast_ref::<()>();
|
||||
assert!(not_body.is_none());
|
||||
}
|
||||
}
|
@ -152,15 +152,16 @@ impl RequestHead {
|
||||
|
||||
/// Connection upgrade status
|
||||
pub fn upgrade(&self) -> bool {
|
||||
if let Some(hdr) = self.headers().get(header::CONNECTION) {
|
||||
if let Ok(s) = hdr.to_str() {
|
||||
s.to_ascii_lowercase().contains("upgrade")
|
||||
} else {
|
||||
false
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
self.headers()
|
||||
.get(header::CONNECTION)
|
||||
.map(|hdr| {
|
||||
if let Ok(s) = hdr.to_str() {
|
||||
s.to_ascii_lowercase().contains("upgrade")
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
@ -208,7 +209,7 @@ impl RequestHeadType {
|
||||
impl AsRef<RequestHead> for RequestHeadType {
|
||||
fn as_ref(&self) -> &RequestHead {
|
||||
match self {
|
||||
RequestHeadType::Owned(head) => &head,
|
||||
RequestHeadType::Owned(head) => head,
|
||||
RequestHeadType::Rc(head, _) => head.as_ref(),
|
||||
}
|
||||
}
|
||||
@ -308,13 +309,11 @@ impl ResponseHead {
|
||||
/// Get custom reason for the response
|
||||
#[inline]
|
||||
pub fn reason(&self) -> &str {
|
||||
if let Some(reason) = self.reason {
|
||||
reason
|
||||
} else {
|
||||
self.reason.unwrap_or_else(|| {
|
||||
self.status
|
||||
.canonical_reason()
|
||||
.unwrap_or("<unknown status code>")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[inline]
|
||||
@ -356,7 +355,7 @@ pub struct Message<T: Head> {
|
||||
impl<T: Head> Message<T> {
|
||||
/// Get new message from the pool of objects
|
||||
pub fn new() -> Self {
|
||||
T::with_pool(|p| p.get_message())
|
||||
T::with_pool(MessagePool::get_message)
|
||||
}
|
||||
}
|
||||
|
||||
@ -364,7 +363,7 @@ impl<T: Head> std::ops::Deref for Message<T> {
|
||||
type Target = T;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.head.as_ref()
|
||||
self.head.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -15,7 +15,7 @@ use crate::{
    HttpMessage,
};

/// Request
/// An HTTP request.
pub struct Request<P = PayloadStream> {
    pub(crate) payload: Payload<P>,
    pub(crate) head: Message<RequestHead>,
@ -263,7 +263,7 @@ mod openssl {
|
||||
mod rustls {
|
||||
use std::io;
|
||||
|
||||
use actix_tls::accept::rustls::{Acceptor, ServerConfig, Session, TlsStream};
|
||||
use actix_tls::accept::rustls::{Acceptor, ServerConfig, TlsStream};
|
||||
use actix_tls::accept::TlsError;
|
||||
|
||||
use super::*;
|
||||
@ -308,14 +308,13 @@ mod rustls {
|
||||
> {
|
||||
let mut protos = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
|
||||
protos.extend_from_slice(&config.alpn_protocols);
|
||||
config.set_protocols(&protos);
|
||||
config.alpn_protocols = protos;
|
||||
|
||||
Acceptor::new(config)
|
||||
.map_err(TlsError::Tls)
|
||||
.map_init_err(|_| panic!())
|
||||
.and_then(|io: TlsStream<TcpStream>| async {
|
||||
let proto = if let Some(protos) = io.get_ref().1.get_alpn_protocol()
|
||||
{
|
||||
let proto = if let Some(protos) = io.get_ref().1.alpn_protocol() {
|
||||
if protos.windows(2).any(|window| window == b"h2") {
|
||||
Protocol::Http2
|
||||
} else {
|
||||
|
@ -1,72 +0,0 @@
|
||||
use time::{Date, OffsetDateTime, PrimitiveDateTime};
|
||||
|
||||
/// Attempt to parse a `time` string as one of either RFC 1123, RFC 850, or asctime.
|
||||
pub(crate) fn parse_http_date(time: &str) -> Option<PrimitiveDateTime> {
|
||||
try_parse_rfc_1123(time)
|
||||
.or_else(|| try_parse_rfc_850(time))
|
||||
.or_else(|| try_parse_asctime(time))
|
||||
}
|
||||
|
||||
/// Attempt to parse a `time` string as a RFC 1123 formatted date time string.
|
||||
///
|
||||
/// Eg: `Fri, 12 Feb 2021 00:14:29 GMT`
|
||||
fn try_parse_rfc_1123(time: &str) -> Option<PrimitiveDateTime> {
|
||||
time::parse(time, "%a, %d %b %Y %H:%M:%S").ok()
|
||||
}
|
||||
|
||||
/// Attempt to parse a `time` string as a RFC 850 formatted date time string.
|
||||
///
|
||||
/// Eg: `Wednesday, 11-Jan-21 13:37:41 UTC`
|
||||
fn try_parse_rfc_850(time: &str) -> Option<PrimitiveDateTime> {
|
||||
let dt = PrimitiveDateTime::parse(time, "%A, %d-%b-%y %H:%M:%S").ok()?;
|
||||
|
||||
// If the `time` string contains a two-digit year, then as per RFC 2616 § 19.3,
|
||||
// we consider the year as part of this century if it's within the next 50 years,
|
||||
// otherwise we consider as part of the previous century.
|
||||
|
||||
let now = OffsetDateTime::now_utc();
|
||||
let century_start_year = (now.year() / 100) * 100;
|
||||
let mut expanded_year = century_start_year + dt.year();
|
||||
|
||||
if expanded_year > now.year() + 50 {
|
||||
expanded_year -= 100;
|
||||
}
|
||||
|
||||
let date = Date::try_from_ymd(expanded_year, dt.month(), dt.day()).ok()?;
|
||||
Some(PrimitiveDateTime::new(date, dt.time()))
|
||||
}
|
||||
|
||||
/// Attempt to parse a `time` string using ANSI C's `asctime` format.
|
||||
///
|
||||
/// Eg: `Wed Feb 13 15:46:11 2013`
|
||||
fn try_parse_asctime(time: &str) -> Option<PrimitiveDateTime> {
|
||||
time::parse(time, "%a %b %_d %H:%M:%S %Y").ok()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use time::{date, time};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_rfc_850_year_shift() {
|
||||
let date = try_parse_rfc_850("Friday, 19-Nov-82 16:14:55 EST").unwrap();
|
||||
assert_eq!(date, date!(1982 - 11 - 19).with_time(time!(16:14:55)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-62 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(2062 - 01 - 11).with_time(time!(13:37:41)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-21 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(2021 - 01 - 11).with_time(time!(13:37:41)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-23 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(2023 - 01 - 11).with_time(time!(13:37:41)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-99 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(1999 - 01 - 11).with_time(time!(13:37:41)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-00 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(2000 - 01 - 11).with_time(time!(13:37:41)));
|
||||
}
|
||||
}
|
@@ -25,8 +25,8 @@ pub fn apply_mask_fast32(buf: &mut [u8], mask: [u8; 4]) {
    //
    // un aligned prefix and suffix would be mask/unmask per byte.
    // proper aligned middle slice goes into fast path and operates on 4-byte blocks.
    let (mut prefix, words, mut suffix) = unsafe { buf.align_to_mut::<u32>() };
    apply_mask_fallback(&mut prefix, mask);
    let (prefix, words, suffix) = unsafe { buf.align_to_mut::<u32>() };
    apply_mask_fallback(prefix, mask);
    let head = prefix.len() & 3;
    let mask_u32 = if head > 0 {
        if cfg!(target_endian = "big") {
@@ -40,7 +40,7 @@ pub fn apply_mask_fast32(buf: &mut [u8], mask: [u8; 4]) {
    for word in words.iter_mut() {
        *word ^= mask_u32;
    }
    apply_mask_fallback(&mut suffix, mask_u32.to_ne_bytes());
    apply_mask_fallback(suffix, mask_u32.to_ne_bytes());
}

#[cfg(test)]
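Both the fast path and the fallback implement the same RFC 6455 rule: each payload byte is XORed with `mask[i % 4]`, and masking twice restores the original bytes. A byte-at-a-time reference version of that rule (equivalent to the fallback path, not the aligned fast path):

```rust
/// XOR every payload byte with the rotating 4-byte mask (RFC 6455 §5.3).
/// Applying the mask twice restores the original payload.
fn apply_mask(buf: &mut [u8], mask: [u8; 4]) {
    for (i, byte) in buf.iter_mut().enumerate() {
        *byte ^= mask[i % 4];
    }
}

fn main() {
    let mask = [0x12, 0x34, 0x56, 0x78];
    let mut payload = *b"hello world";

    apply_mask(&mut payload, mask); // masked
    apply_mask(&mut payload, mask); // unmasked again

    assert_eq!(&payload, b"hello world");
}
```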
@ -3,7 +3,7 @@
|
||||
extern crate tls_rustls as rustls;
|
||||
|
||||
use std::{
|
||||
convert::Infallible,
|
||||
convert::{Infallible, TryFrom},
|
||||
io::{self, BufReader, Write},
|
||||
net::{SocketAddr, TcpStream as StdTcpStream},
|
||||
sync::Arc,
|
||||
@ -20,16 +20,17 @@ use actix_http::{
|
||||
};
|
||||
use actix_http_test::test_server;
|
||||
use actix_service::{fn_factory_with_config, fn_service};
|
||||
use actix_tls::connect::tls::rustls::webpki_roots_cert_store;
|
||||
use actix_utils::future::{err, ok};
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use derive_more::{Display, Error};
|
||||
use futures_core::Stream;
|
||||
use futures_util::stream::{once, StreamExt as _};
|
||||
use rustls::{
|
||||
internal::pemfile::{certs, pkcs8_private_keys},
|
||||
NoClientAuth, ServerConfig as RustlsServerConfig, Session,
|
||||
Certificate, OwnedTrustAnchor, PrivateKey, RootCertStore,
|
||||
ServerConfig as RustlsServerConfig, ServerName,
|
||||
};
|
||||
use webpki::DNSNameRef;
|
||||
use rustls_pemfile::{certs, pkcs8_private_keys};
|
||||
|
||||
async fn load_body<S>(mut stream: S) -> Result<BytesMut, PayloadError>
|
||||
where
|
||||
@ -47,13 +48,24 @@ fn tls_config() -> RustlsServerConfig {
|
||||
let cert_file = cert.serialize_pem().unwrap();
|
||||
let key_file = cert.serialize_private_key_pem();
|
||||
|
||||
let mut config = RustlsServerConfig::new(NoClientAuth::new());
|
||||
let cert_file = &mut BufReader::new(cert_file.as_bytes());
|
||||
let key_file = &mut BufReader::new(key_file.as_bytes());
|
||||
|
||||
let cert_chain = certs(cert_file).unwrap();
|
||||
let cert_chain = certs(cert_file)
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.map(Certificate)
|
||||
.collect();
|
||||
let mut keys = pkcs8_private_keys(key_file).unwrap();
|
||||
config.set_single_cert(cert_chain, keys.remove(0)).unwrap();
|
||||
|
||||
let mut config = RustlsServerConfig::builder()
|
||||
.with_safe_defaults()
|
||||
.with_no_client_auth()
|
||||
.with_single_cert(cert_chain, PrivateKey(keys.remove(0)))
|
||||
.unwrap();
|
||||
|
||||
config.alpn_protocols.push(HTTP1_1_ALPN_PROTOCOL.to_vec());
|
||||
config.alpn_protocols.push(H2_ALPN_PROTOCOL.to_vec());
|
||||
|
||||
config
|
||||
}
|
||||
@ -62,19 +74,28 @@ pub fn get_negotiated_alpn_protocol(
|
||||
addr: SocketAddr,
|
||||
client_alpn_protocol: &[u8],
|
||||
) -> Option<Vec<u8>> {
|
||||
let mut config = rustls::ClientConfig::new();
|
||||
let mut config = rustls::ClientConfig::builder()
|
||||
.with_safe_defaults()
|
||||
.with_root_certificates(webpki_roots_cert_store())
|
||||
.with_no_client_auth();
|
||||
|
||||
config.alpn_protocols.push(client_alpn_protocol.to_vec());
|
||||
let mut sess = rustls::ClientSession::new(
|
||||
&Arc::new(config),
|
||||
DNSNameRef::try_from_ascii_str("localhost").unwrap(),
|
||||
);
|
||||
|
||||
let mut sess = rustls::ClientConnection::new(
|
||||
Arc::new(config),
|
||||
ServerName::try_from("localhost").unwrap(),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let mut sock = StdTcpStream::connect(addr).unwrap();
|
||||
let mut stream = rustls::Stream::new(&mut sess, &mut sock);
|
||||
|
||||
// The handshake will fails because the client will not be able to verify the server
|
||||
// certificate, but it doesn't matter here as we are just interested in the negotiated ALPN
|
||||
// protocol
|
||||
let _ = stream.flush();
|
||||
sess.get_alpn_protocol().map(|proto| proto.to_vec())
|
||||
|
||||
sess.alpn_protocol().map(|proto| proto.to_vec())
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
|
@@ -183,6 +183,7 @@ async fn test_chunked_payload() {
            Some(caps) => caps.get(1).unwrap().as_str().parse().unwrap(),
            None => panic!("Failed to find size in HTTP Response: {}", data),
        };

        size
    };

@@ -1,6 +1,11 @@
# Changes

## Unreleased - 2021-xx-xx
* Minimum supported Rust version (MSRV) is now 1.52.


## 0.4.0-beta.6 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.


## 0.4.0-beta.5 - 2021-06-17
@@ -1,13 +1,11 @@
[package]
name = "actix-multipart"
version = "0.4.0-beta.5"
version = "0.4.0-beta.6"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Multipart form support for Actix Web"
readme = "README.md"
keywords = ["http", "web", "framework", "async", "futures"]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git"
documentation = "https://docs.rs/actix-multipart"
license = "MIT OR Apache-2.0"
edition = "2018"

@@ -16,7 +14,7 @@ name = "actix_multipart"
path = "src/lib.rs"

[dependencies]
actix-web = { version = "4.0.0-beta.7", default-features = false }
actix-web = { version = "4.0.0-beta.10", default-features = false }
actix-utils = "3.0.0"

bytes = "1"
@@ -31,6 +29,6 @@ twoway = "0.2"

[dev-dependencies]
actix-rt = "2.2"
actix-http = "3.0.0-beta.7"
actix-http = "3.0.0-beta.11"
tokio = { version = "1", features = ["sync"] }
tokio-stream = "0.1"
@@ -3,15 +3,15 @@
> Multipart form support for Actix Web.

[](https://crates.io/crates/actix-multipart)
[](https://docs.rs/actix-multipart/0.4.0-beta.5)
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
[](https://docs.rs/actix-multipart/0.4.0-beta.6)
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)

<br />
[](https://deps.rs/crate/actix-multipart/0.4.0-beta.5)
[](https://deps.rs/crate/actix-multipart/0.4.0-beta.6)
[](https://crates.io/crates/actix-multipart)
[](https://discord.gg/NWpN5mmg3x)

## Documentation & Resources

- [API Documentation](https://docs.rs/actix-multipart)
- [Chat on Gitter](https://gitter.im/actix/actix-web)
- Minimum Supported Rust Version (MSRV): 1.46.0
- Minimum Supported Rust Version (MSRV): 1.52
@@ -33,7 +33,6 @@ use crate::server::Multipart;
impl FromRequest for Multipart {
    type Error = Error;
    type Future = Ready<Result<Multipart, Error>>;
    type Config = ();

    #[inline]
    fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
132
actix-router/CHANGES.md
Normal file
132
actix-router/CHANGES.md
Normal file
@ -0,0 +1,132 @@
|
||||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
* Minimum supported Rust version (MSRV) is now 1.52.
|
||||
|
||||
|
||||
## 0.5.0-beta.2 - 2021-09-09
|
||||
* Introduce `ResourceDef::join`. [#380]
|
||||
* Disallow prefix routes with tail segments. [#379]
|
||||
* Enforce path separators on dynamic prefixes. [#378]
|
||||
* Improve malformed path error message. [#384]
|
||||
* Prefix segments now always end with with a segment delimiter or end-of-input. [#2355]
|
||||
* Prefix segments with trailing slashes define a trailing empty segment. [#2355]
|
||||
* Support multi-pattern prefixes and joins. [#2356]
|
||||
* `ResourceDef::pattern` now returns the first pattern in multi-pattern resources. [#2356]
|
||||
* Support `build_resource_path` on multi-pattern resources. [#2356]
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
[#378]: https://github.com/actix/actix-net/pull/378
|
||||
[#379]: https://github.com/actix/actix-net/pull/379
|
||||
[#380]: https://github.com/actix/actix-net/pull/380
|
||||
[#384]: https://github.com/actix/actix-net/pull/384
|
||||
[#2355]: https://github.com/actix/actix-web/pull/2355
|
||||
[#2356]: https://github.com/actix/actix-web/pull/2356
|
||||
|
||||
|
||||
## 0.5.0-beta.1 - 2021-07-20
|
||||
* Fix a bug in multi-patterns where static patterns are interpreted as regex. [#366]
|
||||
* Introduce `ResourceDef::pattern_iter` to get an iterator over all patterns in a multi-pattern resource. [#373]
|
||||
* Fix segment interpolation leaving `Path` in unintended state after matching. [#368]
|
||||
* Fix `ResourceDef` `PartialEq` implementation. [#373]
|
||||
* Re-work `IntoPatterns` trait, adding a `Patterns` enum. [#372]
|
||||
* Implement `IntoPatterns` for `bytestring::ByteString`. [#372]
|
||||
* Rename `Path::{len => segment_count}` to be more descriptive of it's purpose. [#370]
|
||||
* Rename `ResourceDef::{resource_path => resource_path_from_iter}`. [#371]
|
||||
* `ResourceDef::resource_path_from_iter` now takes an `IntoIterator`. [#373]
|
||||
* Rename `ResourceDef::{resource_path_named => resource_path_from_map}`. [#371]
|
||||
* Rename `ResourceDef::{is_prefix_match => find_match}`. [#373]
|
||||
* Rename `ResourceDef::{match_path => capture_match_info}`. [#373]
|
||||
* Rename `ResourceDef::{match_path_checked => capture_match_info_fn}`. [#373]
|
||||
* Remove `ResourceDef::name_mut` and introduce `ResourceDef::set_name`. [#373]
|
||||
* Rename `Router::{*_checked => *_fn}`. [#373]
|
||||
* Return type of `ResourceDef::name` is now `Option<&str>`. [#373]
|
||||
* Return type of `ResourceDef::pattern` is now `Option<&str>`. [#373]
|
||||
|
||||
[#368]: https://github.com/actix/actix-net/pull/368
|
||||
[#366]: https://github.com/actix/actix-net/pull/366
|
||||
[#368]: https://github.com/actix/actix-net/pull/368
|
||||
[#370]: https://github.com/actix/actix-net/pull/370
|
||||
[#371]: https://github.com/actix/actix-net/pull/371
|
||||
[#372]: https://github.com/actix/actix-net/pull/372
|
||||
[#373]: https://github.com/actix/actix-net/pull/373
|
||||
|
||||
|
||||
## 0.4.0 - 2021-06-06
|
||||
* When matching path parameters, `%25` is now kept in the percent-encoded form; no longer decoded to `%`. [#357]
|
||||
* Path tail patterns now match new lines (`\n`) in request URL. [#360]
|
||||
* Fixed a safety bug where `Path` could return a malformed string after percent decoding. [#359]
|
||||
* Methods `Path::{add, add_static}` now take `impl Into<Cow<'static, str>>`. [#345]
|
||||
|
||||
[#345]: https://github.com/actix/actix-net/pull/345
|
||||
[#357]: https://github.com/actix/actix-net/pull/357
|
||||
[#359]: https://github.com/actix/actix-net/pull/359
|
||||
[#360]: https://github.com/actix/actix-net/pull/360
|
||||
|
||||
|
||||
## 0.3.0 - 2019-12-31
|
||||
* Version was yanked previously. See https://crates.io/crates/actix-router/0.3.0
|
||||
|
||||
|
||||
## 0.2.7 - 2021-02-06
|
||||
* Add `Router::recognize_checked` [#247]
|
||||
|
||||
[#247]: https://github.com/actix/actix-net/pull/247
|
||||
|
||||
|
||||
## 0.2.6 - 2021-01-09
|
||||
* Use `bytestring` version range compatible with Bytes v1.0. [#246]
|
||||
|
||||
[#246]: https://github.com/actix/actix-net/pull/246
|
||||
|
||||
|
||||
## 0.2.5 - 2020-09-20
|
||||
* Fix `from_hex()` method
|
||||
|
||||
|
||||
## 0.2.4 - 2019-12-31
|
||||
* Add `ResourceDef::resource_path_named()` path generation method
|
||||
|
||||
|
||||
## 0.2.3 - 2019-12-25
|
||||
* Add impl `IntoPattern` for `&String`
|
||||
|
||||
|
||||
## 0.2.2 - 2019-12-25
|
||||
* Use `IntoPattern` for `RouterBuilder::path()`
|
||||
|
||||
|
||||
## 0.2.1 - 2019-12-25
|
||||
* Add `IntoPattern` trait
|
||||
* Add multi-pattern resources
|
||||
|
||||
|
||||
## 0.2.0 - 2019-12-07
|
||||
* Update http to 0.2
|
||||
* Update regex to 1.3
|
||||
* Use bytestring instead of string
|
||||
|
||||
|
||||
## 0.1.5 - 2019-05-15
|
||||
* Remove debug prints
|
||||
|
||||
|
||||
## 0.1.4 - 2019-05-15
|
||||
* Fix checked resource match
|
||||
|
||||
|
||||
## 0.1.3 - 2019-04-22
|
||||
* Added support for `remainder match` (i.e "/path/{tail}*")
|
||||
|
||||
|
||||
## 0.1.2 - 2019-04-07
|
||||
* Export `Quoter` type
|
||||
* Allow to reset `Path` instance
|
||||
|
||||
|
||||
## 0.1.1 - 2019-04-03
|
||||
* Get dynamic segment by name instead of iterator.
|
||||
|
||||
|
||||
## 0.1.0 - 2019-03-09
|
||||
* Initial release
|
38
actix-router/Cargo.toml
Normal file
38
actix-router/Cargo.toml
Normal file
@ -0,0 +1,38 @@
|
||||
[package]
|
||||
name = "actix-router"
|
||||
version = "0.5.0-beta.2"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>",
|
||||
"Rob Ede <robjtede@icloud.com>",
|
||||
]
|
||||
description = "Resource path matching and router"
|
||||
keywords = ["actix", "router", "routing"]
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
name = "actix_router"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[features]
|
||||
default = ["http"]
|
||||
|
||||
[dependencies]
|
||||
bytestring = ">=0.1.5, <2"
|
||||
firestorm = "0.4"
|
||||
http = { version = "0.2.3", optional = true }
|
||||
log = "0.4"
|
||||
regex = "1.5"
|
||||
serde = "1"
|
||||
|
||||
[dev-dependencies]
|
||||
criterion = { version = "0.3", features = ["html_reports"] }
|
||||
firestorm = { version = "0.4", features = ["enable_system_time"] }
|
||||
http = "0.2.3"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
|
||||
[[bench]]
|
||||
name = "router"
|
||||
harness = false
|
1
actix-router/LICENSE-APACHE
Symbolic link
1
actix-router/LICENSE-APACHE
Symbolic link
@ -0,0 +1 @@
|
||||
../LICENSE-APACHE
|
1
actix-router/LICENSE-MIT
Symbolic link
1
actix-router/LICENSE-MIT
Symbolic link
@ -0,0 +1 @@
|
||||
../LICENSE-MIT
|
194
actix-router/benches/router.rs
Normal file
194
actix-router/benches/router.rs
Normal file
@ -0,0 +1,194 @@
|
||||
//! Based on https://github.com/ibraheemdev/matchit/blob/master/benches/bench.rs
|
||||
|
||||
use criterion::{black_box, criterion_group, criterion_main, Criterion};
|
||||
|
||||
macro_rules! register {
|
||||
(colon) => {{
|
||||
register!(finish => ":p1", ":p2", ":p3", ":p4")
|
||||
}};
|
||||
(brackets) => {{
|
||||
register!(finish => "{p1}", "{p2}", "{p3}", "{p4}")
|
||||
}};
|
||||
(regex) => {{
|
||||
register!(finish => "(.*)", "(.*)", "(.*)", "(.*)")
|
||||
}};
|
||||
(finish => $p1:literal, $p2:literal, $p3:literal, $p4:literal) => {{
|
||||
let arr = [
|
||||
concat!("/authorizations"),
|
||||
concat!("/authorizations/", $p1),
|
||||
concat!("/applications/", $p1, "/tokens/", $p2),
|
||||
concat!("/events"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/events"),
|
||||
concat!("/networks/", $p1, "/", $p2, "/events"),
|
||||
concat!("/orgs/", $p1, "/events"),
|
||||
concat!("/users/", $p1, "/received_events"),
|
||||
concat!("/users/", $p1, "/received_events/public"),
|
||||
concat!("/users/", $p1, "/events"),
|
||||
concat!("/users/", $p1, "/events/public"),
|
||||
concat!("/users/", $p1, "/events/orgs/", $p2),
|
||||
concat!("/feeds"),
|
||||
concat!("/notifications"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/notifications"),
|
||||
concat!("/notifications/threads/", $p1),
|
||||
concat!("/notifications/threads/", $p1, "/subscription"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stargazers"),
|
||||
concat!("/users/", $p1, "/starred"),
|
||||
concat!("/user/starred"),
|
||||
concat!("/user/starred/", $p1, "/", $p2),
|
||||
concat!("/repos/", $p1, "/", $p2, "/subscribers"),
|
||||
concat!("/users/", $p1, "/subscriptions"),
|
||||
concat!("/user/subscriptions"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/subscription"),
|
||||
concat!("/user/subscriptions/", $p1, "/", $p2),
|
||||
concat!("/users/", $p1, "/gists"),
|
||||
concat!("/gists"),
|
||||
concat!("/gists/", $p1),
|
||||
concat!("/gists/", $p1, "/star"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/blobs/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/commits/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/refs"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/tags/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/trees/", $p3),
|
||||
concat!("/issues"),
|
||||
concat!("/user/issues"),
|
||||
concat!("/orgs/", $p1, "/issues"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/assignees"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/assignees/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/events"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/labels/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3),
|
||||
concat!("/emojis"),
|
||||
concat!("/gitignore/templates"),
|
||||
concat!("/gitignore/templates/", $p1),
|
||||
concat!("/meta"),
|
||||
concat!("/rate_limit"),
|
||||
concat!("/users/", $p1, "/orgs"),
|
||||
concat!("/user/orgs"),
|
||||
concat!("/orgs/", $p1),
|
||||
concat!("/orgs/", $p1, "/members"),
|
||||
concat!("/orgs/", $p1, "/members", $p2),
|
||||
concat!("/orgs/", $p1, "/public_members"),
|
||||
concat!("/orgs/", $p1, "/public_members/", $p2),
|
||||
concat!("/orgs/", $p1, "/teams"),
|
||||
concat!("/teams/", $p1),
|
||||
concat!("/teams/", $p1, "/members"),
|
||||
concat!("/teams/", $p1, "/members", $p2),
|
||||
concat!("/teams/", $p1, "/repos"),
|
||||
concat!("/teams/", $p1, "/repos/", $p2, "/", $p3),
|
||||
concat!("/user/teams"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/commits"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/files"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/merge"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/comments"),
|
||||
concat!("/user/repos"),
|
||||
concat!("/users/", $p1, "/repos"),
|
||||
concat!("/orgs/", $p1, "/repos"),
|
||||
concat!("/repositories"),
|
||||
concat!("/repos/", $p1, "/", $p2),
|
||||
concat!("/repos/", $p1, "/", $p2, "/contributors"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/languages"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/teams"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/tags"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/branches"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/branches/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/collaborators"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/collaborators/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/readme"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/keys"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/keys", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/downloads"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/downloads", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/forks"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/hooks"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/hooks", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3, "/assets"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/contributors"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/commit_activity"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/code_frequency"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/participation"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/punch_card"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/statuses/", $p3),
|
||||
concat!("/search/repositories"),
|
||||
concat!("/search/code"),
|
||||
concat!("/search/issues"),
|
||||
concat!("/search/users"),
|
||||
concat!("/legacy/issues/search/", $p1, "/", $p2, "/", $p3, "/", $p4),
|
||||
concat!("/legacy/repos/search/", $p1),
|
||||
concat!("/legacy/user/search/", $p1),
|
||||
concat!("/legacy/user/email/", $p1),
|
||||
concat!("/users/", $p1),
|
||||
concat!("/user"),
|
||||
concat!("/users"),
|
||||
concat!("/user/emails"),
|
||||
concat!("/users/", $p1, "/followers"),
|
||||
concat!("/user/followers"),
|
||||
concat!("/users/", $p1, "/following"),
|
||||
concat!("/user/following"),
|
||||
concat!("/user/following/", $p1),
|
||||
concat!("/users/", $p1, "/following", $p2),
|
||||
concat!("/users/", $p1, "/keys"),
|
||||
concat!("/user/keys"),
|
||||
concat!("/user/keys/", $p1),
|
||||
];
|
||||
std::array::IntoIter::new(arr)
|
||||
}};
|
||||
}
|
||||
|
||||
fn call() -> impl Iterator<Item = &'static str> {
|
||||
let arr = [
|
||||
"/authorizations",
|
||||
"/user/repos",
|
||||
"/repos/rust-lang/rust/stargazers",
|
||||
"/orgs/rust-lang/public_members/nikomatsakis",
|
||||
"/repos/rust-lang/rust/releases/1.51.0",
|
||||
];
|
||||
|
||||
std::array::IntoIter::new(arr)
|
||||
}
|
||||
|
||||
fn compare_routers(c: &mut Criterion) {
|
||||
let mut group = c.benchmark_group("Compare Routers");
|
||||
|
||||
let mut actix = actix_router::Router::<bool>::build();
|
||||
for route in register!(brackets) {
|
||||
actix.path(route, true);
|
||||
}
|
||||
let actix = actix.finish();
|
||||
group.bench_function("actix", |b| {
|
||||
b.iter(|| {
|
||||
for route in call() {
|
||||
let mut path = actix_router::Path::new(route);
|
||||
black_box(actix.recognize(&mut path).unwrap());
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
let regex_set = regex::RegexSet::new(register!(regex)).unwrap();
|
||||
group.bench_function("regex", |b| {
|
||||
b.iter(|| {
|
||||
for route in call() {
|
||||
black_box(regex_set.matches(route));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
group.finish();
|
||||
}
|
||||
|
||||
criterion_group!(benches, compare_routers);
|
||||
criterion_main!(benches);
|
169
actix-router/examples/flamegraph.rs
Normal file
169
actix-router/examples/flamegraph.rs
Normal file
@ -0,0 +1,169 @@
|
||||
macro_rules! register {
|
||||
(brackets) => {{
|
||||
register!(finish => "{p1}", "{p2}", "{p3}", "{p4}")
|
||||
}};
|
||||
(finish => $p1:literal, $p2:literal, $p3:literal, $p4:literal) => {{
|
||||
let arr = [
|
||||
concat!("/authorizations"),
|
||||
concat!("/authorizations/", $p1),
|
||||
concat!("/applications/", $p1, "/tokens/", $p2),
|
||||
concat!("/events"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/events"),
|
||||
concat!("/networks/", $p1, "/", $p2, "/events"),
|
||||
concat!("/orgs/", $p1, "/events"),
|
||||
concat!("/users/", $p1, "/received_events"),
|
||||
concat!("/users/", $p1, "/received_events/public"),
|
||||
concat!("/users/", $p1, "/events"),
|
||||
concat!("/users/", $p1, "/events/public"),
|
||||
concat!("/users/", $p1, "/events/orgs/", $p2),
|
||||
concat!("/feeds"),
|
||||
concat!("/notifications"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/notifications"),
|
||||
concat!("/notifications/threads/", $p1),
|
||||
concat!("/notifications/threads/", $p1, "/subscription"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stargazers"),
|
||||
concat!("/users/", $p1, "/starred"),
|
||||
concat!("/user/starred"),
|
||||
concat!("/user/starred/", $p1, "/", $p2),
|
||||
concat!("/repos/", $p1, "/", $p2, "/subscribers"),
|
||||
concat!("/users/", $p1, "/subscriptions"),
|
||||
concat!("/user/subscriptions"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/subscription"),
|
||||
concat!("/user/subscriptions/", $p1, "/", $p2),
|
||||
concat!("/users/", $p1, "/gists"),
|
||||
concat!("/gists"),
|
||||
concat!("/gists/", $p1),
|
||||
concat!("/gists/", $p1, "/star"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/blobs/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/commits/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/refs"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/tags/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/trees/", $p3),
|
||||
concat!("/issues"),
|
||||
concat!("/user/issues"),
|
||||
concat!("/orgs/", $p1, "/issues"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/assignees"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/assignees/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/events"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/labels/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3),
|
||||
concat!("/emojis"),
|
||||
concat!("/gitignore/templates"),
|
||||
concat!("/gitignore/templates/", $p1),
|
||||
concat!("/meta"),
|
||||
concat!("/rate_limit"),
|
||||
concat!("/users/", $p1, "/orgs"),
|
||||
concat!("/user/orgs"),
|
||||
concat!("/orgs/", $p1),
|
||||
concat!("/orgs/", $p1, "/members"),
|
||||
concat!("/orgs/", $p1, "/members", $p2),
|
||||
concat!("/orgs/", $p1, "/public_members"),
|
||||
concat!("/orgs/", $p1, "/public_members/", $p2),
|
||||
concat!("/orgs/", $p1, "/teams"),
|
||||
concat!("/teams/", $p1),
|
||||
concat!("/teams/", $p1, "/members"),
|
||||
concat!("/teams/", $p1, "/members", $p2),
|
||||
concat!("/teams/", $p1, "/repos"),
|
||||
concat!("/teams/", $p1, "/repos/", $p2, "/", $p3),
|
||||
concat!("/user/teams"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/commits"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/files"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/merge"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/comments"),
|
||||
concat!("/user/repos"),
|
||||
concat!("/users/", $p1, "/repos"),
|
||||
concat!("/orgs/", $p1, "/repos"),
|
||||
concat!("/repositories"),
|
||||
concat!("/repos/", $p1, "/", $p2),
|
||||
concat!("/repos/", $p1, "/", $p2, "/contributors"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/languages"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/teams"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/tags"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/branches"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/branches/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/collaborators"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/collaborators/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/readme"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/keys"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/keys", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/downloads"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/downloads", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/forks"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/hooks"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/hooks", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3, "/assets"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/contributors"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/commit_activity"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/code_frequency"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/participation"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/punch_card"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/statuses/", $p3),
|
||||
concat!("/search/repositories"),
|
||||
concat!("/search/code"),
|
||||
concat!("/search/issues"),
|
||||
concat!("/search/users"),
|
||||
concat!("/legacy/issues/search/", $p1, "/", $p2, "/", $p3, "/", $p4),
|
||||
concat!("/legacy/repos/search/", $p1),
|
||||
concat!("/legacy/user/search/", $p1),
|
||||
concat!("/legacy/user/email/", $p1),
|
||||
concat!("/users/", $p1),
|
||||
concat!("/user"),
|
||||
concat!("/users"),
|
||||
concat!("/user/emails"),
|
||||
concat!("/users/", $p1, "/followers"),
|
||||
concat!("/user/followers"),
|
||||
concat!("/users/", $p1, "/following"),
|
||||
concat!("/user/following"),
|
||||
concat!("/user/following/", $p1),
|
||||
concat!("/users/", $p1, "/following", $p2),
|
||||
concat!("/users/", $p1, "/keys"),
|
||||
concat!("/user/keys"),
|
||||
concat!("/user/keys/", $p1),
|
||||
];
|
||||
|
||||
arr.to_vec()
|
||||
}};
|
||||
}
|
||||
|
||||
static PATHS: [&str; 5] = [
|
||||
"/authorizations",
|
||||
"/user/repos",
|
||||
"/repos/rust-lang/rust/stargazers",
|
||||
"/orgs/rust-lang/public_members/nikomatsakis",
|
||||
"/repos/rust-lang/rust/releases/1.51.0",
|
||||
];
|
||||
|
||||
fn main() {
|
||||
let mut router = actix_router::Router::<bool>::build();
|
||||
|
||||
for route in register!(brackets) {
|
||||
router.path(route, true);
|
||||
}
|
||||
|
||||
let actix = router.finish();
|
||||
|
||||
if firestorm::enabled() {
|
||||
firestorm::bench("target", || {
|
||||
for &route in &PATHS {
|
||||
let mut path = actix_router::Path::new(route);
|
||||
actix.recognize(&mut path).unwrap();
|
||||
}
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
}
|
723
actix-router/src/de.rs
Normal file
@ -0,0 +1,723 @@
|
||||
use serde::de::{self, Deserializer, Error as DeError, Visitor};
|
||||
use serde::forward_to_deserialize_any;
|
||||
|
||||
use crate::path::{Path, PathIter};
|
||||
use crate::ResourcePath;
|
||||
|
||||
macro_rules! unsupported_type {
|
||||
($trait_fn:ident, $name:expr) => {
|
||||
fn $trait_fn<V>(self, _: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom(concat!(
|
||||
"unsupported type: ",
|
||||
$name
|
||||
)))
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! parse_single_value {
|
||||
($trait_fn:ident, $visit_fn:ident, $tp:tt) => {
|
||||
fn $trait_fn<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
if self.path.segment_count() != 1 {
|
||||
Err(de::value::Error::custom(
|
||||
format!(
|
||||
"wrong number of parameters: {} expected 1",
|
||||
self.path.segment_count()
|
||||
)
|
||||
.as_str(),
|
||||
))
|
||||
} else {
|
||||
let v = self.path[0].parse().map_err(|_| {
|
||||
de::value::Error::custom(format!(
|
||||
"can not parse {:?} to a {}",
|
||||
&self.path[0], $tp
|
||||
))
|
||||
})?;
|
||||
visitor.$visit_fn(v)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub struct PathDeserializer<'de, T: ResourcePath> {
|
||||
path: &'de Path<T>,
|
||||
}
|
||||
|
||||
impl<'de, T: ResourcePath + 'de> PathDeserializer<'de, T> {
|
||||
pub fn new(path: &'de Path<T>) -> Self {
|
||||
PathDeserializer { path }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, T: ResourcePath + 'de> Deserializer<'de> for PathDeserializer<'de, T> {
|
||||
type Error = de::value::Error;
|
||||
|
||||
fn deserialize_map<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_map(ParamsDeserializer {
|
||||
params: self.path.iter(),
|
||||
current: None,
|
||||
})
|
||||
}
|
||||
|
||||
fn deserialize_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
_: &'static [&'static str],
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
self.deserialize_map(visitor)
|
||||
}
|
||||
|
||||
fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_unit()
|
||||
}
|
||||
|
||||
fn deserialize_unit_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
self.deserialize_unit(visitor)
|
||||
}
|
||||
|
||||
fn deserialize_newtype_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_newtype_struct(self)
|
||||
}
|
||||
|
||||
fn deserialize_tuple<V>(self, len: usize, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
if self.path.segment_count() < len {
|
||||
Err(de::value::Error::custom(
|
||||
format!(
|
||||
"wrong number of parameters: {} expected {}",
|
||||
self.path.segment_count(),
|
||||
len
|
||||
)
|
||||
.as_str(),
|
||||
))
|
||||
} else {
|
||||
visitor.visit_seq(ParamsSeq {
|
||||
params: self.path.iter(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_tuple_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
len: usize,
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
if self.path.segment_count() < len {
|
||||
Err(de::value::Error::custom(
|
||||
format!(
|
||||
"wrong number of parameters: {} expected {}",
|
||||
self.path.segment_count(),
|
||||
len
|
||||
)
|
||||
.as_str(),
|
||||
))
|
||||
} else {
|
||||
visitor.visit_seq(ParamsSeq {
|
||||
params: self.path.iter(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_enum<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
_: &'static [&'static str],
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
if self.path.is_empty() {
|
||||
Err(de::value::Error::custom("expected at least one parameters"))
|
||||
} else {
|
||||
visitor.visit_enum(ValueEnum {
|
||||
value: &self.path[0],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_str<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
if self.path.segment_count() != 1 {
|
||||
Err(de::value::Error::custom(
|
||||
format!(
|
||||
"wrong number of parameters: {} expected 1",
|
||||
self.path.segment_count()
|
||||
)
|
||||
.as_str(),
|
||||
))
|
||||
} else {
|
||||
visitor.visit_str(&self.path[0])
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_seq(ParamsSeq {
|
||||
params: self.path.iter(),
|
||||
})
|
||||
}
|
||||
|
||||
unsupported_type!(deserialize_any, "'any'");
|
||||
unsupported_type!(deserialize_bytes, "bytes");
|
||||
unsupported_type!(deserialize_option, "Option<T>");
|
||||
unsupported_type!(deserialize_identifier, "identifier");
|
||||
unsupported_type!(deserialize_ignored_any, "ignored_any");
|
||||
|
||||
parse_single_value!(deserialize_bool, visit_bool, "bool");
|
||||
parse_single_value!(deserialize_i8, visit_i8, "i8");
|
||||
parse_single_value!(deserialize_i16, visit_i16, "i16");
|
||||
parse_single_value!(deserialize_i32, visit_i32, "i32");
|
||||
parse_single_value!(deserialize_i64, visit_i64, "i64");
|
||||
parse_single_value!(deserialize_u8, visit_u8, "u8");
|
||||
parse_single_value!(deserialize_u16, visit_u16, "u16");
|
||||
parse_single_value!(deserialize_u32, visit_u32, "u32");
|
||||
parse_single_value!(deserialize_u64, visit_u64, "u64");
|
||||
parse_single_value!(deserialize_f32, visit_f32, "f32");
|
||||
parse_single_value!(deserialize_f64, visit_f64, "f64");
|
||||
parse_single_value!(deserialize_string, visit_string, "String");
|
||||
parse_single_value!(deserialize_byte_buf, visit_string, "String");
|
||||
parse_single_value!(deserialize_char, visit_char, "char");
|
||||
}
|
||||
|
||||
struct ParamsDeserializer<'de, T: ResourcePath> {
|
||||
params: PathIter<'de, T>,
|
||||
current: Option<(&'de str, &'de str)>,
|
||||
}
|
||||
|
||||
impl<'de, T: ResourcePath> de::MapAccess<'de> for ParamsDeserializer<'de, T> {
|
||||
type Error = de::value::Error;
|
||||
|
||||
fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
|
||||
where
|
||||
K: de::DeserializeSeed<'de>,
|
||||
{
|
||||
self.current = self.params.next().map(|ref item| (item.0, item.1));
|
||||
match self.current {
|
||||
Some((key, _)) => Ok(Some(seed.deserialize(Key { key })?)),
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: de::DeserializeSeed<'de>,
|
||||
{
|
||||
if let Some((_, value)) = self.current.take() {
|
||||
seed.deserialize(Value { value })
|
||||
} else {
|
||||
Err(de::value::Error::custom("unexpected item"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Key<'de> {
|
||||
key: &'de str,
|
||||
}
|
||||
|
||||
impl<'de> Deserializer<'de> for Key<'de> {
|
||||
type Error = de::value::Error;
|
||||
|
||||
fn deserialize_identifier<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_str(self.key)
|
||||
}
|
||||
|
||||
fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("Unexpected"))
|
||||
}
|
||||
|
||||
forward_to_deserialize_any! {
|
||||
bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
|
||||
byte_buf option unit unit_struct newtype_struct seq tuple
|
||||
tuple_struct map struct enum ignored_any
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! parse_value {
|
||||
($trait_fn:ident, $visit_fn:ident, $tp:tt) => {
|
||||
fn $trait_fn<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
let v = self.value.parse().map_err(|_| {
|
||||
de::value::Error::custom(format!("can not parse {:?} to a {}", self.value, $tp))
|
||||
})?;
|
||||
visitor.$visit_fn(v)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
struct Value<'de> {
|
||||
value: &'de str,
|
||||
}
|
||||
|
||||
impl<'de> Deserializer<'de> for Value<'de> {
|
||||
type Error = de::value::Error;
|
||||
|
||||
parse_value!(deserialize_bool, visit_bool, "bool");
|
||||
parse_value!(deserialize_i8, visit_i8, "i8");
|
||||
parse_value!(deserialize_i16, visit_i16, "i16");
|
||||
parse_value!(deserialize_i32, visit_i32, "i32");
|
||||
parse_value!(deserialize_i64, visit_i64, "i64");
|
||||
parse_value!(deserialize_u8, visit_u8, "u8");
|
||||
parse_value!(deserialize_u16, visit_u16, "u16");
|
||||
parse_value!(deserialize_u32, visit_u32, "u32");
|
||||
parse_value!(deserialize_u64, visit_u64, "u64");
|
||||
parse_value!(deserialize_f32, visit_f32, "f32");
|
||||
parse_value!(deserialize_f64, visit_f64, "f64");
|
||||
parse_value!(deserialize_string, visit_string, "String");
|
||||
parse_value!(deserialize_byte_buf, visit_string, "String");
|
||||
parse_value!(deserialize_char, visit_char, "char");
|
||||
|
||||
fn deserialize_ignored_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_unit()
|
||||
}
|
||||
|
||||
fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_unit()
|
||||
}
|
||||
|
||||
fn deserialize_unit_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_unit()
|
||||
}
|
||||
|
||||
fn deserialize_bytes<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_borrowed_bytes(self.value.as_bytes())
|
||||
}
|
||||
|
||||
fn deserialize_str<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_borrowed_str(self.value)
|
||||
}
|
||||
|
||||
fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_some(self)
|
||||
}
|
||||
|
||||
fn deserialize_enum<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
_: &'static [&'static str],
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_enum(ValueEnum { value: self.value })
|
||||
}
|
||||
|
||||
fn deserialize_newtype_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_newtype_struct(self)
|
||||
}
|
||||
|
||||
fn deserialize_tuple<V>(self, _: usize, _: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("unsupported type: tuple"))
|
||||
}
|
||||
|
||||
fn deserialize_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
_: &'static [&'static str],
|
||||
_: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("unsupported type: struct"))
|
||||
}
|
||||
|
||||
fn deserialize_tuple_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
_: usize,
|
||||
_: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("unsupported type: tuple struct"))
|
||||
}
|
||||
|
||||
unsupported_type!(deserialize_any, "any");
|
||||
unsupported_type!(deserialize_seq, "seq");
|
||||
unsupported_type!(deserialize_map, "map");
|
||||
unsupported_type!(deserialize_identifier, "identifier");
|
||||
}
|
||||
|
||||
struct ParamsSeq<'de, T: ResourcePath> {
|
||||
params: PathIter<'de, T>,
|
||||
}
|
||||
|
||||
impl<'de, T: ResourcePath> de::SeqAccess<'de> for ParamsSeq<'de, T> {
|
||||
type Error = de::value::Error;
|
||||
|
||||
fn next_element_seed<U>(&mut self, seed: U) -> Result<Option<U::Value>, Self::Error>
|
||||
where
|
||||
U: de::DeserializeSeed<'de>,
|
||||
{
|
||||
match self.params.next() {
|
||||
Some(item) => Ok(Some(seed.deserialize(Value { value: item.1 })?)),
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct ValueEnum<'de> {
|
||||
value: &'de str,
|
||||
}
|
||||
|
||||
impl<'de> de::EnumAccess<'de> for ValueEnum<'de> {
|
||||
type Error = de::value::Error;
|
||||
type Variant = UnitVariant;
|
||||
|
||||
fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error>
|
||||
where
|
||||
V: de::DeserializeSeed<'de>,
|
||||
{
|
||||
Ok((seed.deserialize(Key { key: self.value })?, UnitVariant))
|
||||
}
|
||||
}
|
||||
|
||||
struct UnitVariant;
|
||||
|
||||
impl<'de> de::VariantAccess<'de> for UnitVariant {
|
||||
type Error = de::value::Error;
|
||||
|
||||
fn unit_variant(self) -> Result<(), Self::Error> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn newtype_variant_seed<T>(self, _seed: T) -> Result<T::Value, Self::Error>
|
||||
where
|
||||
T: de::DeserializeSeed<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("not supported"))
|
||||
}
|
||||
|
||||
fn tuple_variant<V>(self, _len: usize, _visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("not supported"))
|
||||
}
|
||||
|
||||
fn struct_variant<V>(
|
||||
self,
|
||||
_: &'static [&'static str],
|
||||
_: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("not supported"))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use serde::{de, Deserialize};
|
||||
|
||||
use super::*;
|
||||
use crate::path::Path;
|
||||
use crate::router::Router;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct MyStruct {
|
||||
key: String,
|
||||
value: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Id {
|
||||
_id: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Test1(String, u32);
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Test2 {
|
||||
key: String,
|
||||
value: u32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, PartialEq)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
enum TestEnum {
|
||||
Val1,
|
||||
Val2,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Test3 {
|
||||
val: TestEnum,
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_request_extract() {
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{key}/{value}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/name/user1/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
|
||||
let s: MyStruct = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(s.key, "name");
|
||||
assert_eq!(s.value, "user1");
|
||||
|
||||
let s: (String, String) =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(s.0, "name");
|
||||
assert_eq!(s.1, "user1");
|
||||
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{key}/{value}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/name/32/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
|
||||
let s: Test1 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(s.0, "name");
|
||||
assert_eq!(s.1, 32);
|
||||
|
||||
let s: Test2 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(s.key, "name");
|
||||
assert_eq!(s.value, 32);
|
||||
|
||||
let s: (String, u8) =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(s.0, "name");
|
||||
assert_eq!(s.1, 32);
|
||||
|
||||
let res: Vec<String> =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(res[0], "name".to_owned());
|
||||
assert_eq!(res[1], "32".to_owned());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extract_path_single() {
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{value}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/32/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
let i: i8 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(i, 32);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extract_enum() {
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{val}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/val1/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
let i: TestEnum = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(i, TestEnum::Val1);
|
||||
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{val1}/{val2}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/val1/val2/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
let i: (TestEnum, TestEnum) =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(i, (TestEnum::Val1, TestEnum::Val2));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extract_enum_value() {
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{val}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/val1/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
let i: Test3 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(i.val, TestEnum::Val1);
|
||||
|
||||
let mut path = Path::new("/val3/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
let i: Result<Test3, de::value::Error> =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path));
|
||||
assert!(i.is_err());
|
||||
assert!(format!("{:?}", i).contains("unknown variant"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extract_errors() {
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{value}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/name/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
|
||||
let s: Result<Test1, de::value::Error> =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path));
|
||||
assert!(s.is_err());
|
||||
assert!(format!("{:?}", s).contains("wrong number of parameters"));
|
||||
|
||||
let s: Result<Test2, de::value::Error> =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path));
|
||||
assert!(s.is_err());
|
||||
assert!(format!("{:?}", s).contains("can not parse"));
|
||||
|
||||
let s: Result<(String, String), de::value::Error> =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path));
|
||||
assert!(s.is_err());
|
||||
assert!(format!("{:?}", s).contains("wrong number of parameters"));
|
||||
|
||||
let s: Result<u32, de::value::Error> =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path));
|
||||
assert!(s.is_err());
|
||||
assert!(format!("{:?}", s).contains("can not parse"));
|
||||
}
|
||||
|
||||
// #[test]
|
||||
// fn test_extract_path_decode() {
|
||||
// let mut router = Router::<()>::default();
|
||||
// router.register_resource(Resource::new(ResourceDef::new("/{value}/")));
|
||||
|
||||
// macro_rules! test_single_value {
|
||||
// ($value:expr, $expected:expr) => {{
|
||||
// let req = TestRequest::with_uri($value).finish();
|
||||
// let info = router.recognize(&req, &(), 0);
|
||||
// let req = req.with_route_info(info);
|
||||
// assert_eq!(
|
||||
// *Path::<String>::from_request(&req, &PathConfig::default()).unwrap(),
|
||||
// $expected
|
||||
// );
|
||||
// }};
|
||||
// }
|
||||
|
||||
// test_single_value!("/%25/", "%");
|
||||
// test_single_value!("/%40%C2%A3%24%25%5E%26%2B%3D/", "@£$%^&+=");
|
||||
// test_single_value!("/%2B/", "+");
|
||||
// test_single_value!("/%252B/", "%2B");
|
||||
// test_single_value!("/%2F/", "/");
|
||||
// test_single_value!("/%252F/", "%2F");
|
||||
// test_single_value!(
|
||||
// "/http%3A%2F%2Flocalhost%3A80%2Ffoo/",
|
||||
// "http://localhost:80/foo"
|
||||
// );
|
||||
// test_single_value!("/%2Fvar%2Flog%2Fsyslog/", "/var/log/syslog");
|
||||
// test_single_value!(
|
||||
// "/http%3A%2F%2Flocalhost%3A80%2Ffile%2F%252Fvar%252Flog%252Fsyslog/",
|
||||
// "http://localhost:80/file/%2Fvar%2Flog%2Fsyslog"
|
||||
// );
|
||||
|
||||
// let req = TestRequest::with_uri("/%25/7/?id=test").finish();
|
||||
|
||||
// let mut router = Router::<()>::default();
|
||||
// router.register_resource(Resource::new(ResourceDef::new("/{key}/{value}/")));
|
||||
// let info = router.recognize(&req, &(), 0);
|
||||
// let req = req.with_route_info(info);
|
||||
|
||||
// let s = Path::<Test2>::from_request(&req, &PathConfig::default()).unwrap();
|
||||
// assert_eq!(s.key, "%");
|
||||
// assert_eq!(s.value, 7);
|
||||
|
||||
// let s = Path::<(String, String)>::from_request(&req, &PathConfig::default()).unwrap();
|
||||
// assert_eq!(s.0, "%");
|
||||
// assert_eq!(s.1, "7");
|
||||
// }
|
||||
|
||||
// #[test]
|
||||
// fn test_extract_path_no_decode() {
|
||||
// let mut router = Router::<()>::default();
|
||||
// router.register_resource(Resource::new(ResourceDef::new("/{value}/")));
|
||||
|
||||
// let req = TestRequest::with_uri("/%25/").finish();
|
||||
// let info = router.recognize(&req, &(), 0);
|
||||
// let req = req.with_route_info(info);
|
||||
// assert_eq!(
|
||||
// *Path::<String>::from_request(&req, &&PathConfig::default().disable_decoding())
|
||||
// .unwrap(),
|
||||
// "%25"
|
||||
// );
|
||||
// }
|
||||
}
|
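For orientation, here is a minimal sketch of how this path deserializer is typically driven end to end. The route pattern, struct, and values are illustrative only and not part of the diff; it assumes the `actix-router` and `serde` crates as declared above.

```rust
use actix_router::{Path, PathDeserializer, Router};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Params {
    user: String,
    id: u32,
}

fn main() {
    // Register a pattern and match a concrete request path against it.
    let mut builder = Router::<()>::build();
    builder.path("/users/{user}/posts/{id}", ());
    let router = builder.finish();

    let mut path = Path::new("/users/alice/posts/42");
    assert!(router.recognize(&mut path).is_some());

    // Drive the map-style deserializer over the captured segments.
    let params = Params::deserialize(PathDeserializer::new(&path)).unwrap();
    assert_eq!(params.user, "alice");
    assert_eq!(params.id, 42);
}
```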
149
actix-router/src/lib.rs
Normal file
@ -0,0 +1,149 @@
|
||||
//! Resource path matching and router.
|
||||
|
||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||
|
||||
mod de;
|
||||
mod path;
|
||||
mod resource;
|
||||
mod router;
|
||||
|
||||
pub use self::de::PathDeserializer;
|
||||
pub use self::path::Path;
|
||||
pub use self::resource::ResourceDef;
|
||||
pub use self::router::{ResourceInfo, Router, RouterBuilder};
|
||||
|
||||
// TODO: this trait is necessary, document it
|
||||
// see impl Resource for ServiceRequest
|
||||
pub trait Resource<T: ResourcePath> {
|
||||
fn resource_path(&mut self) -> &mut Path<T>;
|
||||
}
|
||||
|
||||
pub trait ResourcePath {
|
||||
fn path(&self) -> &str;
|
||||
}
|
||||
|
||||
impl ResourcePath for String {
|
||||
fn path(&self) -> &str {
|
||||
self.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ResourcePath for &'a str {
|
||||
fn path(&self) -> &str {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl ResourcePath for bytestring::ByteString {
|
||||
fn path(&self) -> &str {
|
||||
&*self
|
||||
}
|
||||
}
|
||||
|
||||
/// One or many patterns.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum Patterns {
|
||||
Single(String),
|
||||
List(Vec<String>),
|
||||
}
|
||||
|
||||
impl Patterns {
|
||||
pub fn is_empty(&self) -> bool {
|
||||
match self {
|
||||
Patterns::Single(_) => false,
|
||||
Patterns::List(pats) => pats.is_empty(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper trait for type that could be converted to one or more path pattern.
|
||||
pub trait IntoPatterns {
|
||||
fn patterns(&self) -> Patterns;
|
||||
}
|
||||
|
||||
impl IntoPatterns for String {
|
||||
fn patterns(&self) -> Patterns {
|
||||
Patterns::Single(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoPatterns for &'a String {
|
||||
fn patterns(&self) -> Patterns {
|
||||
Patterns::Single((*self).clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoPatterns for &'a str {
|
||||
fn patterns(&self) -> Patterns {
|
||||
Patterns::Single((*self).to_owned())
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoPatterns for bytestring::ByteString {
|
||||
fn patterns(&self) -> Patterns {
|
||||
Patterns::Single(self.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoPatterns for Patterns {
|
||||
fn patterns(&self) -> Patterns {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: AsRef<str>> IntoPatterns for Vec<T> {
|
||||
fn patterns(&self) -> Patterns {
|
||||
let mut patterns = self.iter().map(|v| v.as_ref().to_owned());
|
||||
|
||||
match patterns.size_hint() {
|
||||
(1, _) => Patterns::Single(patterns.next().unwrap()),
|
||||
_ => Patterns::List(patterns.collect()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! array_patterns_single (($tp:ty) => {
|
||||
impl IntoPatterns for [$tp; 1] {
|
||||
fn patterns(&self) -> Patterns {
|
||||
Patterns::Single(self[0].to_owned())
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
macro_rules! array_patterns_multiple (($tp:ty, $str_fn:expr, $($num:tt) +) => {
|
||||
// for each array length specified in $num
|
||||
$(
|
||||
impl IntoPatterns for [$tp; $num] {
|
||||
fn patterns(&self) -> Patterns {
|
||||
Patterns::List(self.iter().map($str_fn).collect())
|
||||
}
|
||||
}
|
||||
)+
|
||||
});
|
||||
|
||||
array_patterns_single!(&str);
|
||||
array_patterns_multiple!(&str, |&v| v.to_owned(), 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16);
|
||||
|
||||
array_patterns_single!(String);
|
||||
array_patterns_multiple!(String, |v| v.clone(), 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16);
|
||||
|
||||
#[cfg(feature = "http")]
|
||||
mod url;
|
||||
|
||||
#[cfg(feature = "http")]
|
||||
pub use self::url::{Quoter, Url};
|
||||
|
||||
#[cfg(feature = "http")]
|
||||
mod http_impls {
|
||||
use http::Uri;
|
||||
|
||||
use super::ResourcePath;
|
||||
|
||||
impl ResourcePath for Uri {
|
||||
fn path(&self) -> &str {
|
||||
self.path()
|
||||
}
|
||||
}
|
||||
}
|
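A short sketch of how the `IntoPatterns` conversions above behave; the pattern strings are made up for illustration.

```rust
use actix_router::{IntoPatterns, Patterns};

fn main() {
    // A single string pattern collapses to Patterns::Single.
    assert_eq!(
        "/item/{id}".patterns(),
        Patterns::Single("/item/{id}".to_owned())
    );

    // A Vec with more than one entry becomes Patterns::List.
    assert_eq!(
        vec!["/a", "/b"].patterns(),
        Patterns::List(vec!["/a".to_owned(), "/b".to_owned()])
    );
}
```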
220
actix-router/src/path.rs
Normal file
@ -0,0 +1,220 @@
|
||||
use std::borrow::Cow;
|
||||
use std::ops::Index;
|
||||
|
||||
use firestorm::profile_method;
|
||||
use serde::de;
|
||||
|
||||
use crate::{de::PathDeserializer, Resource, ResourcePath};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) enum PathItem {
|
||||
Static(Cow<'static, str>),
|
||||
Segment(u16, u16),
|
||||
}
|
||||
|
||||
impl Default for PathItem {
|
||||
fn default() -> Self {
|
||||
Self::Static(Cow::Borrowed(""))
|
||||
}
|
||||
}
|
||||
|
||||
/// Resource path match information.
|
||||
///
|
||||
/// If resource path contains variable patterns, `Path` stores them.
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct Path<T> {
|
||||
path: T,
|
||||
pub(crate) skip: u16,
|
||||
pub(crate) segments: Vec<(Cow<'static, str>, PathItem)>,
|
||||
}
|
||||
|
||||
impl<T: ResourcePath> Path<T> {
|
||||
pub fn new(path: T) -> Path<T> {
|
||||
Path {
|
||||
path,
|
||||
skip: 0,
|
||||
segments: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get reference to inner path instance.
|
||||
#[inline]
|
||||
pub fn get_ref(&self) -> &T {
|
||||
&self.path
|
||||
}
|
||||
|
||||
/// Get mutable reference to inner path instance.
|
||||
#[inline]
|
||||
pub fn get_mut(&mut self) -> &mut T {
|
||||
&mut self.path
|
||||
}
|
||||
|
||||
/// Path.
|
||||
#[inline]
|
||||
pub fn path(&self) -> &str {
|
||||
profile_method!(path);
|
||||
|
||||
let skip = self.skip as usize;
|
||||
let path = self.path.path();
|
||||
if skip <= path.len() {
|
||||
&path[skip..]
|
||||
} else {
|
||||
""
|
||||
}
|
||||
}
|
||||
|
||||
/// Set new path.
|
||||
#[inline]
|
||||
pub fn set(&mut self, path: T) {
|
||||
self.skip = 0;
|
||||
self.path = path;
|
||||
self.segments.clear();
|
||||
}
|
||||
|
||||
/// Reset state.
|
||||
#[inline]
|
||||
pub fn reset(&mut self) {
|
||||
self.skip = 0;
|
||||
self.segments.clear();
|
||||
}
|
||||
|
||||
/// Skip first `n` chars in path.
|
||||
#[inline]
|
||||
pub fn skip(&mut self, n: u16) {
|
||||
self.skip += n;
|
||||
}
|
||||
|
||||
pub(crate) fn add(&mut self, name: impl Into<Cow<'static, str>>, value: PathItem) {
|
||||
profile_method!(add);
|
||||
|
||||
match value {
|
||||
PathItem::Static(s) => self.segments.push((name.into(), PathItem::Static(s))),
|
||||
PathItem::Segment(begin, end) => self.segments.push((
|
||||
name.into(),
|
||||
PathItem::Segment(self.skip + begin, self.skip + end),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn add_static(
|
||||
&mut self,
|
||||
name: impl Into<Cow<'static, str>>,
|
||||
value: impl Into<Cow<'static, str>>,
|
||||
) {
|
||||
self.segments
|
||||
.push((name.into(), PathItem::Static(value.into())));
|
||||
}
|
||||
|
||||
/// Check if there are any matched patterns.
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.segments.is_empty()
|
||||
}
|
||||
|
||||
/// Returns number of interpolated segments.
|
||||
#[inline]
|
||||
pub fn segment_count(&self) -> usize {
|
||||
self.segments.len()
|
||||
}
|
||||
|
||||
/// Get matched parameter by name without type conversion
|
||||
pub fn get(&self, name: &str) -> Option<&str> {
|
||||
profile_method!(get);
|
||||
|
||||
for (seg_name, val) in self.segments.iter() {
|
||||
if name == seg_name {
|
||||
return match val {
|
||||
PathItem::Static(ref s) => Some(s),
|
||||
PathItem::Segment(s, e) => {
|
||||
Some(&self.path.path()[(*s as usize)..(*e as usize)])
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Get unprocessed part of the path
|
||||
pub fn unprocessed(&self) -> &str {
|
||||
&self.path.path()[(self.skip as usize)..]
|
||||
}
|
||||
|
||||
/// Get matched parameter by name.
|
||||
///
|
||||
/// If keyed parameter is not available empty string is used as default value.
|
||||
pub fn query(&self, key: &str) -> &str {
|
||||
profile_method!(query);
|
||||
|
||||
if let Some(s) = self.get(key) {
|
||||
s
|
||||
} else {
|
||||
""
|
||||
}
|
||||
}
|
||||
|
||||
/// Return iterator to items in parameter container.
|
||||
pub fn iter(&self) -> PathIter<'_, T> {
|
||||
PathIter {
|
||||
idx: 0,
|
||||
params: self,
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to deserialize matching parameters to a specified type `U`
|
||||
pub fn load<'de, U: serde::Deserialize<'de>>(&'de self) -> Result<U, de::value::Error> {
|
||||
profile_method!(load);
|
||||
de::Deserialize::deserialize(PathDeserializer::new(self))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct PathIter<'a, T> {
|
||||
idx: usize,
|
||||
params: &'a Path<T>,
|
||||
}
|
||||
|
||||
impl<'a, T: ResourcePath> Iterator for PathIter<'a, T> {
|
||||
type Item = (&'a str, &'a str);
|
||||
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<(&'a str, &'a str)> {
|
||||
if self.idx < self.params.segment_count() {
|
||||
let idx = self.idx;
|
||||
let res = match self.params.segments[idx].1 {
|
||||
PathItem::Static(ref s) => s,
|
||||
PathItem::Segment(s, e) => &self.params.path.path()[(s as usize)..(e as usize)],
|
||||
};
|
||||
self.idx += 1;
|
||||
return Some((&self.params.segments[idx].0, res));
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ResourcePath> Index<&'a str> for Path<T> {
|
||||
type Output = str;
|
||||
|
||||
fn index(&self, name: &'a str) -> &str {
|
||||
self.get(name)
|
||||
.expect("Value for parameter is not available")
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ResourcePath> Index<usize> for Path<T> {
|
||||
type Output = str;
|
||||
|
||||
fn index(&self, idx: usize) -> &str {
|
||||
match self.segments[idx].1 {
|
||||
PathItem::Static(ref s) => s,
|
||||
PathItem::Segment(s, e) => &self.path.path()[(s as usize)..(e as usize)],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ResourcePath> Resource<T> for Path<T> {
|
||||
fn resource_path(&mut self) -> &mut Self {
|
||||
self
|
||||
}
|
||||
}
|
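A minimal sketch of the `Path` API defined above, showing the prefix-skip offset and name-based lookups. The URI and segment values are invented, and `add_static` is used here purely to illustrate lookup (it is `#[doc(hidden)]`; segments are normally recorded during matching).

```rust
use actix_router::Path;

fn main() {
    // Path tracks an offset into the underlying URI, so a mounted prefix
    // can be skipped before the remainder is matched.
    let mut path = Path::new("/api/users/42");
    path.skip(4); // skip "/api"
    assert_eq!(path.path(), "/users/42");
    assert_eq!(path.unprocessed(), "/users/42");

    // Matched segments are stored by name and returned without type conversion.
    path.add_static("version", "v1");
    assert_eq!(path.get("version"), Some("v1"));
    assert_eq!(&path["version"], "v1");

    // `query` falls back to an empty string for missing names.
    assert_eq!(path.query("missing"), "");
}
```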
1814
actix-router/src/resource.rs
Normal file
File diff suppressed because it is too large
282
actix-router/src/router.rs
Normal file
@ -0,0 +1,282 @@
|
||||
use firestorm::profile_method;
|
||||
|
||||
use crate::{IntoPatterns, Resource, ResourceDef, ResourcePath};
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq)]
|
||||
pub struct ResourceId(pub u16);
|
||||
|
||||
/// Information about current resource
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ResourceInfo {
|
||||
#[allow(dead_code)]
|
||||
resource: ResourceId,
|
||||
}
|
||||
|
||||
/// Resource router.
|
||||
// T is the resource itself
|
||||
// U is any other data needed for routing like method guards
|
||||
pub struct Router<T, U = ()> {
|
||||
routes: Vec<(ResourceDef, T, Option<U>)>,
|
||||
}
|
||||
|
||||
impl<T, U> Router<T, U> {
|
||||
pub fn build() -> RouterBuilder<T, U> {
|
||||
RouterBuilder {
|
||||
resources: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn recognize<R, P>(&self, resource: &mut R) -> Option<(&T, ResourceId)>
|
||||
where
|
||||
R: Resource<P>,
|
||||
P: ResourcePath,
|
||||
{
|
||||
profile_method!(recognize);
|
||||
|
||||
for item in self.routes.iter() {
|
||||
if item.0.capture_match_info(resource.resource_path()) {
|
||||
return Some((&item.1, ResourceId(item.0.id())));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn recognize_mut<R, P>(&mut self, resource: &mut R) -> Option<(&mut T, ResourceId)>
|
||||
where
|
||||
R: Resource<P>,
|
||||
P: ResourcePath,
|
||||
{
|
||||
profile_method!(recognize_mut);
|
||||
|
||||
for item in self.routes.iter_mut() {
|
||||
if item.0.capture_match_info(resource.resource_path()) {
|
||||
return Some((&mut item.1, ResourceId(item.0.id())));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn recognize_fn<R, P, F>(&self, resource: &mut R, check: F) -> Option<(&T, ResourceId)>
|
||||
where
|
||||
F: Fn(&R, &Option<U>) -> bool,
|
||||
R: Resource<P>,
|
||||
P: ResourcePath,
|
||||
{
|
||||
profile_method!(recognize_checked);
|
||||
|
||||
for item in self.routes.iter() {
|
||||
if item.0.capture_match_info_fn(resource, &check, &item.2) {
|
||||
return Some((&item.1, ResourceId(item.0.id())));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn recognize_mut_fn<R, P, F>(
|
||||
&mut self,
|
||||
resource: &mut R,
|
||||
check: F,
|
||||
) -> Option<(&mut T, ResourceId)>
|
||||
where
|
||||
F: Fn(&R, &Option<U>) -> bool,
|
||||
R: Resource<P>,
|
||||
P: ResourcePath,
|
||||
{
|
||||
profile_method!(recognize_mut_checked);
|
||||
|
||||
for item in self.routes.iter_mut() {
|
||||
if item.0.capture_match_info_fn(resource, &check, &item.2) {
|
||||
return Some((&mut item.1, ResourceId(item.0.id())));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub struct RouterBuilder<T, U = ()> {
|
||||
resources: Vec<(ResourceDef, T, Option<U>)>,
|
||||
}
|
||||
|
||||
impl<T, U> RouterBuilder<T, U> {
|
||||
/// Register resource for specified path.
|
||||
pub fn path<P: IntoPatterns>(
|
||||
&mut self,
|
||||
path: P,
|
||||
resource: T,
|
||||
) -> &mut (ResourceDef, T, Option<U>) {
|
||||
profile_method!(path);
|
||||
|
||||
self.resources
|
||||
.push((ResourceDef::new(path), resource, None));
|
||||
self.resources.last_mut().unwrap()
|
||||
}
|
||||
|
||||
/// Register resource for specified path prefix.
|
||||
pub fn prefix(&mut self, prefix: &str, resource: T) -> &mut (ResourceDef, T, Option<U>) {
|
||||
profile_method!(prefix);
|
||||
|
||||
self.resources
|
||||
.push((ResourceDef::prefix(prefix), resource, None));
|
||||
self.resources.last_mut().unwrap()
|
||||
}
|
||||
|
||||
/// Register resource for ResourceDef
|
||||
pub fn rdef(&mut self, rdef: ResourceDef, resource: T) -> &mut (ResourceDef, T, Option<U>) {
|
||||
profile_method!(rdef);
|
||||
|
||||
self.resources.push((rdef, resource, None));
|
||||
self.resources.last_mut().unwrap()
|
||||
}
|
||||
|
||||
/// Finish configuration and create router instance.
|
||||
pub fn finish(self) -> Router<T, U> {
|
||||
Router {
|
||||
routes: self.resources,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::path::Path;
|
||||
use crate::router::{ResourceId, Router};
|
||||
|
||||
#[allow(clippy::cognitive_complexity)]
|
||||
#[test]
|
||||
fn test_recognizer_1() {
|
||||
let mut router = Router::<usize>::build();
|
||||
router.path("/name", 10).0.set_id(0);
|
||||
router.path("/name/{val}", 11).0.set_id(1);
|
||||
router.path("/name/{val}/index.html", 12).0.set_id(2);
|
||||
router.path("/file/{file}.{ext}", 13).0.set_id(3);
|
||||
router.path("/v{val}/{val2}/index.html", 14).0.set_id(4);
|
||||
router.path("/v/{tail:.*}", 15).0.set_id(5);
|
||||
router.path("/test2/{test}.html", 16).0.set_id(6);
|
||||
router.path("/{test}/index.html", 17).0.set_id(7);
|
||||
let mut router = router.finish();
|
||||
|
||||
let mut path = Path::new("/unknown");
|
||||
assert!(router.recognize_mut(&mut path).is_none());
|
||||
|
||||
let mut path = Path::new("/name");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 10);
|
||||
assert_eq!(info, ResourceId(0));
|
||||
assert!(path.is_empty());
|
||||
|
||||
let mut path = Path::new("/name/value");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 11);
|
||||
assert_eq!(info, ResourceId(1));
|
||||
assert_eq!(path.get("val").unwrap(), "value");
|
||||
assert_eq!(&path["val"], "value");
|
||||
|
||||
let mut path = Path::new("/name/value2/index.html");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 12);
|
||||
assert_eq!(info, ResourceId(2));
|
||||
assert_eq!(path.get("val").unwrap(), "value2");
|
||||
|
||||
let mut path = Path::new("/file/file.gz");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 13);
|
||||
assert_eq!(info, ResourceId(3));
|
||||
assert_eq!(path.get("file").unwrap(), "file");
|
||||
assert_eq!(path.get("ext").unwrap(), "gz");
|
||||
|
||||
let mut path = Path::new("/vtest/ttt/index.html");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 14);
|
||||
assert_eq!(info, ResourceId(4));
|
||||
assert_eq!(path.get("val").unwrap(), "test");
|
||||
assert_eq!(path.get("val2").unwrap(), "ttt");
|
||||
|
||||
let mut path = Path::new("/v/blah-blah/index.html");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 15);
|
||||
assert_eq!(info, ResourceId(5));
|
||||
assert_eq!(path.get("tail").unwrap(), "blah-blah/index.html");
|
||||
|
||||
let mut path = Path::new("/test2/index.html");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 16);
|
||||
assert_eq!(info, ResourceId(6));
|
||||
assert_eq!(path.get("test").unwrap(), "index");
|
||||
|
||||
let mut path = Path::new("/bbb/index.html");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 17);
|
||||
assert_eq!(info, ResourceId(7));
|
||||
assert_eq!(path.get("test").unwrap(), "bbb");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_recognizer_2() {
|
||||
let mut router = Router::<usize>::build();
|
||||
router.path("/index.json", 10);
|
||||
router.path("/{source}.json", 11);
|
||||
let mut router = router.finish();
|
||||
|
||||
let mut path = Path::new("/index.json");
|
||||
let (h, _) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 10);
|
||||
|
||||
let mut path = Path::new("/test.json");
|
||||
let (h, _) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 11);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_recognizer_with_prefix() {
|
||||
let mut router = Router::<usize>::build();
|
||||
router.path("/name", 10).0.set_id(0);
|
||||
router.path("/name/{val}", 11).0.set_id(1);
|
||||
let mut router = router.finish();
|
||||
|
||||
let mut path = Path::new("/name");
|
||||
path.skip(5);
|
||||
assert!(router.recognize_mut(&mut path).is_none());
|
||||
|
||||
let mut path = Path::new("/test/name");
|
||||
path.skip(5);
|
||||
let (h, _) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 10);
|
||||
|
||||
let mut path = Path::new("/test/name/value");
|
||||
path.skip(5);
|
||||
let (h, id) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 11);
|
||||
assert_eq!(id, ResourceId(1));
|
||||
assert_eq!(path.get("val").unwrap(), "value");
|
||||
assert_eq!(&path["val"], "value");
|
||||
|
||||
// same patterns
|
||||
let mut router = Router::<usize>::build();
|
||||
router.path("/name", 10);
|
||||
router.path("/name/{val}", 11);
|
||||
let mut router = router.finish();
|
||||
|
||||
let mut path = Path::new("/name");
|
||||
path.skip(6);
|
||||
assert!(router.recognize_mut(&mut path).is_none());
|
||||
|
||||
let mut path = Path::new("/test2/name");
|
||||
path.skip(6);
|
||||
let (h, _) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 10);
|
||||
|
||||
let mut path = Path::new("/test2/name-test");
|
||||
path.skip(6);
|
||||
assert!(router.recognize_mut(&mut path).is_none());
|
||||
|
||||
let mut path = Path::new("/test2/name/ttt");
|
||||
path.skip(6);
|
||||
let (h, _) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 11);
|
||||
assert_eq!(&path["val"], "ttt");
|
||||
}
|
||||
}
|
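A compact usage sketch of the `Router` above, mirroring the behavior exercised in `test_recognizer_2`. The handler values are plain `&str` labels for illustration; in actix-web they are services.

```rust
use actix_router::{Path, Router};

fn main() {
    let mut builder = Router::<&'static str>::build();
    builder.path("/index.json", "index");
    builder.path("/{source}.json", "generic");
    let router = builder.finish();

    // Routes are tried in registration order, so the literal pattern wins.
    let mut path = Path::new("/index.json");
    let (handler, _id) = router.recognize(&mut path).unwrap();
    assert_eq!(*handler, "index");

    // Anything else falls through to the parameterized pattern.
    let mut path = Path::new("/data.json");
    let (handler, _id) = router.recognize(&mut path).unwrap();
    assert_eq!(*handler, "generic");
    assert_eq!(path.get("source"), Some("data"));
}
```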
288
actix-router/src/url.rs
Normal file
@ -0,0 +1,288 @@
|
||||
use crate::ResourcePath;
|
||||
|
||||
#[allow(dead_code)]
|
||||
const GEN_DELIMS: &[u8] = b":/?#[]@";
|
||||
#[allow(dead_code)]
|
||||
const SUB_DELIMS_WITHOUT_QS: &[u8] = b"!$'()*,";
|
||||
#[allow(dead_code)]
|
||||
const SUB_DELIMS: &[u8] = b"!$'()*,+?=;";
|
||||
#[allow(dead_code)]
|
||||
const RESERVED: &[u8] = b":/?#[]@!$'()*,+?=;";
|
||||
#[allow(dead_code)]
|
||||
const UNRESERVED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
|
||||
ABCDEFGHIJKLMNOPQRSTUVWXYZ
|
||||
1234567890
|
||||
-._~";
|
||||
const ALLOWED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
|
||||
ABCDEFGHIJKLMNOPQRSTUVWXYZ
|
||||
1234567890
|
||||
-._~
|
||||
!$'()*,";
|
||||
const QS: &[u8] = b"+&=;b";
|
||||
|
||||
#[inline]
|
||||
fn bit_at(array: &[u8], ch: u8) -> bool {
|
||||
array[(ch >> 3) as usize] & (1 << (ch & 7)) != 0
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn set_bit(array: &mut [u8], ch: u8) {
|
||||
array[(ch >> 3) as usize] |= 1 << (ch & 7)
|
||||
}
|
||||
|
||||
thread_local! {
|
||||
static DEFAULT_QUOTER: Quoter = Quoter::new(b"@:", b"%/+");
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Debug)]
|
||||
pub struct Url {
|
||||
uri: http::Uri,
|
||||
path: Option<String>,
|
||||
}
|
||||
|
||||
impl Url {
|
||||
pub fn new(uri: http::Uri) -> Url {
|
||||
let path = DEFAULT_QUOTER.with(|q| q.requote(uri.path().as_bytes()));
|
||||
|
||||
Url { uri, path }
|
||||
}
|
||||
|
||||
pub fn with_quoter(uri: http::Uri, quoter: &Quoter) -> Url {
|
||||
Url {
|
||||
path: quoter.requote(uri.path().as_bytes()),
|
||||
uri,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn uri(&self) -> &http::Uri {
|
||||
&self.uri
|
||||
}
|
||||
|
||||
pub fn path(&self) -> &str {
|
||||
if let Some(ref s) = self.path {
|
||||
s
|
||||
} else {
|
||||
self.uri.path()
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn update(&mut self, uri: &http::Uri) {
|
||||
self.uri = uri.clone();
|
||||
self.path = DEFAULT_QUOTER.with(|q| q.requote(uri.path().as_bytes()));
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn update_with_quoter(&mut self, uri: &http::Uri, quoter: &Quoter) {
|
||||
self.uri = uri.clone();
|
||||
self.path = quoter.requote(uri.path().as_bytes());
|
||||
}
|
||||
}
|
||||
|
||||
impl ResourcePath for Url {
|
||||
#[inline]
|
||||
fn path(&self) -> &str {
|
||||
self.path()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Quoter {
|
||||
safe_table: [u8; 16],
|
||||
protected_table: [u8; 16],
|
||||
}
|
||||
|
||||
impl Quoter {
|
||||
pub fn new(safe: &[u8], protected: &[u8]) -> Quoter {
|
||||
let mut q = Quoter {
|
||||
safe_table: [0; 16],
|
||||
protected_table: [0; 16],
|
||||
};
|
||||
|
||||
// prepare safe table
|
||||
for i in 0..128 {
|
||||
if ALLOWED.contains(&i) {
|
||||
set_bit(&mut q.safe_table, i);
|
||||
}
|
||||
if QS.contains(&i) {
|
||||
set_bit(&mut q.safe_table, i);
|
||||
}
|
||||
}
|
||||
|
||||
for ch in safe {
|
||||
set_bit(&mut q.safe_table, *ch)
|
||||
}
|
||||
|
||||
// prepare protected table
|
||||
for ch in protected {
|
||||
set_bit(&mut q.safe_table, *ch);
|
||||
set_bit(&mut q.protected_table, *ch);
|
||||
}
|
||||
|
||||
q
|
||||
}
|
||||
|
||||
pub fn requote(&self, val: &[u8]) -> Option<String> {
|
||||
let mut has_pct = 0;
|
||||
let mut pct = [b'%', 0, 0];
|
||||
let mut idx = 0;
|
||||
let mut cloned: Option<Vec<u8>> = None;
|
||||
|
||||
let len = val.len();
|
||||
while idx < len {
|
||||
let ch = val[idx];
|
||||
|
||||
if has_pct != 0 {
|
||||
pct[has_pct] = val[idx];
|
||||
has_pct += 1;
|
||||
if has_pct == 3 {
|
||||
has_pct = 0;
|
||||
let buf = cloned.as_mut().unwrap();
|
||||
|
||||
if let Some(ch) = restore_ch(pct[1], pct[2]) {
|
||||
if ch < 128 {
|
||||
if bit_at(&self.protected_table, ch) {
|
||||
buf.extend_from_slice(&pct);
|
||||
idx += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if bit_at(&self.safe_table, ch) {
|
||||
buf.push(ch);
|
||||
idx += 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
buf.push(ch);
|
||||
} else {
|
||||
buf.extend_from_slice(&pct[..]);
|
||||
}
|
||||
}
|
||||
} else if ch == b'%' {
|
||||
has_pct = 1;
|
||||
if cloned.is_none() {
|
||||
let mut c = Vec::with_capacity(len);
|
||||
c.extend_from_slice(&val[..idx]);
|
||||
cloned = Some(c);
|
||||
}
|
||||
} else if let Some(ref mut cloned) = cloned {
|
||||
cloned.push(ch)
|
||||
}
|
||||
idx += 1;
|
||||
}
|
||||
|
||||
cloned.map(|data| String::from_utf8_lossy(&data).into_owned())
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn from_hex(v: u8) -> Option<u8> {
|
||||
if (b'0'..=b'9').contains(&v) {
|
||||
Some(v - 0x30) // ord('0') == 0x30
|
||||
} else if (b'A'..=b'F').contains(&v) {
|
||||
Some(v - 0x41 + 10) // ord('A') == 0x41
|
||||
} else if (b'a'..=b'f').contains(&v) {
|
||||
Some(v - 0x61 + 10) // ord('a') == 0x61
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn restore_ch(d1: u8, d2: u8) -> Option<u8> {
|
||||
from_hex(d1).and_then(|d1| from_hex(d2).map(move |d2| d1 << 4 | d2))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use http::Uri;
|
||||
use std::convert::TryFrom;
|
||||
|
||||
use super::*;
|
||||
use crate::{Path, ResourceDef};
|
||||
|
||||
const PROTECTED: &[u8] = b"%/+";
|
||||
|
||||
fn match_url(pattern: &'static str, url: impl AsRef<str>) -> Path<Url> {
|
||||
let re = ResourceDef::new(pattern);
|
||||
let uri = Uri::try_from(url.as_ref()).unwrap();
|
||||
let mut path = Path::new(Url::new(uri));
|
||||
assert!(re.capture_match_info(&mut path));
|
||||
path
|
||||
}
|
||||
|
||||
fn percent_encode(data: &[u8]) -> String {
|
||||
data.iter().map(|c| format!("%{:02X}", c)).collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_url() {
|
||||
let re = "/user/{id}/test";
|
||||
|
||||
let path = match_url(re, "/user/2345/test");
|
||||
assert_eq!(path.get("id").unwrap(), "2345");
|
||||
|
||||
// "%25" should never be decoded into '%' to guarantee the output is a valid
|
||||
// percent-encoded format
|
||||
let path = match_url(re, "/user/qwe%25/test");
|
||||
assert_eq!(path.get("id").unwrap(), "qwe%25");
|
||||
|
||||
let path = match_url(re, "/user/qwe%25rty/test");
|
||||
assert_eq!(path.get("id").unwrap(), "qwe%25rty");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_protected_chars() {
|
||||
let encoded = percent_encode(PROTECTED);
|
||||
let path = match_url("/user/{id}/test", format!("/user/{}/test", encoded));
|
||||
assert_eq!(path.get("id").unwrap(), &encoded);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_non_protected_ascii() {
|
||||
let nonprotected_ascii = ('\u{0}'..='\u{7F}')
|
||||
.filter(|&c| c.is_ascii() && !PROTECTED.contains(&(c as u8)))
|
||||
.collect::<String>();
|
||||
let encoded = percent_encode(nonprotected_ascii.as_bytes());
|
||||
let path = match_url("/user/{id}/test", format!("/user/{}/test", encoded));
|
||||
assert_eq!(path.get("id").unwrap(), &nonprotected_ascii);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid_utf8_multibyte() {
|
||||
let test = ('\u{FF00}'..='\u{FFFF}').collect::<String>();
|
||||
let encoded = percent_encode(test.as_bytes());
|
||||
let path = match_url("/a/{id}/b", format!("/a/{}/b", &encoded));
|
||||
assert_eq!(path.get("id").unwrap(), &test);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_utf8() {
|
||||
let invalid_utf8 = percent_encode((0x80..=0xff).collect::<Vec<_>>().as_slice());
|
||||
let uri = Uri::try_from(format!("/{}", invalid_utf8)).unwrap();
|
||||
let path = Path::new(Url::new(uri));
|
||||
|
||||
// We should always get a valid utf8 string
|
||||
assert!(String::from_utf8(path.path().as_bytes().to_owned()).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_from_hex() {
|
||||
let hex = b"0123456789abcdefABCDEF";
|
||||
|
||||
for i in 0..256 {
|
||||
let c = i as u8;
|
||||
if hex.contains(&c) {
|
||||
assert!(from_hex(c).is_some())
|
||||
} else {
|
||||
assert!(from_hex(c).is_none())
|
||||
}
|
||||
}
|
||||
|
||||
let expected = [
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 10, 11, 12, 13, 14, 15,
|
||||
];
|
||||
for i in 0..hex.len() {
|
||||
assert_eq!(from_hex(hex[i]).unwrap(), expected[i]);
|
||||
}
|
||||
}
|
||||
}
|
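A small sketch of the requoting behavior implemented above: percent-encoded bytes in the "safe" set are decoded while "protected" bytes (`%`, `/`, `+` by default) stay encoded, so a match can never be made ambiguous. It assumes actix-router's `http` feature is enabled and the `http` crate is available; the example path is made up.

```rust
use std::convert::TryFrom;

use actix_router::Url;

fn main() {
    let uri = http::Uri::try_from("/user/caf%C3%A9%2Fdocs").unwrap();
    let url = Url::new(uri);

    // "%C3%A9" (é) is decoded; "%2F" (/) is protected and stays encoded.
    assert_eq!(url.path(), "/user/café%2Fdocs");
}
```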
@ -3,6 +3,21 @@
## Unreleased - 2021-xx-xx


## 0.1.0-beta.5 - 2021-10-20
* Updated rustls to v0.20. [#2414]
* Minimum supported Rust version (MSRV) is now 1.52.

[#2414]: https://github.com/actix/actix-web/pull/2414


## 0.1.0-beta.4 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.


## 0.1.0-beta.3 - 2021-06-20
* No significant changes from `0.1.0-beta.2`.


## 0.1.0-beta.2 - 2021-04-17
* No significant changes from `0.1.0-beta.1`.
@ -1,13 +1,22 @@
|
||||
[package]
|
||||
name = "actix-test"
|
||||
version = "0.1.0-beta.2"
|
||||
version = "0.1.0-beta.5"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Rob Ede <robjtede@icloud.com>",
|
||||
]
|
||||
edition = "2018"
|
||||
description = "Integration testing tools for Actix Web applications"
|
||||
keywords = ["http", "web", "framework", "async", "futures"]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
categories = [
|
||||
"network-programming",
|
||||
"asynchronous",
|
||||
"web-programming::http-server",
|
||||
"web-programming::websocket",
|
||||
]
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
@ -20,13 +29,13 @@ openssl = ["tls-openssl", "actix-http/openssl"]

[dependencies]
actix-codec = "0.4.0"
actix-http = "3.0.0-beta.7"
actix-http-test = { version = "3.0.0-beta.4", features = [] }
actix-http = "3.0.0-beta.11"
actix-http-test = "3.0.0-beta.5"
actix-service = "2.0.0"
actix-utils = "3.0.0"
actix-web = { version = "4.0.0-beta.7", default-features = false, features = ["cookies"] }
actix-web = { version = "4.0.0-beta.10", default-features = false, features = ["cookies"] }
actix-rt = "2.1"
awc = { version = "3.0.0-beta.6", default-features = false, features = ["cookies"] }
awc = { version = "3.0.0-beta.9", default-features = false, features = ["cookies"] }

futures-core = { version = "0.3.7", default-features = false, features = ["std"] }
futures-util = { version = "0.3.7", default-features = false, features = [] }
@ -35,4 +44,4 @@ serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_urlencoded = "0.7"
tls-openssl = { package = "openssl", version = "0.10.9", optional = true }
tls-rustls = { package = "rustls", version = "0.19.0", optional = true }
tls-rustls = { package = "rustls", version = "0.20.0", optional = true }
@ -64,7 +64,7 @@ pub use actix_web::test::{
/// Ok(HttpResponse::Ok())
/// }
///
/// #[actix_rt::test]
/// #[actix_web::test]
/// async fn test_example() {
/// let srv = actix_test::start(||
///     App::new().service(my_handler)
@ -104,7 +104,7 @@ where
/// Ok(HttpResponse::Ok())
/// }
///
/// #[actix_rt::test]
/// #[actix_web::test]
/// async fn test_example() {
/// let srv = actix_test::start_with(actix_test::config().h1(), ||
///     App::new().service(my_handler)
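For context, a fuller version of the doc example being updated in these hunks, runnable with `actix-test` as a dev-dependency; the handler, route, and assertions are assumptions made for illustration:

```rust
use actix_web::{get, App, HttpResponse, Responder};

#[get("/")]
async fn my_handler() -> impl Responder {
    HttpResponse::Ok()
}

// The new attribute replaces `#[actix_rt::test]` in the docs; both set up an Actix system.
#[actix_web::test]
async fn test_example() {
    // Spin up an in-process test server backed by the given app factory.
    let srv = actix_test::start(|| App::new().service(my_handler));

    let req = srv.get("/");
    let res = req.send().await.unwrap();

    assert!(res.status().is_success());
}
```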
@ -1,6 +1,17 @@
# Changes

## Unreleased - 2021-xx-xx
* Minimum supported Rust version (MSRV) is now 1.52.


## 4.0.0-beta.7 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.


## 4.0.0-beta.6 - 2021-06-26
* Update `actix` to `0.12`. [#2277]

[#2277]: https://github.com/actix/actix-web/pull/2277


## 4.0.0-beta.5 - 2021-06-17
@ -1,13 +1,11 @@
[package]
name = "actix-web-actors"
version = "4.0.0-beta.5"
version = "4.0.0-beta.7"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Actix actors support for Actix Web"
readme = "README.md"
keywords = ["actix", "http", "web", "framework", "async"]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git"
documentation = "https://docs.rs/actix-web-actors/"
repository = "https://github.com/actix/actix-web"
license = "MIT OR Apache-2.0"
edition = "2018"
@ -16,10 +14,10 @@ name = "actix_web_actors"
path = "src/lib.rs"

[dependencies]
actix = { version = "0.11.0-beta.3", default-features = false }
actix = { version = "0.12.0", default-features = false }
actix-codec = "0.4.0"
actix-http = "3.0.0-beta.7"
actix-web = { version = "4.0.0-beta.7", default-features = false }
actix-http = "3.0.0-beta.11"
actix-web = { version = "4.0.0-beta.10", default-features = false }

bytes = "1"
bytestring = "1"
@ -29,8 +27,8 @@ tokio = { version = "1", features = ["sync"] }

[dev-dependencies]
actix-rt = "2.2"
actix-test = "0.1.0-beta.2"
actix-test = "0.1.0-beta.5"

awc = { version = "3.0.0-beta.6", default-features = false }
awc = { version = "3.0.0-beta.9", default-features = false }
env_logger = "0.8"
futures-util = { version = "0.3.7", default-features = false }
@ -3,16 +3,15 @@
> Actix actors support for Actix Web.

[](https://crates.io/crates/actix-web-actors)
[](https://docs.rs/actix-web-actors/4.0.0-beta.5)
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
[](https://docs.rs/actix-web-actors/4.0.0-beta.7)
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)

<br />
[](https://deps.rs/crate/actix-web-actors/4.0.0-beta.5)
[](https://deps.rs/crate/actix-web-actors/4.0.0-beta.7)
[](https://crates.io/crates/actix-web-actors)
[](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[](https://discord.gg/NWpN5mmg3x)

## Documentation & Resources

- [API Documentation](https://docs.rs/actix-web-actors)
- [Chat on Gitter](https://gitter.im/actix/actix-web)
- Minimum supported Rust version: 1.46 or later
- Minimum Supported Rust Version (MSRV): 1.52
@ -3,6 +3,22 @@
## Unreleased - 2021-xx-xx


## 0.5.0-beta.5 - 2021-10-20
* Improve error recovery potential when macro input is invalid. [#2410]
* Add `#[actix_web::test]` macro for setting up tests with a runtime. [#2409]
* Minimum supported Rust version (MSRV) is now 1.52.

[#2410]: https://github.com/actix/actix-web/pull/2410
[#2409]: https://github.com/actix/actix-web/pull/2409


## 0.5.0-beta.4 - 2021-09-09
* In routing macros, paths are now validated at compile time. [#2350]
* Minimum supported Rust version (MSRV) is now 1.51.

[#2350]: https://github.com/actix/actix-web/pull/2350


## 0.5.0-beta.3 - 2021-06-17
* No notable changes.
@ -1,12 +1,13 @@
[package]
name = "actix-web-codegen"
version = "0.5.0-beta.3"
version = "0.5.0-beta.5"
description = "Routing and runtime macros for Actix Web"
readme = "README.md"
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
documentation = "https://docs.rs/actix-web-codegen"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
repository = "https://github.com/actix/actix-web.git"
authors = [
    "Nikolay Kim <fafhrd91@gmail.com>",
    "Rob Ede <robjtede@icloud.com>",
]
license = "MIT OR Apache-2.0"
edition = "2018"
@ -17,12 +18,14 @@ proc-macro = true
quote = "1"
syn = { version = "1", features = ["full", "parsing"] }
proc-macro2 = "1"
actix-router = "0.5.0-beta.2"

[dev-dependencies]
actix-rt = "2.2"
actix-test = "0.1.0-beta.2"
actix-macros = "0.2.3"
actix-test = "0.1.0-beta.5"
actix-utils = "3.0.0"
actix-web = "4.0.0-beta.7"
actix-web = "4.0.0-beta.10"

futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] }
trybuild = "1"
@ -3,19 +3,18 @@
> Routing and runtime macros for Actix Web.

[](https://crates.io/crates/actix-web-codegen)
[](https://docs.rs/actix-web-codegen/0.5.0-beta.3)
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
[](https://docs.rs/actix-web-codegen/0.5.0-beta.5)
[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)

<br />
[](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.3)
[](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.5)
[](https://crates.io/crates/actix-web-codegen)
[](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[](https://discord.gg/NWpN5mmg3x)

## Documentation & Resources

- [API Documentation](https://docs.rs/actix-web-codegen)
- [Chat on Gitter](https://gitter.im/actix/actix-web)
- Minimum supported Rust version: 1.46 or later.
- Minimum Supported Rust Version (MSRV): 1.52

## Compile Testing
@ -59,6 +59,7 @@
#![recursion_limit = "512"]

use proc_macro::TokenStream;
use quote::quote;

mod route;
@ -157,24 +158,41 @@ method_macro! {
}

/// Marks async main function as the actix system entry-point.
///
/// # Actix Web Re-export
/// This macro can be applied with `#[actix_web::main]` when used in Actix Web applications.
///
/// # Examples
/// ```
/// #[actix_web_codegen::main]
/// #[actix_web::main]
/// async fn main() {
///     async { println!("Hello world"); }.await
/// }
/// ```
#[proc_macro_attribute]
pub fn main(_: TokenStream, item: TokenStream) -> TokenStream {
    use quote::quote;
    let input = syn::parse_macro_input!(item as syn::ItemFn);
    (quote! {
        #[actix_web::rt::main(system = "::actix_web::rt::System")]
        #input
    let mut output: TokenStream = (quote! {
        #[::actix_web::rt::main(system = "::actix_web::rt::System")]
    })
    .into()
    .into();

    output.extend(item);
    output
}

/// Marks async test functions to use the actix system entry-point.
///
/// # Examples
/// ```
/// #[actix_web::test]
/// async fn test() {
///     assert_eq!(async { "Hello world" }.await, "Hello world");
/// }
/// ```
#[proc_macro_attribute]
pub fn test(_: TokenStream, item: TokenStream) -> TokenStream {
    let mut output: TokenStream = (quote! {
        #[::actix_web::rt::test(system = "::actix_web::rt::System")]
    })
    .into();

    output.extend(item);
    output
}
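As a quick illustration of what the attribute macros above produce, here is roughly how an annotated test function is rewritten. This is a sketch inferred from the `quote!` blocks shown; the exact token layout of the real expansion is an assumption:

```rust
// User input:
//
//     #[actix_web::test]
//     async fn my_test() {
//         assert_eq!(async { 1 }.await, 1);
//     }
//
// Approximate expansion: the macro emits the runtime attribute first,
// then appends the original item unchanged.
#[::actix_web::rt::test(system = "::actix_web::rt::System")]
async fn my_test() {
    assert_eq!(async { 1 }.await, 1);
}
```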
@ -3,10 +3,11 @@ extern crate proc_macro;
use std::collections::HashSet;
use std::convert::TryFrom;

use actix_router::ResourceDef;
use proc_macro::TokenStream;
use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::{format_ident, quote, ToTokens, TokenStreamExt};
use syn::{parse_macro_input, AttributeArgs, Ident, NestedMeta};
use syn::{parse_macro_input, AttributeArgs, Ident, LitStr, NestedMeta};

enum ResourceType {
    Async,
@ -101,6 +102,7 @@ impl Args {
match arg {
    NestedMeta::Lit(syn::Lit::Str(lit)) => match path {
        None => {
            let _ = ResourceDef::new(lit.value());
            path = Some(lit);
        }
        _ => {
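The `ResourceDef::new(lit.value())` call added above is what moves path validation to compile time (see the 0.5.0-beta.4 changelog entry earlier in this diff). A hedged illustration with a hypothetical handler:

```rust
// With compile-time validation, a malformed path such as this one is rejected while
// compiling rather than when the server is configured:
//
//     #[get("/{")]    // fails to build: malformed dynamic segment
//     async fn broken() -> &'static str { "never builds" }
//
// A well-formed dynamic segment still compiles (handler and route are illustrative only):
#[actix_web::get("/users/{id}")]
async fn user_detail(path: actix_web::web::Path<u32>) -> String {
    format!("user {}", path.into_inner())
}
```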
@ -218,7 +220,7 @@ fn guess_resource_type(typ: &syn::Type) -> ResourceType {
impl Route {
    pub fn new(
        args: AttributeArgs,
        input: TokenStream,
        ast: syn::ItemFn,
        method: Option<MethodType>,
    ) -> syn::Result<Self> {
        if args.is_empty() {
||||
@ -227,20 +229,16 @@ impl Route {
|
||||
format!(
|
||||
r#"invalid service definition, expected #[{}("<some path>")]"#,
|
||||
method
|
||||
.map(|it| it.as_str())
|
||||
.unwrap_or("route")
|
||||
.map_or("route", |it| it.as_str())
|
||||
.to_ascii_lowercase()
|
||||
),
|
||||
));
|
||||
}
|
||||
let ast: syn::ItemFn = syn::parse(input)?;
|
||||
|
||||
let name = ast.sig.ident.clone();
|
||||
|
||||
// Try and pull out the doc comments so that we can reapply them to the
|
||||
// generated struct.
|
||||
//
|
||||
// Note that multi line doc comments are converted to multiple doc
|
||||
// attributes.
|
||||
// Try and pull out the doc comments so that we can reapply them to the generated struct.
|
||||
// Note that multi line doc comments are converted to multiple doc attributes.
|
||||
let doc_attributes = ast
|
||||
.attrs
|
||||
.iter()
|
||||
@ -298,7 +296,7 @@ impl ToTokens for Route {
|
||||
} = self;
|
||||
let resource_name = resource_name
|
||||
.as_ref()
|
||||
.map_or_else(|| name.to_string(), |n| n.value());
|
||||
.map_or_else(|| name.to_string(), LitStr::value);
|
||||
let method_guards = {
|
||||
let mut others = methods.iter();
|
||||
// unwrapping since length is checked to be at least one
|
||||
@ -348,8 +346,28 @@ pub(crate) fn with_method(
|
||||
input: TokenStream,
|
||||
) -> TokenStream {
|
||||
let args = parse_macro_input!(args as syn::AttributeArgs);
|
||||
match Route::new(args, input, method) {
|
||||
|
||||
let ast = match syn::parse::<syn::ItemFn>(input.clone()) {
|
||||
Ok(ast) => ast,
|
||||
// on parse error, make IDEs happy; see fn docs
|
||||
Err(err) => return input_and_compile_error(input, err),
|
||||
};
|
||||
|
||||
match Route::new(args, ast, method) {
|
||||
Ok(route) => route.into_token_stream().into(),
|
||||
Err(err) => err.to_compile_error().into(),
|
||||
// on macro related error, make IDEs happy; see fn docs
|
||||
Err(err) => input_and_compile_error(input, err),
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts the error to a token stream and appends it to the original input.
|
||||
///
|
||||
/// Returning the original input in addition to the error is good for IDEs which can gracefully
|
||||
/// recover and show more precise errors within the macro body.
|
||||
///
|
||||
/// See <https://github.com/rust-analyzer/rust-analyzer/issues/10468> for more info.
|
||||
fn input_and_compile_error(mut item: TokenStream, err: syn::Error) -> TokenStream {
|
||||
let compile_err = TokenStream::from(err.to_compile_error());
|
||||
item.extend(compile_err);
|
||||
item
|
||||
}
|
||||
|
@ -256,7 +256,7 @@ async fn test_auto_async() {
|
||||
assert!(response.status().is_success());
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[actix_web::test]
|
||||
async fn test_wrap() {
|
||||
let srv = actix_test::start(|| App::new().service(get_wrap));
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
#[rustversion::stable(1.46)] // MSRV
|
||||
#[rustversion::stable(1.52)] // MSRV
|
||||
#[test]
|
||||
fn compile_macros() {
|
||||
let t = trybuild::TestCases::new();
|
||||
@ -10,6 +10,9 @@ fn compile_macros() {
|
||||
t.compile_fail("tests/trybuild/route-missing-method-fail.rs");
|
||||
t.compile_fail("tests/trybuild/route-duplicate-method-fail.rs");
|
||||
t.compile_fail("tests/trybuild/route-unexpected-method-fail.rs");
|
||||
t.compile_fail("tests/trybuild/route-malformed-path-fail.rs");
|
||||
|
||||
t.pass("tests/trybuild/docstring-ok.rs");
|
||||
|
||||
t.pass("tests/trybuild/test-runtime.rs");
|
||||
}
|
||||
|
@ -4,8 +4,8 @@ error: HTTP method defined more than once: `GET`
|
||||
3 | #[route("/", method="GET", method="GET")]
|
||||
| ^^^^^
|
||||
|
||||
error[E0425]: cannot find value `index` in this scope
|
||||
error[E0277]: the trait bound `fn() -> impl std::future::Future {index}: HttpServiceFactory` is not satisfied
|
||||
--> $DIR/route-duplicate-method-fail.rs:12:55
|
||||
|
|
||||
12 | let srv = actix_test::start(|| App::new().service(index));
|
||||
| ^^^^^ not found in this scope
|
||||
| ^^^^^ the trait `HttpServiceFactory` is not implemented for `fn() -> impl std::future::Future {index}`
|
||||
|
@ -0,0 +1,33 @@
|
||||
use actix_web_codegen::get;
|
||||
|
||||
#[get("/{")]
|
||||
async fn zero() -> &'static str {
|
||||
"malformed resource def"
|
||||
}
|
||||
|
||||
#[get("/{foo")]
|
||||
async fn one() -> &'static str {
|
||||
"malformed resource def"
|
||||
}
|
||||
|
||||
#[get("/{}")]
|
||||
async fn two() -> &'static str {
|
||||
"malformed resource def"
|
||||
}
|
||||
|
||||
#[get("/*")]
|
||||
async fn three() -> &'static str {
|
||||
"malformed resource def"
|
||||
}
|
||||
|
||||
#[get("/{tail:\\d+}*")]
|
||||
async fn four() -> &'static str {
|
||||
"malformed resource def"
|
||||
}
|
||||
|
||||
#[get("/{a}/{b}/{c}/{d}/{e}/{f}/{g}/{h}/{i}/{j}/{k}/{l}/{m}/{n}/{o}/{p}/{q}")]
|
||||
async fn five() -> &'static str {
|
||||
"malformed resource def"
|
||||
}
|
||||
|
||||
fn main() {}
|
@ -0,0 +1,42 @@
|
||||
error: custom attribute panicked
|
||||
--> $DIR/route-malformed-path-fail.rs:3:1
|
||||
|
|
||||
3 | #[get("/{")]
|
||||
| ^^^^^^^^^^^^
|
||||
|
|
||||
= help: message: pattern "{" contains malformed dynamic segment
|
||||
|
||||
error: custom attribute panicked
|
||||
--> $DIR/route-malformed-path-fail.rs:8:1
|
||||
|
|
||||
8 | #[get("/{foo")]
|
||||
| ^^^^^^^^^^^^^^^
|
||||
|
|
||||
= help: message: pattern "{foo" contains malformed dynamic segment
|
||||
|
||||
error: custom attribute panicked
|
||||
--> $DIR/route-malformed-path-fail.rs:13:1
|
||||
|
|
||||
13 | #[get("/{}")]
|
||||
| ^^^^^^^^^^^^^
|
||||
|
|
||||
= help: message: Wrong path pattern: "/{}" regex parse error:
|
||||
((?s-m)^/(?P<>[^/]+))$
|
||||
^
|
||||
error: empty capture group name
|
||||
|
||||
error: custom attribute panicked
|
||||
--> $DIR/route-malformed-path-fail.rs:23:1
|
||||
|
|
||||
23 | #[get("/{tail:\\d+}*")]
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
= help: message: custom regex is not supported for tail match
|
||||
|
||||
error: custom attribute panicked
|
||||
--> $DIR/route-malformed-path-fail.rs:28:1
|
||||
|
|
||||
28 | #[get("/{a}/{b}/{c}/{d}/{e}/{f}/{g}/{h}/{i}/{j}/{k}/{l}/{m}/{n}/{o}/{p}/{q}")]
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
= help: message: Only 16 dynamic segments are allowed, provided: 17
|
@ -6,8 +6,8 @@ error: The #[route(..)] macro requires at least one `method` attribute
|
||||
|
|
||||
= note: this error originates in an attribute macro (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error[E0425]: cannot find value `index` in this scope
|
||||
error[E0277]: the trait bound `fn() -> impl std::future::Future {index}: HttpServiceFactory` is not satisfied
|
||||
--> $DIR/route-missing-method-fail.rs:12:55
|
||||
|
|
||||
12 | let srv = actix_test::start(|| App::new().service(index));
|
||||
| ^^^^^ not found in this scope
|
||||
| ^^^^^ the trait `HttpServiceFactory` is not implemented for `fn() -> impl std::future::Future {index}`
|
||||
|
@ -4,8 +4,8 @@ error: Unexpected HTTP method: `UNEXPECTED`
|
||||
3 | #[route("/", method="UNEXPECTED")]
|
||||
| ^^^^^^^^^^^^
|
||||
|
||||
error[E0425]: cannot find value `index` in this scope
|
||||
error[E0277]: the trait bound `fn() -> impl std::future::Future {index}: HttpServiceFactory` is not satisfied
|
||||
--> $DIR/route-unexpected-method-fail.rs:12:55
|
||||
|
|
||||
12 | let srv = actix_test::start(|| App::new().service(index));
|
||||
| ^^^^^ not found in this scope
|
||||
| ^^^^^ the trait `HttpServiceFactory` is not implemented for `fn() -> impl std::future::Future {index}`
|
||||
|
actix-web-codegen/tests/trybuild/test-runtime.rs (new file, 6 lines)
@ -0,0 +1,6 @@
#[actix_web::test]
async fn my_test() {
    assert_eq!(async { 1 }.await, 1);
}

fn main() {}
@ -3,6 +3,26 @@
## Unreleased - 2021-xx-xx


## 3.0.0-beta.9 - 2021-10-20
* Updated rustls to v0.20. [#2414]

[#2414]: https://github.com/actix/actix-web/pull/2414


## 3.0.0-beta.8 - 2021-09-09
### Changed
* Send headers within the redirect requests. [#2310]

[#2310]: https://github.com/actix/actix-web/pull/2310


## 3.0.0-beta.7 - 2021-06-26
### Changed
* Change compression algorithm features flags. [#2250]

[#2250]: https://github.com/actix/actix-web/pull/2250


## 3.0.0-beta.6 - 2021-06-17
* No significant changes since 3.0.0-beta.5.
Some files were not shown because too many files have changed in this diff.