Mirror of https://github.com/fafhrd91/actix-web, synced 2025-07-08 11:56:32 +02:00

Compare commits: files-v0.6 ... web-v4.0.0 (52 commits)
Commits (SHA1):
ba88d3b4bf, 8dd30611fa, 1383c7d701, d8a0f46f26, 53ec66caf4, 93112644d3, ddc8c16cb3,
373b3f91df, 7d01ece355, c50eef6166, dade818eba, ae35e69382, 5128b1bdfc, 168b2f227d,
4bb32fb19b, f9da6e48e0, ff07816b65, 5f412c67db, a0c0bff944, 384164cc14, e965d8298f,
f6e69919ed, 293c52c3ef, 5a14ffeef2, 7ae132cb68, d8deed0475, 2504c2ecb0, 604be5495f,
262c6bc828, 5eba95b731, 09afd033fc, 539697292a, 5a480d1d78, 9a26393375, 2eacb735a4,
767e4efe22, e559a197cc, 93aa86e30b, 2d8d2f5ab0, 083ee05d50, ed0516d724, 7535a1ade8,
8846808804, 3b6333e65f, b1148fd735, 12f7720309, 2d8530feb3, 7faeffc5ab, f81d4bdae7,
6893773280, 73a655544e, baa5a663c4
@@ -1,8 +1,9 @@
[alias]
chk = "check --workspace --all-features --tests --examples --bins"
lint = "clippy --workspace --tests --examples"
lint = "clippy --workspace --all-features --tests --examples --bins"
ci-min = "hack check --workspace --no-default-features"
ci-min-test = "hack check --workspace --no-default-features --tests --examples"
ci-default = "hack check --workspace"
ci-full = "check --workspace --bins --examples --tests"
ci-test = "test --workspace --all-features --no-fail-fast"
ci-default = "check --workspace --bins --tests --examples"
ci-full = "check --workspace --all-features --bins --tests --examples"
ci-test = "test --workspace --all-features --lib --tests --no-fail-fast -- --nocapture"
ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture"
.github/ISSUE_TEMPLATE/config.yml (13)

@@ -1,15 +1,8 @@
blank_issues_enabled: true
contact_links:
- name: GitHub Discussions
url: https://github.com/actix/actix-web/discussions
about: Actix Web Q&A
- name: Gitter chat (actix-web)
url: https://gitter.im/actix/actix-web
about: Actix Web Q&A
- name: Gitter chat (actix)
url: https://gitter.im/actix/actix
about: Actix (actor framework) Q&A
- name: Actix Discord
url: https://discord.gg/NWpN5mmg3x
about: Actix developer discussion and community chat
- name: GitHub Discussions
url: https://github.com/actix/actix-web/discussions
about: Actix Web Q&A
.github/PULL_REQUEST_TEMPLATE.md (2)

@@ -8,7 +8,7 @@ PR_TYPE

## PR Checklist
<!-- Check your PR fulfills the following items. ->>
<!-- Check your PR fulfills the following items. -->
<!-- For draft PRs check the boxes as you complete them. -->

- [ ] Tests for the changes have been added / updated.
.github/workflows/ci.yml (79)

@@ -16,7 +16,7 @@ jobs:
- { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
- { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc }
version:
- 1.46.0 # MSRV
- 1.51.0 # MSRV
- stable
- nightly

@@ -24,6 +24,8 @@ jobs:
runs-on: ${{ matrix.target.os }}

env:
CI: 1
CARGO_INCREMENTAL: 0
VCPKGRS_DYNAMIC: 1

steps:

@@ -44,13 +46,6 @@
profile: minimal
override: true

- name: Install ${{ matrix.version }}
uses: actions-rs/toolchain@v1
with:
toolchain: ${{ matrix.version }}-${{ matrix.target.triple }}
profile: minimal
override: true

- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with:

@@ -66,43 +61,26 @@
- name: check minimal
uses: actions-rs/cargo@v1
with:
command: hack
args: check --workspace --no-default-features
with: { command: ci-min }

- name: check minimal + tests
uses: actions-rs/cargo@v1
with:
command: hack
args: check --workspace --no-default-features --tests --examples
with: { command: ci-min-test }

- name: check default
uses: actions-rs/cargo@v1
with: { command: ci-default }

- name: check full
uses: actions-rs/cargo@v1
with:
command: check
args: --workspace --bins --examples --tests
with: { command: ci-full }

- name: tests
uses: actions-rs/cargo@v1
with:
command: test
args: --workspace --all-features --no-fail-fast -- --nocapture
--skip=test_h2_content_length
--skip=test_reading_deflate_encoding_large_random_rustls

- name: tests (actix-http)
uses: actions-rs/cargo@v1
timeout-minutes: 40
with:
command: test
args: --package=actix-http --no-default-features --features=rustls -- --nocapture

- name: tests (awc)
uses: actions-rs/cargo@v1
timeout-minutes: 40
with:
command: test
args: --package=awc --no-default-features --features=rustls -- --nocapture
command: ci-test
args: --skip=test_reading_deflate_encoding_large_random_rustls

- name: Generate coverage file
if: >

@@ -123,5 +101,36 @@

- name: Clear the cargo caches
run: |
cargo install cargo-cache --version 0.6.2 --no-default-features --features ci-autoclean
cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
cargo-cache

rustdoc:
name: rustdoc
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v2

- name: Install Rust (nightly)
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-x86_64-unknown-linux-gnu
profile: minimal
override: true

- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.3.0

- name: Install cargo-hack
uses: actions-rs/cargo@v1
with:
command: install
args: cargo-hack

- name: doc tests
uses: actions-rs/cargo@v1
timeout-minutes: 40
with: { command: ci-doctest }
.github/workflows/clippy-fmt.yml (2)

@@ -36,4 +36,4 @@
uses: actions-rs/clippy-check@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
args: --workspace --tests --all-features
args: --workspace --all-features --tests
.gitignore (3)

@@ -16,3 +16,6 @@ guide/build/

# Configuration directory generated by CLion
.idea

# Configuration directory generated by VSCode
.vscode
CHANGES.md (45)

@@ -3,12 +3,55 @@

## Unreleased - 2021-xx-xx


## 4.0.0-beta.9 - 2021-09-09
### Added
* Re-export actix-service `ServiceFactory` in `dev` module. [#2325]

### Changed
* Compress middleware will return 406 Not Acceptable when no content encoding is acceptable to the client. [#2344]
* Move `BaseHttpResponse` to `dev::Response`. [#2379]
* Enable `TestRequest::param` to accept more than just static strings. [#2172]
* Minimum supported Rust version (MSRV) is now 1.51.

### Fixed
* Fix quality parse error in Accept-Encoding header. [#2344]
* Re-export correct type at `web::HttpResponse`. [#2379]

[#2172]: https://github.com/actix/actix-web/pull/2172
[#2325]: https://github.com/actix/actix-web/pull/2325
[#2344]: https://github.com/actix/actix-web/pull/2344
[#2379]: https://github.com/actix/actix-web/pull/2379


## 4.0.0-beta.8 - 2021-06-26
### Added
* Add `ServiceRequest::parts_mut`. [#2177]
* Add extractors for `Uri` and `Method`. [#2263]
* Add extractors for `ConnectionInfo` and `PeerAddr`. [#2263]
* Add `Route::service` for using hand-written services as handlers. [#2262]

### Changed
* Change compression algorithm features flags. [#2250]
* Deprecate `App::data` and `App::data_factory`. [#2271]
* Smarter extraction of `ConnectionInfo` parts. [#2282]

### Fixed
* Scope and Resource middleware can access data items set on their own layer. [#2288]

[#2177]: https://github.com/actix/actix-web/pull/2177
[#2250]: https://github.com/actix/actix-web/pull/2250
[#2271]: https://github.com/actix/actix-web/pull/2271
[#2262]: https://github.com/actix/actix-web/pull/2262
[#2263]: https://github.com/actix/actix-web/pull/2263
[#2282]: https://github.com/actix/actix-web/pull/2282
[#2288]: https://github.com/actix/actix-web/pull/2288


## 4.0.0-beta.7 - 2021-06-17
### Added
* `HttpServer::worker_max_blocking_threads` for setting block thread pool. [#2200]

### Changed
* Adjusted default JSON payload limit to 2MB (from 32kb) and included size and limits in the `JsonPayloadError::Overflow` error variant. [#2162]
[#2162]: (https://github.com/actix/actix-web/pull/2162)
* `ServiceResponse::error_response` now uses body type of `Body`. [#2201]
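For context on the payload-limit change tracked in #2162 above, a minimal sketch of overriding the JSON payload limit per app (the `Info` type, route, and bind address are illustrative; assumes actix-web 4.0 beta plus `serde` with the derive feature):

```rust
use actix_web::{web, App, HttpResponse, HttpServer};
use serde::Deserialize;

#[derive(Deserialize)]
struct Info {
    name: String,
}

// Illustrative handler: echoes a field from the deserialized JSON payload.
async fn index(info: web::Json<Info>) -> HttpResponse {
    HttpResponse::Ok().body(format!("hello {}", info.name))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Override the default JSON payload limit; larger payloads are
            // rejected with `JsonPayloadError::Overflow`.
            .app_data(web::JsonConfig::default().limit(512 * 1024))
            .route("/", web::post().to(index))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```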
Cargo.toml (47)

@@ -1,6 +1,6 @@
[package]
name = "actix-web"
version = "4.0.0-beta.7"
version = "4.0.0-beta.9"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
keywords = ["actix", "http", "web", "framework", "async"]

@@ -17,13 +17,14 @@ edition = "2018"

[package.metadata.docs.rs]
# features that docs.rs will build with
features = ["openssl", "rustls", "compress", "cookies", "secure-cookies"]
features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies", "secure-cookies"]

[lib]
name = "actix_web"
path = "src/lib.rs"

[workspace]
resolver = "2"
members = [
".",
"awc",

@@ -34,15 +35,20 @@ members = [
"actix-web-codegen",
"actix-http-test",
"actix-test",
"actix-router",
]
# enable when MSRV is 1.51+
# resolver = "2"

[features]
default = ["compress", "cookies"]
default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]

# content-encoding support
compress = ["actix-http/compress"]
# Brotli algorithm content-encoding support
compress-brotli = ["actix-http/compress-brotli", "__compress"]
# Gzip and deflate algorithms content-encoding support
compress-gzip = ["actix-http/compress-gzip", "__compress"]
# Zstd algorithm content-encoding support
compress-zstd = ["actix-http/compress-zstd", "__compress"]

# support for cookies
cookies = ["cookie"]

@@ -56,21 +62,26 @@ openssl = ["actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]

# rustls
rustls = ["actix-http/rustls", "actix-tls/accept", "actix-tls/rustls"]

# Internal (PRIVATE!) features used to aid testing and cheking feature status.
# Don't rely on these whatsoever. They may disappear at anytime.
__compress = []

[dependencies]
actix-codec = "0.4.0"
actix-macros = "0.2.1"
actix-router = "0.2.7"
actix-router = "0.5.0-beta.2"
actix-rt = "2.2"
actix-server = "2.0.0-beta.3"
actix-service = "2.0.0"
actix-utils = "3.0.0"
actix-tls = { version = "3.0.0-beta.5", default-features = false, optional = true }

actix-web-codegen = "0.5.0-beta.2"
actix-http = "3.0.0-beta.7"
actix-web-codegen = "0.5.0-beta.4"
actix-http = "3.0.0-beta.10"

ahash = "0.7"
bytes = "1"
cfg-if = "1"
cookie = { version = "0.15", features = ["percent-encode"], optional = true }
derive_more = "0.99.5"
either = "1.5.3"

@@ -88,26 +99,29 @@ regex = "1.4"

serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_urlencoded = "0.7"
smallvec = "1.6"
smallvec = "1.6.1"
socket2 = "0.4.0"
time = { version = "0.2.23", default-features = false, features = ["std"] }
url = "2.1"

[dev-dependencies]
actix-test = { version = "0.1.0-beta.2", features = ["openssl", "rustls"] }
awc = { version = "3.0.0-beta.6", features = ["openssl"] }
actix-test = { version = "0.1.0-beta.3", features = ["openssl", "rustls"] }
awc = { version = "3.0.0-beta.8", features = ["openssl"] }

brotli2 = "0.3.2"
criterion = "0.3"
criterion = { version = "0.3", features = ["html_reports"] }
env_logger = "0.8"
flate2 = "1.0.13"
zstd = "0.7"
rand = "0.8"
rcgen = "0.8"
serde_derive = "1.0"
tls-openssl = { package = "openssl", version = "0.10.9" }
tls-rustls = { package = "rustls", version = "0.19.0" }

[profile.dev]
# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.
debug = 0

[profile.release]
lto = true
opt-level = 3

@@ -118,6 +132,7 @@ actix-files = { path = "actix-files" }

actix-http = { path = "actix-http" }
actix-http-test = { path = "actix-http-test" }
actix-multipart = { path = "actix-multipart" }
actix-router = { path = "actix-router" }
actix-test = { path = "actix-test" }
actix-web = { path = "." }
actix-web-actors = { path = "actix-web-actors" }

@@ -126,15 +141,15 @@ awc = { path = "awc" }

[[test]]
name = "test_server"
required-features = ["compress", "cookies"]
required-features = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]

[[example]]
name = "basic"
required-features = ["compress"]
required-features = ["compress-gzip"]

[[example]]
name = "uds"
required-features = ["compress"]
required-features = ["compress-gzip"]

[[example]]
name = "on_connect"
MIGRATION.md (19)

@@ -3,13 +3,26 @@

* The default `NormalizePath` behavior now strips trailing slashes by default. This was
previously documented to be the case in v3 but the behavior now matches. The effect is that
routes defined with trailing slashes will become inaccessible when
using `NormalizePath::default()`.
using `NormalizePath::default()`. As such, calling `NormalizePath::default()` will log a warning.
It is advised that the `new` method be used instead.

Before: `#[get("/test/")`
After: `#[get("/test")`
Before: `#[get("/test/")]`
After: `#[get("/test")]`

Alternatively, explicitly require trailing slashes: `NormalizePath::new(TrailingSlash::Always)`.

* Feature flag `compress` has been split into its supported algorithm (brotli, gzip, zstd).
By default all compression algorithms are enabled.
To select algorithm you want to include with `middleware::Compress` use following flags:
- `compress-brotli`
- `compress-gzip`
- `compress-zstd`
If you have set in your `Cargo.toml` dedicated `actix-web` features and you still want
to have compression enabled. Please change features selection like bellow:

Before: `"compress"`
After: `"compress-brotli", "compress-gzip", "compress-zstd"`
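A minimal sketch of the two v4 migration points above, assuming the `compress-*` crate features are enabled in `Cargo.toml` (the route and bind address are illustrative):

```rust
use actix_web::{
    middleware::{Compress, NormalizePath, TrailingSlash},
    web, App, HttpResponse, HttpServer,
};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Pick an explicit trailing-slash policy instead of relying on
            // `NormalizePath::default()`, which now logs a warning.
            .wrap(NormalizePath::new(TrailingSlash::Trim))
            // Compression middleware is unchanged; which algorithms it can
            // negotiate depends on the enabled `compress-*` features.
            .wrap(Compress::default())
            .route("/test", web::get().to(|| async { HttpResponse::Ok().finish() }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```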

## 3.0.0
README.md (10)

@@ -6,10 +6,10 @@

<p>

[](https://crates.io/crates/actix-web)
[](https://docs.rs/actix-web/4.0.0-beta.7)
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
[](https://docs.rs/actix-web/4.0.0-beta.9)
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
[](https://deps.rs/crate/actix-web/4.0.0-beta.7)
[](https://deps.rs/crate/actix-web/4.0.0-beta.9)
<br />
[](https://github.com/actix/actix-web/actions)
[](https://codecov.io/gh/actix/actix-web)

@@ -25,14 +25,14 @@

* Streaming and pipelining
* Keep-alive and slow requests handling
* Client/server [WebSockets](https://actix.rs/docs/websockets/) support
* Transparent content compression/decompression (br, gzip, deflate)
* Transparent content compression/decompression (br, gzip, deflate, zstd)
* Powerful [request routing](https://actix.rs/docs/url-dispatch/)
* Multipart streams
* Static assets
* SSL support using OpenSSL or Rustls
* Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/))
* Includes an async [HTTP client](https://docs.rs/awc/)
* Runs on stable Rust 1.46+
* Runs on stable Rust 1.51+

## Documentation
@@ -3,6 +3,18 @@

## Unreleased - 2021-xx-xx


## 0.6.0-beta.7 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.


## 0.6.0-beta.6 - 2021-06-26
* Added `Files::path_filter()`. [#2274]
* `Files::show_files_listing()` can now be used with `Files::index_file()` to show files listing as a fallback when the index file is not found. [#2228]

[#2274]: https://github.com/actix/actix-web/pull/2274
[#2228]: https://github.com/actix/actix-web/pull/2228


## 0.6.0-beta.5 - 2021-06-17
* `NamedFile` now implements `ServiceFactory` and `HttpServiceFactory` making it much more useful in routing. For example, it can be used directly as a default service. [#2135]
* For symbolic links, `Content-Disposition` header no longer shows the filename of the original file. [#2156]

@@ -16,12 +28,11 @@

## 0.6.0-beta.4 - 2021-04-02
* No notable changes.
* Add support for `.guard` in `Files` to selectively filter `Files` services. [#2046]

[#2046]: https://github.com/actix/actix-web/pull/2046


## 0.6.0-beta.3 - 2021-03-09
* No notable changes.
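A sketch of how the `Files::path_filter()` and `index_file()` / `show_files_listing()` additions from the actix-files changelog above compose, assuming actix-files 0.6 beta and a local `./static` directory (mount path and filter logic are illustrative):

```rust
use actix_files::Files;
use actix_web::{App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new().service(
            Files::new("/static", "./static")
                // Serve index.html for directory requests...
                .index_file("index.html")
                // ...and fall back to a generated listing when it is missing.
                .show_files_listing()
                // Reject paths whose final component starts with a dot (hide dotfiles).
                .path_filter(|path, _head| {
                    !path
                        .file_name()
                        .and_then(|name| name.to_str())
                        .map(|name| name.starts_with('.'))
                        .unwrap_or(false)
                }),
        )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```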
@@ -1,13 +1,11 @@

[package]
name = "actix-files"
version = "0.6.0-beta.5"
version = "0.6.0-beta.7"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Static file serving for Actix Web"
readme = "README.md"
keywords = ["actix", "http", "async", "futures"]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git"
documentation = "https://docs.rs/actix-files/"
repository = "https://github.com/actix/actix-web"
categories = ["asynchronous", "web-programming::http-server"]
license = "MIT OR Apache-2.0"
edition = "2018"

@@ -17,8 +15,8 @@ name = "actix_files"

path = "src/lib.rs"

[dependencies]
actix-web = { version = "4.0.0-beta.7", default-features = false }
actix-http = "3.0.0-beta.7"
actix-web = { version = "4.0.0-beta.9", default-features = false }
actix-http = "3.0.0-beta.10"
actix-service = "2.0.0"
actix-utils = "3.0.0"

@@ -35,5 +33,5 @@ percent-encoding = "2.1"

[dev-dependencies]
actix-rt = "2.2"
actix-web = "4.0.0-beta.7"
actix-test = "0.1.0-beta.2"
actix-web = "4.0.0-beta.9"
actix-test = "0.1.0-beta.3"
@@ -3,17 +3,16 @@

> Static file serving for Actix Web

[](https://crates.io/crates/actix-files)
[](https://docs.rs/actix-files/0.6.0-beta.5)
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
[](https://docs.rs/actix-files/0.6.0-beta.7)
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
<br />
[](https://deps.rs/crate/actix-files/0.6.0-beta.5)
[](https://deps.rs/crate/actix-files/0.6.0-beta.7)
[](https://crates.io/crates/actix-files)
[](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[](https://discord.gg/NWpN5mmg3x)

## Documentation & Resources

- [API Documentation](https://docs.rs/actix-files/)
- [Example Project](https://github.com/actix/examples/tree/master/basics/static_index)
- [Chat on Gitter](https://gitter.im/actix/actix-web)
- Minimum supported Rust version: 1.46 or later
- Minimum supported Rust version: 1.51 or later
@ -21,6 +21,7 @@ impl ResponseError for FilesError {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::enum_variant_names)]
|
||||
#[derive(Display, Debug, PartialEq)]
|
||||
pub enum UriSegmentError {
|
||||
/// The segment started with the wrapped invalid character.
|
||||
|
@ -1,9 +1,17 @@
|
||||
use std::{cell::RefCell, fmt, io, path::PathBuf, rc::Rc};
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
fmt, io,
|
||||
path::{Path, PathBuf},
|
||||
rc::Rc,
|
||||
};
|
||||
|
||||
use actix_service::{boxed, IntoServiceFactory, ServiceFactory, ServiceFactoryExt};
|
||||
use actix_utils::future::ok;
|
||||
use actix_web::{
|
||||
dev::{AppService, HttpServiceFactory, ResourceDef, ServiceRequest, ServiceResponse},
|
||||
dev::{
|
||||
AppService, HttpServiceFactory, RequestHead, ResourceDef, ServiceRequest,
|
||||
ServiceResponse,
|
||||
},
|
||||
error::Error,
|
||||
guard::Guard,
|
||||
http::header::DispositionType,
|
||||
@ -13,7 +21,7 @@ use futures_core::future::LocalBoxFuture;
|
||||
|
||||
use crate::{
|
||||
directory_listing, named, Directory, DirectoryRenderer, FilesService, HttpNewService,
|
||||
MimeOverride,
|
||||
MimeOverride, PathFilter,
|
||||
};
|
||||
|
||||
/// Static files handling service.
|
||||
@ -36,6 +44,7 @@ pub struct Files {
|
||||
default: Rc<RefCell<Option<Rc<HttpNewService>>>>,
|
||||
renderer: Rc<DirectoryRenderer>,
|
||||
mime_override: Option<Rc<MimeOverride>>,
|
||||
path_filter: Option<Rc<PathFilter>>,
|
||||
file_flags: named::Flags,
|
||||
use_guards: Option<Rc<dyn Guard>>,
|
||||
guards: Vec<Rc<dyn Guard>>,
|
||||
@ -60,6 +69,7 @@ impl Clone for Files {
|
||||
file_flags: self.file_flags,
|
||||
path: self.path.clone(),
|
||||
mime_override: self.mime_override.clone(),
|
||||
path_filter: self.path_filter.clone(),
|
||||
use_guards: self.use_guards.clone(),
|
||||
guards: self.guards.clone(),
|
||||
hidden_files: self.hidden_files,
|
||||
@ -96,7 +106,7 @@ impl Files {
|
||||
};
|
||||
|
||||
Files {
|
||||
path: mount_path.to_owned(),
|
||||
path: mount_path.trim_end_matches('/').to_owned(),
|
||||
directory: dir,
|
||||
index: None,
|
||||
show_index: false,
|
||||
@ -104,6 +114,7 @@ impl Files {
|
||||
default: Rc::new(RefCell::new(None)),
|
||||
renderer: Rc::new(directory_listing),
|
||||
mime_override: None,
|
||||
path_filter: None,
|
||||
file_flags: named::Flags::default(),
|
||||
use_guards: None,
|
||||
guards: Vec::new(),
|
||||
@ -114,6 +125,9 @@ impl Files {
|
||||
/// Show files listing for directories.
|
||||
///
|
||||
/// By default show files listing is disabled.
|
||||
///
|
||||
/// When used with [`Files::index_file()`], files listing is shown as a fallback
|
||||
/// when the index file is not found.
|
||||
pub fn show_files_listing(mut self) -> Self {
|
||||
self.show_index = true;
|
||||
self
|
||||
@ -146,10 +160,45 @@ impl Files {
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets path filtering closure.
|
||||
///
|
||||
/// The path provided to the closure is relative to `serve_from` path.
|
||||
/// You can safely join this path with the `serve_from` path to get the real path.
|
||||
/// However, the real path may not exist since the filter is called before checking path existence.
|
||||
///
|
||||
/// When a path doesn't pass the filter, [`Files::default_handler`] is called if set, otherwise,
|
||||
/// `404 Not Found` is returned.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// use std::path::Path;
|
||||
/// use actix_files::Files;
|
||||
///
|
||||
/// // prevent searching subdirectories and following symlinks
|
||||
/// let files_service = Files::new("/", "./static").path_filter(|path, _| {
|
||||
/// path.components().count() == 1
|
||||
/// && Path::new("./static")
|
||||
/// .join(path)
|
||||
/// .symlink_metadata()
|
||||
/// .map(|m| !m.file_type().is_symlink())
|
||||
/// .unwrap_or(false)
|
||||
/// });
|
||||
/// ```
|
||||
pub fn path_filter<F>(mut self, f: F) -> Self
|
||||
where
|
||||
F: Fn(&Path, &RequestHead) -> bool + 'static,
|
||||
{
|
||||
self.path_filter = Some(Rc::new(f));
|
||||
self
|
||||
}
|
||||
|
||||
/// Set index file
|
||||
///
|
||||
/// Shows specific index file for directory "/" instead of
|
||||
/// Shows specific index file for directories instead of
|
||||
/// showing files listing.
|
||||
///
|
||||
/// If the index file is not found, files listing is shown as a fallback if
|
||||
/// [`Files::show_files_listing()`] is set.
|
||||
pub fn index_file<T: Into<String>>(mut self, index: T) -> Self {
|
||||
self.index = Some(index.into());
|
||||
self
|
||||
@ -312,6 +361,7 @@ impl ServiceFactory<ServiceRequest> for Files {
|
||||
default: None,
|
||||
renderer: self.renderer.clone(),
|
||||
mime_override: self.mime_override.clone(),
|
||||
path_filter: self.path_filter.clone(),
|
||||
file_flags: self.file_flags,
|
||||
guards: self.use_guards.clone(),
|
||||
hidden_files: self.hidden_files,
|
||||
|
@ -16,11 +16,12 @@
|
||||
|
||||
use actix_service::boxed::{BoxService, BoxServiceFactory};
|
||||
use actix_web::{
|
||||
dev::{ServiceRequest, ServiceResponse},
|
||||
dev::{RequestHead, ServiceRequest, ServiceResponse},
|
||||
error::Error,
|
||||
http::header::DispositionType,
|
||||
};
|
||||
use mime_guess::from_ext;
|
||||
use std::path::Path;
|
||||
|
||||
mod chunked;
|
||||
mod directory;
|
||||
@ -56,6 +57,8 @@ pub fn file_extension_to_mime(ext: &str) -> mime::Mime {
|
||||
|
||||
type MimeOverride = dyn Fn(&mime::Name<'_>) -> DispositionType;
|
||||
|
||||
type PathFilter = dyn Fn(&Path, &RequestHead) -> bool;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::{
|
||||
@ -872,4 +875,69 @@ mod tests {
|
||||
"inline; filename=\"symlink-test.png\""
|
||||
);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_index_with_show_files_listing() {
|
||||
let service = Files::new(".", ".")
|
||||
.index_file("lib.rs")
|
||||
.show_files_listing()
|
||||
.new_service(())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Serve the index if exists
|
||||
let req = TestRequest::default().uri("/src").to_srv_request();
|
||||
let resp = test::call_service(&service, req).await;
|
||||
assert_eq!(resp.status(), StatusCode::OK);
|
||||
assert_eq!(
|
||||
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
||||
"text/x-rust"
|
||||
);
|
||||
|
||||
// Show files listing, otherwise.
|
||||
let req = TestRequest::default().uri("/tests").to_srv_request();
|
||||
let resp = test::call_service(&service, req).await;
|
||||
assert_eq!(
|
||||
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
||||
"text/html; charset=utf-8"
|
||||
);
|
||||
let bytes = test::read_body(resp).await;
|
||||
assert!(format!("{:?}", bytes).contains("/tests/test.png"));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_path_filter() {
|
||||
// prevent searching subdirectories
|
||||
let st = Files::new("/", ".")
|
||||
.path_filter(|path, _| path.components().count() == 1)
|
||||
.new_service(())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let req = TestRequest::with_uri("/Cargo.toml").to_srv_request();
|
||||
let resp = test::call_service(&st, req).await;
|
||||
assert_eq!(resp.status(), StatusCode::OK);
|
||||
|
||||
let req = TestRequest::with_uri("/src/lib.rs").to_srv_request();
|
||||
let resp = test::call_service(&st, req).await;
|
||||
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_default_handler_filter() {
|
||||
let st = Files::new("/", ".")
|
||||
.default_handler(|req: ServiceRequest| {
|
||||
ok(req.into_response(HttpResponse::Ok().body("default content")))
|
||||
})
|
||||
.path_filter(|path, _| path.extension() == Some("png".as_ref()))
|
||||
.new_service(())
|
||||
.await
|
||||
.unwrap();
|
||||
let req = TestRequest::with_uri("/Cargo.toml").to_srv_request();
|
||||
let resp = test::call_service(&st, req).await;
|
||||
|
||||
assert_eq!(resp.status(), StatusCode::OK);
|
||||
let bytes = test::read_body(resp).await;
|
||||
assert_eq!(bytes, web::Bytes::from_static(b"default content"));
|
||||
}
|
||||
}
|
||||
|
@ -355,8 +355,8 @@ impl NamedFile {
|
||||
} else if let (Some(ref m), Some(header::IfUnmodifiedSince(ref since))) =
|
||||
(last_modified, req.get_header())
|
||||
{
|
||||
let t1: SystemTime = m.clone().into();
|
||||
let t2: SystemTime = since.clone().into();
|
||||
let t1: SystemTime = (*m).into();
|
||||
let t2: SystemTime = (*since).into();
|
||||
|
||||
match (t1.duration_since(UNIX_EPOCH), t2.duration_since(UNIX_EPOCH)) {
|
||||
(Ok(t1), Ok(t2)) => t1.as_secs() > t2.as_secs(),
|
||||
@ -374,8 +374,8 @@ impl NamedFile {
|
||||
} else if let (Some(ref m), Some(header::IfModifiedSince(ref since))) =
|
||||
(last_modified, req.get_header())
|
||||
{
|
||||
let t1: SystemTime = m.clone().into();
|
||||
let t2: SystemTime = since.clone().into();
|
||||
let t1: SystemTime = (*m).into();
|
||||
let t2: SystemTime = (*since).into();
|
||||
|
||||
match (t1.duration_since(UNIX_EPOCH), t2.duration_since(UNIX_EPOCH)) {
|
||||
(Ok(t1), Ok(t2)) => t1.as_secs() <= t2.as_secs(),
|
||||
|
@ -13,7 +13,7 @@ use futures_core::future::LocalBoxFuture;
|
||||
|
||||
use crate::{
|
||||
named, Directory, DirectoryRenderer, FilesError, HttpService, MimeOverride, NamedFile,
|
||||
PathBufWrap,
|
||||
PathBufWrap, PathFilter,
|
||||
};
|
||||
|
||||
/// Assembled file serving service.
|
||||
@ -25,6 +25,7 @@ pub struct FilesService {
|
||||
pub(crate) default: Option<HttpService>,
|
||||
pub(crate) renderer: Rc<DirectoryRenderer>,
|
||||
pub(crate) mime_override: Option<Rc<MimeOverride>>,
|
||||
pub(crate) path_filter: Option<Rc<PathFilter>>,
|
||||
pub(crate) file_flags: named::Flags,
|
||||
pub(crate) guards: Option<Rc<dyn Guard>>,
|
||||
pub(crate) hidden_files: bool,
|
||||
@ -82,6 +83,18 @@ impl Service<ServiceRequest> for FilesService {
|
||||
Err(e) => return Box::pin(ok(req.error_response(e))),
|
||||
};
|
||||
|
||||
if let Some(filter) = &self.path_filter {
|
||||
if !filter(real_path.as_ref(), req.head()) {
|
||||
if let Some(ref default) = self.default {
|
||||
return Box::pin(default.call(req));
|
||||
} else {
|
||||
return Box::pin(ok(
|
||||
req.into_response(actix_web::HttpResponse::NotFound().finish())
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// full file path
|
||||
let path = self.directory.join(&real_path);
|
||||
if let Err(err) = path.canonicalize() {
|
||||
@ -102,26 +115,20 @@ impl Service<ServiceRequest> for FilesService {
|
||||
)));
|
||||
}
|
||||
|
||||
if let Some(ref redir_index) = self.index {
|
||||
let path = path.join(redir_index);
|
||||
|
||||
match NamedFile::open(path) {
|
||||
Ok(mut named_file) => {
|
||||
if let Some(ref mime_override) = self.mime_override {
|
||||
let new_disposition =
|
||||
mime_override(&named_file.content_type.type_());
|
||||
named_file.content_disposition.disposition = new_disposition;
|
||||
}
|
||||
named_file.flags = self.file_flags;
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
let res = named_file.into_response(&req);
|
||||
Box::pin(ok(ServiceResponse::new(req, res)))
|
||||
}
|
||||
Err(err) => self.handle_err(err, req),
|
||||
let serve_named_file = |req: ServiceRequest, mut named_file: NamedFile| {
|
||||
if let Some(ref mime_override) = self.mime_override {
|
||||
let new_disposition = mime_override(&named_file.content_type.type_());
|
||||
named_file.content_disposition.disposition = new_disposition;
|
||||
}
|
||||
} else if self.show_index {
|
||||
let dir = Directory::new(self.directory.clone(), path);
|
||||
named_file.flags = self.file_flags;
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
let res = named_file.into_response(&req);
|
||||
Box::pin(ok(ServiceResponse::new(req, res)))
|
||||
};
|
||||
|
||||
let show_index = |req: ServiceRequest| {
|
||||
let dir = Directory::new(self.directory.clone(), path.clone());
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
let x = (self.renderer)(&dir, &req);
|
||||
@ -130,11 +137,19 @@ impl Service<ServiceRequest> for FilesService {
|
||||
Ok(resp) => ok(resp),
|
||||
Err(err) => ok(ServiceResponse::from_err(err, req)),
|
||||
})
|
||||
} else {
|
||||
Box::pin(ok(ServiceResponse::from_err(
|
||||
};
|
||||
|
||||
match self.index {
|
||||
Some(ref index) => match NamedFile::open(path.join(index)) {
|
||||
Ok(named_file) => serve_named_file(req, named_file),
|
||||
Err(_) if self.show_index => show_index(req),
|
||||
Err(err) => self.handle_err(err, req),
|
||||
},
|
||||
None if self.show_index => show_index(req),
|
||||
_ => Box::pin(ok(ServiceResponse::from_err(
|
||||
FilesError::IsDirectory,
|
||||
req.into_parts().0,
|
||||
)))
|
||||
))),
|
||||
}
|
||||
} else {
|
||||
match NamedFile::open(path) {
|
||||
|
@ -3,6 +3,10 @@
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 3.0.0-beta.5 - 2021-09-09
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
|
||||
## 3.0.0-beta.4 - 2021-04-02
|
||||
* Added `TestServer::client_headers` method. [#2097]
|
||||
|
||||
|
@ -1,18 +1,18 @@
|
||||
[package]
|
||||
name = "actix-http-test"
|
||||
version = "3.0.0-beta.4"
|
||||
version = "3.0.0-beta.5"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Various helpers for Actix applications to use during testing"
|
||||
readme = "README.md"
|
||||
keywords = ["http", "web", "framework", "async", "futures"]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
documentation = "https://docs.rs/actix-http-test/"
|
||||
categories = ["network-programming", "asynchronous",
|
||||
"web-programming::http-server",
|
||||
"web-programming::websocket"]
|
||||
categories = [
|
||||
"network-programming",
|
||||
"asynchronous",
|
||||
"web-programming::http-server",
|
||||
"web-programming::websocket",
|
||||
]
|
||||
license = "MIT OR Apache-2.0"
|
||||
exclude = [".gitignore", ".cargo/config"]
|
||||
edition = "2018"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
@ -35,7 +35,7 @@ actix-tls = "3.0.0-beta.5"
|
||||
actix-utils = "3.0.0"
|
||||
actix-rt = "2.2"
|
||||
actix-server = "2.0.0-beta.3"
|
||||
awc = { version = "3.0.0-beta.6", default-features = false }
|
||||
awc = { version = "3.0.0-beta.8", default-features = false }
|
||||
|
||||
base64 = "0.13"
|
||||
bytes = "1"
|
||||
@ -51,5 +51,5 @@ time = { version = "0.2.23", default-features = false, features = ["std"] }
|
||||
tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-web = { version = "4.0.0-beta.7", default-features = false, features = ["cookies"] }
|
||||
actix-http = "3.0.0-beta.7"
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false, features = ["cookies"] }
|
||||
actix-http = "3.0.0-beta.10"
|
||||
|
@ -3,13 +3,15 @@
|
||||
> Various helpers for Actix applications to use during testing.
|
||||
|
||||
[](https://crates.io/crates/actix-http-test)
|
||||
[](https://docs.rs/actix-http-test/3.0.0-beta.4)
|
||||
[](https://docs.rs/actix-http-test/3.0.0-beta.5)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||

|
||||
[](https://deps.rs/crate/actix-http-test/3.0.0-beta.4)
|
||||
[](https://gitter.im/actix/actix-web?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
<br>
|
||||
[](https://deps.rs/crate/actix-http-test/3.0.0-beta.5)
|
||||
[](https://crates.io/crates/actix-http-test)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
## Documentation & Resources
|
||||
|
||||
- [API Documentation](https://docs.rs/actix-http-test)
|
||||
- [Chat on Gitter](https://gitter.im/actix/actix-web)
|
||||
- Minimum Supported Rust Version (MSRV): 1.46.0
|
||||
- Minimum Supported Rust Version (MSRV): 1.51.0
|
||||
|
@ -3,6 +3,38 @@
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 3.0.0-beta.10 - 2021-09-09
|
||||
### Changed
|
||||
* `ContentEncoding` is now marked `#[non_exhaustive]`. [#2377]
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
### Fixed
|
||||
* Remove slice creation pointing to potential uninitialized data on h1 encoder. [#2364]
|
||||
* Remove `Into<Error>` bound on `Encoder` body types. [#2375]
|
||||
* Fix quality parse error in Accept-Encoding header. [#2344]
|
||||
|
||||
[#2364]: https://github.com/actix/actix-web/pull/2364
|
||||
[#2375]: https://github.com/actix/actix-web/pull/2375
|
||||
[#2344]: https://github.com/actix/actix-web/pull/2344
|
||||
[#2377]: https://github.com/actix/actix-web/pull/2377
|
||||
|
||||
|
||||
## 3.0.0-beta.9 - 2021-08-09
|
||||
### Fixed
|
||||
* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977)
|
||||
|
||||
|
||||
## 3.0.0-beta.8 - 2021-06-26
|
||||
### Changed
|
||||
* Change compression algorithm features flags. [#2250]
|
||||
|
||||
### Removed
|
||||
* `downcast` and `downcast_get_type_id` macros. [#2291]
|
||||
|
||||
[#2291]: https://github.com/actix/actix-web/pull/2291
|
||||
[#2250]: https://github.com/actix/actix-web/pull/2250
|
||||
|
||||
|
||||
## 3.0.0-beta.7 - 2021-06-17
|
||||
### Added
|
||||
* Alias `body::Body` as `body::AnyBody`. [#2215]
|
||||
@ -199,6 +231,11 @@
|
||||
[#1878]: https://github.com/actix/actix-web/pull/1878
|
||||
|
||||
|
||||
## 2.2.1 - 2021-08-09
|
||||
### Fixed
|
||||
* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977)
|
||||
|
||||
|
||||
## 2.2.0 - 2020-11-25
|
||||
### Added
|
||||
* HttpResponse builders for 1xx status codes. [#1768]
|
||||
|
@ -1,13 +1,11 @@
|
||||
[package]
|
||||
name = "actix-http"
|
||||
version = "3.0.0-beta.7"
|
||||
version = "3.0.0-beta.10"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "HTTP primitives for the Actix ecosystem"
|
||||
readme = "README.md"
|
||||
keywords = ["actix", "http", "framework", "async", "futures"]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
documentation = "https://docs.rs/actix-http/"
|
||||
repository = "https://github.com/actix/actix-web"
|
||||
categories = ["network-programming", "asynchronous",
|
||||
"web-programming::http-server",
|
||||
"web-programming::websocket"]
|
||||
@ -16,7 +14,7 @@ edition = "2018"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
# features that docs.rs will build with
|
||||
features = ["openssl", "rustls", "compress"]
|
||||
features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"]
|
||||
|
||||
[lib]
|
||||
name = "actix_http"
|
||||
@ -32,11 +30,17 @@ openssl = ["actix-tls/openssl"]
|
||||
rustls = ["actix-tls/rustls"]
|
||||
|
||||
# enable compression support
|
||||
compress = ["flate2", "brotli2", "zstd"]
|
||||
compress-brotli = ["brotli2", "__compress"]
|
||||
compress-gzip = ["flate2", "__compress"]
|
||||
compress-zstd = ["zstd", "__compress"]
|
||||
|
||||
# trust-dns as client dns resolver
|
||||
trust-dns = ["trust-dns-resolver"]
|
||||
|
||||
# Internal (PRIVATE!) features used to aid testing and cheking feature status.
|
||||
# Don't rely on these whatsoever. They may disappear at anytime.
|
||||
__compress = []
|
||||
|
||||
[dependencies]
|
||||
actix-service = "2.0.0"
|
||||
actix-codec = "0.4.0"
|
||||
@ -55,7 +59,7 @@ futures-core = { version = "0.3.7", default-features = false, features = ["alloc
|
||||
futures-util = { version = "0.3.7", default-features = false, features = ["alloc", "sink"] }
|
||||
h2 = "0.3.1"
|
||||
http = "0.2.2"
|
||||
httparse = "1.3"
|
||||
httparse = "1.5.1"
|
||||
itoa = "0.4"
|
||||
language-tags = "0.3"
|
||||
local-channel = "0.1"
|
||||
@ -69,7 +73,7 @@ rand = "0.8"
|
||||
regex = "1.3"
|
||||
serde = "1.0"
|
||||
sha-1 = "0.9"
|
||||
smallvec = "1.6"
|
||||
smallvec = "1.6.1"
|
||||
time = { version = "0.2.23", default-features = false, features = ["std"] }
|
||||
tokio = { version = "1.2", features = ["sync"] }
|
||||
|
||||
@ -82,7 +86,7 @@ trust-dns-resolver = { version = "0.20.0", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-server = "2.0.0-beta.3"
|
||||
actix-http-test = { version = "3.0.0-beta.4", features = ["openssl"] }
|
||||
actix-http-test = { version = "3.0.0-beta.5", features = ["openssl"] }
|
||||
actix-tls = { version = "3.0.0-beta.5", features = ["openssl"] }
|
||||
async-stream = "0.3"
|
||||
criterion = { version = "0.3", features = ["html_reports"] }
|
||||
|
@ -3,19 +3,18 @@
|
||||
> HTTP primitives for the Actix ecosystem.
|
||||
|
||||
[](https://crates.io/crates/actix-http)
|
||||
[](https://docs.rs/actix-http/3.0.0-beta.7)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
|
||||
[](https://docs.rs/actix-http/3.0.0-beta.10)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-http/3.0.0-beta.7)
|
||||
[](https://deps.rs/crate/actix-http/3.0.0-beta.10)
|
||||
[](https://crates.io/crates/actix-http)
|
||||
[](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
## Documentation & Resources
|
||||
|
||||
- [API Documentation](https://docs.rs/actix-http)
|
||||
- [Chat on Gitter](https://gitter.im/actix/actix-web)
|
||||
- Minimum Supported Rust Version (MSRV): 1.46.0
|
||||
- Minimum Supported Rust Version (MSRV): 1.51.0
|
||||
|
||||
## Example
|
||||
|
||||
|
@ -78,12 +78,12 @@ impl HeaderIndex {
|
||||
// test cases taken from:
|
||||
// https://github.com/seanmonstar/httparse/blob/master/benches/parse.rs
|
||||
|
||||
const REQ_SHORT: &'static [u8] = b"\
|
||||
const REQ_SHORT: &[u8] = b"\
|
||||
GET / HTTP/1.0\r\n\
|
||||
Host: example.com\r\n\
|
||||
Cookie: session=60; user_id=1\r\n\r\n";
|
||||
|
||||
const REQ: &'static [u8] = b"\
|
||||
const REQ: &[u8] = b"\
|
||||
GET /wp-content/uploads/2010/03/hello-kitty-darth-vader-pink.jpg HTTP/1.1\r\n\
|
||||
Host: www.kittyhell.com\r\n\
|
||||
User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; ja-JP-mac; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 Pathtraq/0.9\r\n\
|
||||
@ -119,6 +119,8 @@ mod _original {
|
||||
use std::mem::MaybeUninit;
|
||||
|
||||
pub fn parse_headers(src: &mut BytesMut) -> usize {
|
||||
#![allow(clippy::uninit_assumed_init)]
|
||||
|
||||
let mut headers: [HeaderIndex; MAX_HEADERS] =
|
||||
unsafe { MaybeUninit::uninit().assume_init() };
|
||||
|
||||
|
@ -18,7 +18,8 @@ fn bench_write_camel_case(c: &mut Criterion) {
|
||||
group.bench_with_input(BenchmarkId::new("New", i), bts, |b, bts| {
|
||||
b.iter(|| {
|
||||
let mut buf = black_box([0; 24]);
|
||||
_new::write_camel_case(black_box(bts), &mut buf)
|
||||
let len = black_box(bts.len());
|
||||
_new::write_camel_case(black_box(bts), buf.as_mut_ptr(), len)
|
||||
});
|
||||
});
|
||||
}
|
||||
@ -30,9 +31,12 @@ criterion_group!(benches, bench_write_camel_case);
|
||||
criterion_main!(benches);
|
||||
|
||||
mod _new {
|
||||
pub fn write_camel_case(value: &[u8], buffer: &mut [u8]) {
|
||||
pub fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
|
||||
// first copy entire (potentially wrong) slice to output
|
||||
buffer[..value.len()].copy_from_slice(value);
|
||||
let buffer = unsafe {
|
||||
std::ptr::copy_nonoverlapping(value.as_ptr(), buf, len);
|
||||
std::slice::from_raw_parts_mut(buf, len)
|
||||
};
|
||||
|
||||
let mut iter = value.iter();
|
||||
|
||||
|
@ -7,7 +7,7 @@ use std::{
|
||||
};
|
||||
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use futures_core::{ready, Stream};
|
||||
use futures_core::Stream;
|
||||
|
||||
use crate::error::Error;
|
||||
|
||||
@ -74,14 +74,10 @@ impl MessageBody for AnyBody {
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: MSRV 1.51: poll_map_err
|
||||
AnyBody::Message(body) => match ready!(body.as_pin_mut().poll_next(cx)) {
|
||||
Some(Err(err)) => {
|
||||
Poll::Ready(Some(Err(Error::new_body().with_cause(err))))
|
||||
}
|
||||
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
|
||||
None => Poll::Ready(None),
|
||||
},
|
||||
AnyBody::Message(body) => body
|
||||
.as_pin_mut()
|
||||
.poll_next(cx)
|
||||
.map_err(|err| Error::new_body().with_cause(err)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -223,11 +219,9 @@ impl MessageBody for BoxAnyBody {
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<Option<Result<Bytes, Self::Error>>> {
|
||||
// TODO: MSRV 1.51: poll_map_err
|
||||
match ready!(self.0.as_mut().poll_next(cx)) {
|
||||
Some(Err(err)) => Poll::Ready(Some(Err(Error::new_body().with_cause(err)))),
|
||||
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
|
||||
None => Poll::Ready(None),
|
||||
}
|
||||
self.0
|
||||
.as_mut()
|
||||
.poll_next(cx)
|
||||
.map_err(|err| Error::new_body().with_cause(err))
|
||||
}
|
||||
}
|
||||
|
@ -11,8 +11,6 @@ use bytes::{Bytes, BytesMut};
|
||||
use futures_core::ready;
|
||||
use pin_project_lite::pin_project;
|
||||
|
||||
use crate::error::Error;
|
||||
|
||||
use super::BodySize;
|
||||
|
||||
/// An interface for response bodies.
|
||||
@ -47,7 +45,6 @@ impl MessageBody for () {
|
||||
impl<B> MessageBody for Box<B>
|
||||
where
|
||||
B: MessageBody + Unpin,
|
||||
B::Error: Into<Error>,
|
||||
{
|
||||
type Error = B::Error;
|
||||
|
||||
@ -66,7 +63,6 @@ where
|
||||
impl<B> MessageBody for Pin<Box<B>>
|
||||
where
|
||||
B: MessageBody,
|
||||
B::Error: Into<Error>,
|
||||
{
|
||||
type Error = B::Error;
|
||||
|
||||
|
@ -80,7 +80,7 @@ mod tests {
|
||||
impl Body {
|
||||
pub(crate) fn get_ref(&self) -> &[u8] {
|
||||
match *self {
|
||||
Body::Bytes(ref bin) => &bin,
|
||||
Body::Bytes(ref bin) => bin,
|
||||
_ => panic!(),
|
||||
}
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ use std::{
|
||||
};
|
||||
|
||||
use bytes::Bytes;
|
||||
use futures_core::{ready, Stream};
|
||||
use futures_core::Stream;
|
||||
use pin_project::pin_project;
|
||||
|
||||
use crate::error::Error;
|
||||
@ -77,12 +77,7 @@ where
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<Option<Self::Item>> {
|
||||
match self.project() {
|
||||
// TODO: MSRV 1.51: poll_map_err
|
||||
ResponseBodyProj::Body(body) => match ready!(body.poll_next(cx)) {
|
||||
Some(Err(err)) => Poll::Ready(Some(Err(err.into()))),
|
||||
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
|
||||
None => Poll::Ready(None),
|
||||
},
|
||||
ResponseBodyProj::Body(body) => body.poll_next(cx).map_err(Into::into),
|
||||
ResponseBodyProj::Other(body) => Pin::new(body).poll_next(cx),
|
||||
}
|
||||
}
|
||||
|
@ -85,7 +85,7 @@ impl Connector<()> {
|
||||
use bytes::{BufMut, BytesMut};
|
||||
|
||||
let mut alpn = BytesMut::with_capacity(20);
|
||||
for proto in protocols.iter() {
|
||||
for proto in &protocols {
|
||||
alpn.put_u8(proto.len() as u8);
|
||||
alpn.put(proto.as_slice());
|
||||
}
|
||||
@ -290,8 +290,7 @@ where
|
||||
let h2 = sock
|
||||
.ssl()
|
||||
.selected_alpn_protocol()
|
||||
.map(|protos| protos.windows(2).any(|w| w == H2))
|
||||
.unwrap_or(false);
|
||||
.map_or(false, |protos| protos.windows(2).any(|w| w == H2));
|
||||
if h2 {
|
||||
(Box::new(sock), Protocol::Http2)
|
||||
} else {
|
||||
@ -325,8 +324,7 @@ where
|
||||
.get_ref()
|
||||
.1
|
||||
.get_alpn_protocol()
|
||||
.map(|protos| protos.windows(2).any(|w| w == H2))
|
||||
.unwrap_or(false);
|
||||
.map_or(false, |protos| protos.windows(2).any(|w| w == H2));
|
||||
if h2 {
|
||||
(Box::new(sock), Protocol::Http2)
|
||||
} else {
|
||||
|
@ -168,14 +168,13 @@ where
|
||||
|
||||
if let Err(e) = send.send_data(bytes, false) {
|
||||
return Err(e.into());
|
||||
} else {
|
||||
if !b.is_empty() {
|
||||
send.reserve_capacity(b.len());
|
||||
} else {
|
||||
buf = None;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if !b.is_empty() {
|
||||
send.reserve_capacity(b.len());
|
||||
} else {
|
||||
buf = None;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
Some(Err(e)) => return Err(e.into()),
|
||||
}
|
||||
|
@ -104,6 +104,8 @@ impl ServiceConfig {
|
||||
}
|
||||
|
||||
/// Returns the local address that this server is bound to.
|
||||
///
|
||||
/// Returns `None` for connections via UDS (Unix Domain Socket).
|
||||
#[inline]
|
||||
pub fn local_addr(&self) -> Option<net::SocketAddr> {
|
||||
self.0.local_addr
|
||||
@ -152,8 +154,8 @@ impl ServiceConfig {
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
/// Return keep-alive timer delay is configured.
|
||||
#[inline]
|
||||
pub fn keep_alive_timer(&self) -> Option<Sleep> {
|
||||
self.keep_alive().map(|ka| sleep_until(self.now() + ka))
|
||||
}
|
||||
@ -326,7 +328,7 @@ mod notify_on_drop {
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
use actix_rt::task::yield_now;
|
||||
use actix_rt::{task::yield_now, time::sleep};
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_date_service_update() {
|
||||
@ -350,7 +352,14 @@ mod tests {
|
||||
assert_ne!(buf1, buf2);
|
||||
|
||||
drop(settings);
|
||||
assert!(notify_on_drop::is_dropped());
|
||||
|
||||
// Ensure the task will drop eventually
|
||||
let mut times = 0;
|
||||
while !notify_on_drop::is_dropped() {
|
||||
sleep(Duration::from_millis(100)).await;
|
||||
times += 1;
|
||||
assert!(times < 10, "Timeout waiting for task drop");
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -365,14 +374,21 @@ mod tests {
|
||||
let clone3 = service.clone();
|
||||
|
||||
drop(clone1);
|
||||
assert_eq!(false, notify_on_drop::is_dropped());
|
||||
assert!(!notify_on_drop::is_dropped());
|
||||
drop(clone2);
|
||||
assert_eq!(false, notify_on_drop::is_dropped());
|
||||
assert!(!notify_on_drop::is_dropped());
|
||||
drop(clone3);
|
||||
assert_eq!(false, notify_on_drop::is_dropped());
|
||||
assert!(!notify_on_drop::is_dropped());
|
||||
|
||||
drop(service);
|
||||
assert!(notify_on_drop::is_dropped());
|
||||
|
||||
// Ensure the task will drop eventually
|
||||
let mut times = 0;
|
||||
while !notify_on_drop::is_dropped() {
|
||||
sleep(Duration::from_millis(100)).await;
|
||||
times += 1;
|
||||
assert!(times < 10, "Timeout waiting for task drop");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -8,10 +8,16 @@ use std::{
|
||||
};
|
||||
|
||||
use actix_rt::task::{spawn_blocking, JoinHandle};
|
||||
use brotli2::write::BrotliDecoder;
|
||||
use bytes::Bytes;
|
||||
use flate2::write::{GzDecoder, ZlibDecoder};
|
||||
use futures_core::{ready, Stream};
|
||||
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
use brotli2::write::BrotliDecoder;
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
use flate2::write::{GzDecoder, ZlibDecoder};
|
||||
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
use zstd::stream::write::Decoder as ZstdDecoder;
|
||||
|
||||
use crate::{
|
||||
@ -37,15 +43,19 @@ where
|
||||
#[inline]
|
||||
pub fn new(stream: S, encoding: ContentEncoding) -> Decoder<S> {
|
||||
let decoder = match encoding {
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentEncoding::Br => Some(ContentDecoder::Br(Box::new(
|
||||
BrotliDecoder::new(Writer::new()),
|
||||
))),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoding::Deflate => Some(ContentDecoder::Deflate(Box::new(
|
||||
ZlibDecoder::new(Writer::new()),
|
||||
))),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoding::Gzip => Some(ContentDecoder::Gzip(Box::new(
|
||||
GzDecoder::new(Writer::new()),
|
||||
))),
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentEncoding::Zstd => Some(ContentDecoder::Zstd(Box::new(
|
||||
ZstdDecoder::new(Writer::new()).expect(
|
||||
"Failed to create zstd decoder. This is a bug. \
|
||||
@ -70,7 +80,7 @@ where
|
||||
let encoding = headers
|
||||
.get(&CONTENT_ENCODING)
|
||||
.and_then(|val| val.to_str().ok())
|
||||
.map(ContentEncoding::from)
|
||||
.and_then(|x| x.parse().ok())
|
||||
.unwrap_or(ContentEncoding::Identity);
|
||||
|
||||
Self::new(stream, encoding)
|
||||
@ -148,17 +158,22 @@ where
|
||||
}
|
||||
|
||||
enum ContentDecoder {
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
Deflate(Box<ZlibDecoder<Writer>>),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
Gzip(Box<GzDecoder<Writer>>),
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
Br(Box<BrotliDecoder<Writer>>),
|
||||
// We need explicit 'static lifetime here because ZstdDecoder need lifetime
|
||||
// argument, and we use `spawn_blocking` in `Decoder::poll_next` that require `FnOnce() -> R + Send + 'static`
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
Zstd(Box<ZstdDecoder<'static, Writer>>),
|
||||
}
|
||||
|
||||
impl ContentDecoder {
|
||||
fn feed_eof(&mut self) -> io::Result<Option<Bytes>> {
|
||||
match self {
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentDecoder::Br(ref mut decoder) => match decoder.flush() {
|
||||
Ok(()) => {
|
||||
let b = decoder.get_mut().take();
|
||||
@ -172,6 +187,7 @@ impl ContentDecoder {
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentDecoder::Gzip(ref mut decoder) => match decoder.try_finish() {
|
||||
Ok(_) => {
|
||||
let b = decoder.get_mut().take();
|
||||
@ -185,6 +201,7 @@ impl ContentDecoder {
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentDecoder::Deflate(ref mut decoder) => match decoder.try_finish() {
|
||||
Ok(_) => {
|
||||
let b = decoder.get_mut().take();
|
||||
@ -197,6 +214,7 @@ impl ContentDecoder {
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentDecoder::Zstd(ref mut decoder) => match decoder.flush() {
|
||||
Ok(_) => {
|
||||
let b = decoder.get_mut().take();
|
||||
@ -213,6 +231,7 @@ impl ContentDecoder {
|
||||
|
||||
fn feed_data(&mut self, data: Bytes) -> io::Result<Option<Bytes>> {
|
||||
match self {
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentDecoder::Br(ref mut decoder) => match decoder.write_all(&data) {
|
||||
Ok(_) => {
|
||||
decoder.flush()?;
|
||||
@ -227,6 +246,7 @@ impl ContentDecoder {
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentDecoder::Gzip(ref mut decoder) => match decoder.write_all(&data) {
|
||||
Ok(_) => {
|
||||
decoder.flush()?;
|
||||
@ -241,6 +261,7 @@ impl ContentDecoder {
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentDecoder::Deflate(ref mut decoder) => match decoder.write_all(&data) {
|
||||
Ok(_) => {
|
||||
decoder.flush()?;
|
||||
@ -255,6 +276,7 @@ impl ContentDecoder {
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentDecoder::Zstd(ref mut decoder) => match decoder.write_all(&data) {
|
||||
Ok(_) => {
|
||||
decoder.flush()?;
|
||||
|
@ -9,12 +9,18 @@ use std::{
|
||||
};
|
||||
|
||||
use actix_rt::task::{spawn_blocking, JoinHandle};
|
||||
use brotli2::write::BrotliEncoder;
|
||||
use bytes::Bytes;
|
||||
use derive_more::Display;
|
||||
use flate2::write::{GzEncoder, ZlibEncoder};
|
||||
use futures_core::ready;
|
||||
use pin_project::pin_project;
|
||||
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
use brotli2::write::BrotliEncoder;
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
use flate2::write::{GzEncoder, ZlibEncoder};
|
||||
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
use zstd::stream::write::Encoder as ZstdEncoder;
|
||||
|
||||
use crate::{
|
||||
@ -23,7 +29,7 @@ use crate::{
|
||||
header::{ContentEncoding, CONTENT_ENCODING},
|
||||
HeaderValue, StatusCode,
|
||||
},
|
||||
Error, ResponseHead,
|
||||
ResponseHead,
|
||||
};
|
||||
|
||||
use super::Writer;
|
||||
@ -101,7 +107,6 @@ enum EncoderBody<B> {
|
||||
impl<B> MessageBody for EncoderBody<B>
|
||||
where
|
||||
B: MessageBody,
|
||||
B::Error: Into<Error>,
|
||||
{
|
||||
type Error = EncoderError<B::Error>;
|
||||
|
||||
@ -125,18 +130,9 @@ where
|
||||
Poll::Ready(Some(Ok(std::mem::take(b))))
|
||||
}
|
||||
}
|
||||
// TODO: MSRV 1.51: poll_map_err
|
||||
EncoderBodyProj::Stream(b) => match ready!(b.poll_next(cx)) {
|
||||
Some(Err(err)) => Poll::Ready(Some(Err(EncoderError::Body(err)))),
|
||||
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
|
||||
None => Poll::Ready(None),
|
||||
},
|
||||
EncoderBodyProj::Stream(b) => b.poll_next(cx).map_err(EncoderError::Body),
|
||||
EncoderBodyProj::BoxedStream(ref mut b) => {
|
||||
match ready!(b.as_pin_mut().poll_next(cx)) {
|
||||
Some(Err(err)) => Poll::Ready(Some(Err(EncoderError::Boxed(err)))),
|
||||
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
|
||||
None => Poll::Ready(None),
|
||||
}
|
||||
b.as_pin_mut().poll_next(cx).map_err(EncoderError::Boxed)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -145,7 +141,6 @@ where
|
||||
impl<B> MessageBody for Encoder<B>
|
||||
where
|
||||
B: MessageBody,
|
||||
B::Error: Into<Error>,
|
||||
{
|
||||
type Error = EncoderError<B::Error>;
|
||||
|
||||
@ -233,28 +228,36 @@ fn update_head(encoding: ContentEncoding, head: &mut ResponseHead) {
|
||||
}
|
||||
|
||||
enum ContentEncoder {
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
Deflate(ZlibEncoder<Writer>),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
Gzip(GzEncoder<Writer>),
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
Br(BrotliEncoder<Writer>),
|
||||
// We need explicit 'static lifetime here because ZstdEncoder needs a lifetime
|
||||
// argument, and we use `spawn_blocking` in `Encoder::poll_next` that requires `FnOnce() -> R + Send + 'static`
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
Zstd(ZstdEncoder<'static, Writer>),
|
||||
}
|
||||
|
||||
impl ContentEncoder {
|
||||
fn encoder(encoding: ContentEncoding) -> Option<Self> {
|
||||
match encoding {
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoding::Deflate => Some(ContentEncoder::Deflate(ZlibEncoder::new(
|
||||
Writer::new(),
|
||||
flate2::Compression::fast(),
|
||||
))),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoding::Gzip => Some(ContentEncoder::Gzip(GzEncoder::new(
|
||||
Writer::new(),
|
||||
flate2::Compression::fast(),
|
||||
))),
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentEncoding::Br => {
|
||||
Some(ContentEncoder::Br(BrotliEncoder::new(Writer::new(), 3)))
|
||||
}
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentEncoding::Zstd => {
|
||||
let encoder = ZstdEncoder::new(Writer::new(), 3).ok()?;
|
||||
Some(ContentEncoder::Zstd(encoder))
|
||||
@ -266,27 +269,35 @@ impl ContentEncoder {
|
||||
#[inline]
|
||||
pub(crate) fn take(&mut self) -> Bytes {
|
||||
match *self {
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentEncoder::Br(ref mut encoder) => encoder.get_mut().take(),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoder::Deflate(ref mut encoder) => encoder.get_mut().take(),
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoder::Gzip(ref mut encoder) => encoder.get_mut().take(),
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentEncoder::Zstd(ref mut encoder) => encoder.get_mut().take(),
|
||||
}
|
||||
}
|
||||
|
||||
fn finish(self) -> Result<Bytes, io::Error> {
|
||||
match self {
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentEncoder::Br(encoder) => match encoder.finish() {
|
||||
Ok(writer) => Ok(writer.buf.freeze()),
|
||||
Err(err) => Err(err),
|
||||
},
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoder::Gzip(encoder) => match encoder.finish() {
|
||||
Ok(writer) => Ok(writer.buf.freeze()),
|
||||
Err(err) => Err(err),
|
||||
},
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoder::Deflate(encoder) => match encoder.finish() {
|
||||
Ok(writer) => Ok(writer.buf.freeze()),
|
||||
Err(err) => Err(err),
|
||||
},
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentEncoder::Zstd(encoder) => match encoder.finish() {
|
||||
Ok(writer) => Ok(writer.buf.freeze()),
|
||||
Err(err) => Err(err),
|
||||
@ -296,6 +307,7 @@ impl ContentEncoder {
|
||||
|
||||
fn write(&mut self, data: &[u8]) -> Result<(), io::Error> {
|
||||
match *self {
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
ContentEncoder::Br(ref mut encoder) => match encoder.write_all(data) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
@ -303,6 +315,7 @@ impl ContentEncoder {
|
||||
Err(err)
|
||||
}
|
||||
},
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoder::Gzip(ref mut encoder) => match encoder.write_all(data) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
@ -310,6 +323,7 @@ impl ContentEncoder {
|
||||
Err(err)
|
||||
}
|
||||
},
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoder::Deflate(ref mut encoder) => match encoder.write_all(data) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
@ -317,6 +331,7 @@ impl ContentEncoder {
|
||||
Err(err)
|
||||
}
|
||||
},
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentEncoder::Zstd(ref mut encoder) => match encoder.write_all(data) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
|
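For reference, a standalone sketch of the fast gzip setup that `ContentEncoder::encoder` builds above, using the same `flate2` API this diff imports; it writes into a plain `Vec<u8>` instead of the crate's `Writer`, and the helper name is illustrative:

use std::io::Write as _;

use flate2::{write::GzEncoder, Compression};

fn gzip_fast(data: &[u8]) -> std::io::Result<Vec<u8>> {
    // Mirrors `GzEncoder::new(Writer::new(), flate2::Compression::fast())`.
    let mut enc = GzEncoder::new(Vec::new(), Compression::fast());
    enc.write_all(data)?;
    enc.finish()
}

fn main() -> std::io::Result<()> {
    let compressed = gzip_fast(b"hello world")?;
    assert!(!compressed.is_empty());
    Ok(())
}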
@ -55,6 +55,8 @@ impl Error {
|
||||
Self::new(Kind::Io)
|
||||
}
|
||||
|
||||
// used in encoder behind feature flag so ignore unused warning
|
||||
#[allow(unused)]
|
||||
pub(crate) fn new_encoder() -> Self {
|
||||
Self::new(Kind::Encoder)
|
||||
}
|
||||
@ -125,7 +127,7 @@ impl fmt::Display for Error {
|
||||
|
||||
impl StdError for Error {
|
||||
fn source(&self) -> Option<&(dyn StdError + 'static)> {
|
||||
self.inner.cause.as_ref().map(|err| err.as_ref())
|
||||
self.inner.cause.as_ref().map(Box::as_ref)
|
||||
}
|
||||
}
|
||||
|
||||
@ -194,7 +196,7 @@ pub enum ParseError {
|
||||
#[display(fmt = "IO error: {}", _0)]
|
||||
Io(io::Error),
|
||||
|
||||
/// Parsing a field as string failed
|
||||
/// Parsing a field as string failed.
|
||||
#[display(fmt = "UTF8 error: {}", _0)]
|
||||
Utf8(Utf8Error),
|
||||
}
|
||||
|
432
actix-http/src/h1/chunked.rs
Normal file
@ -0,0 +1,432 @@
|
||||
use std::{io, task::Poll};
|
||||
|
||||
use bytes::{Buf as _, Bytes, BytesMut};
|
||||
|
||||
macro_rules! byte (
|
||||
($rdr:ident) => ({
|
||||
if $rdr.len() > 0 {
|
||||
let b = $rdr[0];
|
||||
$rdr.advance(1);
|
||||
b
|
||||
} else {
|
||||
return Poll::Pending
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub(super) enum ChunkedState {
|
||||
Size,
|
||||
SizeLws,
|
||||
Extension,
|
||||
SizeLf,
|
||||
Body,
|
||||
BodyCr,
|
||||
BodyLf,
|
||||
EndCr,
|
||||
EndLf,
|
||||
End,
|
||||
}
|
||||
|
||||
impl ChunkedState {
|
||||
pub(super) fn step(
|
||||
&self,
|
||||
body: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
use self::ChunkedState::*;
|
||||
match *self {
|
||||
Size => ChunkedState::read_size(body, size),
|
||||
SizeLws => ChunkedState::read_size_lws(body),
|
||||
Extension => ChunkedState::read_extension(body),
|
||||
SizeLf => ChunkedState::read_size_lf(body, *size),
|
||||
Body => ChunkedState::read_body(body, size, buf),
|
||||
BodyCr => ChunkedState::read_body_cr(body),
|
||||
BodyLf => ChunkedState::read_body_lf(body),
|
||||
EndCr => ChunkedState::read_end_cr(body),
|
||||
EndLf => ChunkedState::read_end_lf(body),
|
||||
End => Poll::Ready(Ok(ChunkedState::End)),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_size(
|
||||
rdr: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
let radix = 16;
|
||||
|
||||
let rem = match byte!(rdr) {
|
||||
b @ b'0'..=b'9' => b - b'0',
|
||||
b @ b'a'..=b'f' => b + 10 - b'a',
|
||||
b @ b'A'..=b'F' => b + 10 - b'A',
|
||||
b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)),
|
||||
b';' => return Poll::Ready(Ok(ChunkedState::Extension)),
|
||||
b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => {
|
||||
return Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size line: Invalid Size",
|
||||
)));
|
||||
}
|
||||
};
|
||||
|
||||
match size.checked_mul(radix) {
|
||||
Some(n) => {
|
||||
*size = n as u64;
|
||||
*size += rem as u64;
|
||||
|
||||
Poll::Ready(Ok(ChunkedState::Size))
|
||||
}
|
||||
None => {
|
||||
log::debug!("chunk size would overflow u64");
|
||||
Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size line: Size is too big",
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_size_lws(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
// LWS can follow the chunk size, but no more digits can come
|
||||
b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)),
|
||||
b';' => Poll::Ready(Ok(ChunkedState::Extension)),
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size linear white space",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_extension(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
// strictly 0x20 (space) should be disallowed but we don't parse quoted strings here
|
||||
0x00..=0x08 | 0x0a..=0x1f | 0x7f => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid character in chunk extension",
|
||||
))),
|
||||
_ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
|
||||
}
|
||||
}
|
||||
fn read_size_lf(
|
||||
rdr: &mut BytesMut,
|
||||
size: u64,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' if size > 0 => Poll::Ready(Ok(ChunkedState::Body)),
|
||||
b'\n' if size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_body(
|
||||
rdr: &mut BytesMut,
|
||||
rem: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
log::trace!("Chunked read, remaining={:?}", rem);
|
||||
|
||||
let len = rdr.len() as u64;
|
||||
if len == 0 {
|
||||
Poll::Ready(Ok(ChunkedState::Body))
|
||||
} else {
|
||||
let slice;
|
||||
if *rem > len {
|
||||
slice = rdr.split().freeze();
|
||||
*rem -= len;
|
||||
} else {
|
||||
slice = rdr.split_to(*rem as usize).freeze();
|
||||
*rem = 0;
|
||||
}
|
||||
*buf = Some(slice);
|
||||
if *rem > 0 {
|
||||
Poll::Ready(Ok(ChunkedState::Body))
|
||||
} else {
|
||||
Poll::Ready(Ok(ChunkedState::BodyCr))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_body_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk body CR",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_body_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' => Poll::Ready(Ok(ChunkedState::Size)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk body LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_end_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk end CR",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_end_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' => Poll::Ready(Ok(ChunkedState::End)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk end LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use actix_codec::Decoder as _;
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use http::Method;
|
||||
|
||||
use crate::{
|
||||
error::ParseError,
|
||||
h1::decoder::{MessageDecoder, PayloadItem},
|
||||
HttpMessage as _, Request,
|
||||
};
|
||||
|
||||
macro_rules! parse_ready {
|
||||
($e:expr) => {{
|
||||
match MessageDecoder::<Request>::default().decode($e) {
|
||||
Ok(Some((msg, _))) => msg,
|
||||
Ok(_) => unreachable!("Eof during parsing http request"),
|
||||
Err(err) => unreachable!("Error during parsing http request: {:?}", err),
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! expect_parse_err {
|
||||
($e:expr) => {{
|
||||
match MessageDecoder::<Request>::default().decode($e) {
|
||||
Err(err) => match err {
|
||||
ParseError::Io(_) => unreachable!("Parse error expected"),
|
||||
_ => {}
|
||||
},
|
||||
_ => unreachable!("Error expected"),
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_chunked_payload_chunk_extension() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\
|
||||
\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(msg.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n")
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
|
||||
assert_eq!(chunk, Bytes::from_static(b"data"));
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
|
||||
assert_eq!(chunk, Bytes::from_static(b"line"));
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(msg.eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_request_chunked() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
if let Ok(val) = req.chunked() {
|
||||
assert!(val);
|
||||
} else {
|
||||
unreachable!("Error");
|
||||
}
|
||||
|
||||
// intentional typo in "chunked"
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chnked\r\n\r\n",
|
||||
);
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n");
|
||||
assert_eq!(
|
||||
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
|
||||
b"data"
|
||||
);
|
||||
assert_eq!(
|
||||
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
|
||||
b"line"
|
||||
);
|
||||
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload_and_next_message() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(
|
||||
b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\
|
||||
POST /test2 HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n"
|
||||
.iter(),
|
||||
);
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"data");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"line");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(msg.eof());
|
||||
|
||||
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
assert_eq!(*req.method(), Method::POST);
|
||||
assert!(req.chunked().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload_chunks() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4\r\n1111\r\n");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"1111");
|
||||
|
||||
buf.extend(b"4\r\ndata\r");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"data");
|
||||
|
||||
buf.extend(b"\n4");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"\r");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
buf.extend(b"\n");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"li");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"li");
|
||||
|
||||
//trailers
|
||||
//buf.feed_data("test: test\r\n");
|
||||
//not_ready!(reader.parse(&mut buf, &mut readbuf));
|
||||
|
||||
buf.extend(b"ne\r\n0\r\n");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"ne");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"\r\n");
|
||||
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn chunk_extension_quoted() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
Host: localhost:8080\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
2;hello=b;one=\"1 2 3\"\r\n\
|
||||
xx",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"xx")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_chunk_extension_invalid() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: localhost:8080\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
2;x\nx\r\n\
|
||||
4c\r\n\
|
||||
0\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
|
||||
let err = pl.decode(&mut buf).unwrap_err();
|
||||
assert!(err
|
||||
.to_string()
|
||||
.contains("Invalid character in chunk extension"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_chunk_size_overflow() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
f0000000000000003\r\n\
|
||||
abc\r\n\
|
||||
0\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
|
||||
let err = pl.decode(&mut buf).unwrap_err();
|
||||
assert!(err
|
||||
.to_string()
|
||||
.contains("Invalid chunk size line: Size is too big"));
|
||||
}
|
||||
}
|
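Aside: a standalone sketch (plain std, not the crate's code) of the checked hex accumulation that `ChunkedState::read_size` performs above. It shows why an oversized chunk-size line such as "f0000000000000003" is rejected instead of silently wrapping:

fn parse_chunk_size(line: &str) -> Result<u64, &'static str> {
    let mut size: u64 = 0;
    for b in line.bytes() {
        let digit = match b {
            b'0'..=b'9' => b - b'0',
            b'a'..=b'f' => b + 10 - b'a',
            b'A'..=b'F' => b + 10 - b'A',
            _ => return Err("invalid chunk size digit"),
        };
        // Same checked radix-16 step as `read_size`: overflow is an error.
        size = size
            .checked_mul(16)
            .and_then(|s| s.checked_add(u64::from(digit)))
            .ok_or("chunk size would overflow u64")?;
    }
    Ok(size)
}

fn main() {
    assert_eq!(parse_chunk_size("4"), Ok(4));
    assert!(parse_chunk_size("f0000000000000003").is_err());
}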
@ -1,18 +1,18 @@
|
||||
use std::convert::TryFrom;
|
||||
use std::io;
|
||||
use std::marker::PhantomData;
|
||||
use std::task::Poll;
|
||||
use std::{convert::TryFrom, io, marker::PhantomData, mem::MaybeUninit, task::Poll};
|
||||
|
||||
use actix_codec::Decoder;
|
||||
use bytes::{Buf, Bytes, BytesMut};
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use http::header::{HeaderName, HeaderValue};
|
||||
use http::{header, Method, StatusCode, Uri, Version};
|
||||
use log::{debug, error, trace};
|
||||
|
||||
use crate::error::ParseError;
|
||||
use crate::header::HeaderMap;
|
||||
use crate::message::{ConnectionType, ResponseHead};
|
||||
use crate::request::Request;
|
||||
use super::chunked::ChunkedState;
|
||||
use crate::{
|
||||
error::ParseError,
|
||||
header::HeaderMap,
|
||||
message::{ConnectionType, ResponseHead},
|
||||
request::Request,
|
||||
};
|
||||
|
||||
pub(crate) const MAX_BUFFER_SIZE: usize = 131_072;
|
||||
const MAX_HEADERS: usize = 96;
|
||||
@ -67,6 +67,7 @@ pub(crate) trait MessageType: Sized {
|
||||
let mut has_upgrade_websocket = false;
|
||||
let mut expect = false;
|
||||
let mut chunked = false;
|
||||
let mut seen_te = false;
|
||||
let mut content_length = None;
|
||||
|
||||
{
|
||||
@ -85,8 +86,17 @@ pub(crate) trait MessageType: Sized {
|
||||
};
|
||||
|
||||
match name {
|
||||
header::CONTENT_LENGTH => {
|
||||
if let Ok(s) = value.to_str() {
|
||||
header::CONTENT_LENGTH if content_length.is_some() => {
|
||||
debug!("multiple Content-Length");
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
header::CONTENT_LENGTH => match value.to_str() {
|
||||
Ok(s) if s.trim().starts_with('+') => {
|
||||
debug!("illegal Content-Length: {:?}", s);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
Ok(s) => {
|
||||
if let Ok(len) = s.parse::<u64>() {
|
||||
if len != 0 {
|
||||
content_length = Some(len);
|
||||
@ -95,22 +105,38 @@ pub(crate) trait MessageType: Sized {
|
||||
debug!("illegal Content-Length: {:?}", s);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
} else {
|
||||
}
|
||||
Err(_) => {
|
||||
debug!("illegal Content-Length: {:?}", value);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// transfer-encoding
|
||||
header::TRANSFER_ENCODING if seen_te => {
|
||||
debug!("multiple Transfer-Encoding not allowed");
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
header::TRANSFER_ENCODING => {
|
||||
if let Ok(s) = value.to_str().map(|s| s.trim()) {
|
||||
chunked = s.eq_ignore_ascii_case("chunked");
|
||||
seen_te = true;
|
||||
|
||||
if let Ok(s) = value.to_str().map(str::trim) {
|
||||
if s.eq_ignore_ascii_case("chunked") {
|
||||
chunked = true;
|
||||
} else if s.eq_ignore_ascii_case("identity") {
|
||||
// allow silently since multiple TE headers are already checked
|
||||
} else {
|
||||
debug!("illegal Transfer-Encoding: {:?}", s);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
} else {
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
}
|
||||
// connection keep-alive state
|
||||
header::CONNECTION => {
|
||||
ka = if let Ok(conn) = value.to_str().map(|conn| conn.trim()) {
|
||||
ka = if let Ok(conn) = value.to_str().map(str::trim) {
|
||||
if conn.eq_ignore_ascii_case("keep-alive") {
|
||||
Some(ConnectionType::KeepAlive)
|
||||
} else if conn.eq_ignore_ascii_case("close") {
|
||||
@ -125,7 +151,7 @@ pub(crate) trait MessageType: Sized {
|
||||
};
|
||||
}
|
||||
header::UPGRADE => {
|
||||
if let Ok(val) = value.to_str().map(|val| val.trim()) {
|
||||
if let Ok(val) = value.to_str().map(str::trim) {
|
||||
if val.eq_ignore_ascii_case("websocket") {
|
||||
has_upgrade_websocket = true;
|
||||
}
|
||||
@ -186,10 +212,17 @@ impl MessageType for Request {
|
||||
let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;
|
||||
|
||||
let (len, method, uri, ver, h_len) = {
|
||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;
|
||||
// SAFETY:
|
||||
// Create an uninitialized array of `MaybeUninit`. The `assume_init` is
|
||||
// safe because the type we are claiming to have initialized here is a
|
||||
// bunch of `MaybeUninit`s, which do not require initialization.
|
||||
let mut parsed = unsafe {
|
||||
MaybeUninit::<[MaybeUninit<httparse::Header<'_>>; MAX_HEADERS]>::uninit()
|
||||
.assume_init()
|
||||
};
|
||||
|
||||
let mut req = httparse::Request::new(&mut parsed);
|
||||
match req.parse(src)? {
|
||||
let mut req = httparse::Request::new(&mut []);
|
||||
match req.parse_with_uninit_headers(src, &mut parsed)? {
|
||||
httparse::Status::Complete(len) => {
|
||||
let method = Method::from_bytes(req.method.unwrap().as_bytes())
|
||||
.map_err(|_| ParseError::Method)?;
|
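The uninitialized-headers change above uses a standard `MaybeUninit` pattern; a generic, hedged sketch of it follows (not code from this diff). An array of `MaybeUninit<T>` needs no initialization itself, so asserting that the outer `MaybeUninit` is initialized is sound, while the elements must still be written before they are read:

use std::mem::MaybeUninit;

fn uninit_array<T, const N: usize>() -> [MaybeUninit<T>; N] {
    // SAFETY: `[MaybeUninit<T>; N]` is valid in an uninitialized state.
    unsafe { MaybeUninit::<[MaybeUninit<T>; N]>::uninit().assume_init() }
}

fn main() {
    let headers: [MaybeUninit<u8>; 16] = uninit_array();
    assert_eq!(headers.len(), 16);
}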
||||
@ -408,20 +441,6 @@ enum Kind {
|
||||
Eof,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
enum ChunkedState {
|
||||
Size,
|
||||
SizeLws,
|
||||
Extension,
|
||||
SizeLf,
|
||||
Body,
|
||||
BodyCr,
|
||||
BodyLf,
|
||||
EndCr,
|
||||
EndLf,
|
||||
End,
|
||||
}
|
||||
|
||||
impl Decoder for PayloadDecoder {
|
||||
type Item = PayloadItem;
|
||||
type Error = io::Error;
|
||||
@ -451,19 +470,23 @@ impl Decoder for PayloadDecoder {
|
||||
Kind::Chunked(ref mut state, ref mut size) => {
|
||||
loop {
|
||||
let mut buf = None;
|
||||
|
||||
// advances the chunked state
|
||||
*state = match state.step(src, size, &mut buf) {
|
||||
Poll::Pending => return Ok(None),
|
||||
Poll::Ready(Ok(state)) => state,
|
||||
Poll::Ready(Err(e)) => return Err(e),
|
||||
};
|
||||
|
||||
if *state == ChunkedState::End {
|
||||
trace!("End of chunked stream");
|
||||
return Ok(Some(PayloadItem::Eof));
|
||||
}
|
||||
|
||||
if let Some(buf) = buf {
|
||||
return Ok(Some(PayloadItem::Chunk(buf)));
|
||||
}
|
||||
|
||||
if src.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
@ -480,201 +503,40 @@ impl Decoder for PayloadDecoder {
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! byte (
|
||||
($rdr:ident) => ({
|
||||
if $rdr.len() > 0 {
|
||||
let b = $rdr[0];
|
||||
$rdr.advance(1);
|
||||
b
|
||||
} else {
|
||||
return Poll::Pending
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
impl ChunkedState {
|
||||
fn step(
|
||||
&self,
|
||||
body: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
use self::ChunkedState::*;
|
||||
match *self {
|
||||
Size => ChunkedState::read_size(body, size),
|
||||
SizeLws => ChunkedState::read_size_lws(body),
|
||||
Extension => ChunkedState::read_extension(body),
|
||||
SizeLf => ChunkedState::read_size_lf(body, size),
|
||||
Body => ChunkedState::read_body(body, size, buf),
|
||||
BodyCr => ChunkedState::read_body_cr(body),
|
||||
BodyLf => ChunkedState::read_body_lf(body),
|
||||
EndCr => ChunkedState::read_end_cr(body),
|
||||
EndLf => ChunkedState::read_end_lf(body),
|
||||
End => Poll::Ready(Ok(ChunkedState::End)),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_size(
|
||||
rdr: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
let radix = 16;
|
||||
match byte!(rdr) {
|
||||
b @ b'0'..=b'9' => {
|
||||
*size *= radix;
|
||||
*size += u64::from(b - b'0');
|
||||
}
|
||||
b @ b'a'..=b'f' => {
|
||||
*size *= radix;
|
||||
*size += u64::from(b + 10 - b'a');
|
||||
}
|
||||
b @ b'A'..=b'F' => {
|
||||
*size *= radix;
|
||||
*size += u64::from(b + 10 - b'A');
|
||||
}
|
||||
b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)),
|
||||
b';' => return Poll::Ready(Ok(ChunkedState::Extension)),
|
||||
b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => {
|
||||
return Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size line: Invalid Size",
|
||||
)));
|
||||
}
|
||||
}
|
||||
Poll::Ready(Ok(ChunkedState::Size))
|
||||
}
|
||||
|
||||
fn read_size_lws(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
trace!("read_size_lws");
|
||||
match byte!(rdr) {
|
||||
// LWS can follow the chunk size, but no more digits can come
|
||||
b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)),
|
||||
b';' => Poll::Ready(Ok(ChunkedState::Extension)),
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size linear white space",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_extension(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
|
||||
}
|
||||
}
|
||||
fn read_size_lf(
|
||||
rdr: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' if *size > 0 => Poll::Ready(Ok(ChunkedState::Body)),
|
||||
b'\n' if *size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_body(
|
||||
rdr: &mut BytesMut,
|
||||
rem: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
trace!("Chunked read, remaining={:?}", rem);
|
||||
|
||||
let len = rdr.len() as u64;
|
||||
if len == 0 {
|
||||
Poll::Ready(Ok(ChunkedState::Body))
|
||||
} else {
|
||||
let slice;
|
||||
if *rem > len {
|
||||
slice = rdr.split().freeze();
|
||||
*rem -= len;
|
||||
} else {
|
||||
slice = rdr.split_to(*rem as usize).freeze();
|
||||
*rem = 0;
|
||||
}
|
||||
*buf = Some(slice);
|
||||
if *rem > 0 {
|
||||
Poll::Ready(Ok(ChunkedState::Body))
|
||||
} else {
|
||||
Poll::Ready(Ok(ChunkedState::BodyCr))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_body_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk body CR",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_body_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' => Poll::Ready(Ok(ChunkedState::Size)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk body LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_end_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk end CR",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_end_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' => Poll::Ready(Ok(ChunkedState::End)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk end LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use http::{Method, Version};
|
||||
|
||||
use super::*;
|
||||
use crate::error::ParseError;
|
||||
use crate::http::header::{HeaderName, SET_COOKIE};
|
||||
use crate::HttpMessage;
|
||||
use crate::{
|
||||
error::ParseError,
|
||||
http::header::{HeaderName, SET_COOKIE},
|
||||
HttpMessage as _,
|
||||
};
|
||||
|
||||
impl PayloadType {
|
||||
fn unwrap(self) -> PayloadDecoder {
|
||||
pub(crate) fn unwrap(self) -> PayloadDecoder {
|
||||
match self {
|
||||
PayloadType::Payload(pl) => pl,
|
||||
_ => panic!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_unhandled(&self) -> bool {
|
||||
pub(crate) fn is_unhandled(&self) -> bool {
|
||||
matches!(self, PayloadType::Stream(_))
|
||||
}
|
||||
}
|
||||
|
||||
impl PayloadItem {
|
||||
fn chunk(self) -> Bytes {
|
||||
pub(crate) fn chunk(self) -> Bytes {
|
||||
match self {
|
||||
PayloadItem::Chunk(chunk) => chunk,
|
||||
_ => panic!("error"),
|
||||
}
|
||||
}
|
||||
fn eof(&self) -> bool {
|
||||
|
||||
pub(crate) fn eof(&self) -> bool {
|
||||
matches!(*self, PayloadItem::Eof)
|
||||
}
|
||||
}
|
||||
@ -967,34 +829,6 @@ mod tests {
|
||||
assert!(req.upgrade());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_request_chunked() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
if let Ok(val) = req.chunked() {
|
||||
assert!(val);
|
||||
} else {
|
||||
unreachable!("Error");
|
||||
}
|
||||
|
||||
// intentional typo in "chunked"
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chnked\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
if let Ok(val) = req.chunked() {
|
||||
assert!(!val);
|
||||
} else {
|
||||
unreachable!("Error");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_headers_content_length_err_1() {
|
||||
let mut buf = BytesMut::from(
|
||||
@ -1112,126 +946,6 @@ mod tests {
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n");
|
||||
assert_eq!(
|
||||
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
|
||||
b"data"
|
||||
);
|
||||
assert_eq!(
|
||||
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
|
||||
b"line"
|
||||
);
|
||||
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload_and_next_message() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(
|
||||
b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\
|
||||
POST /test2 HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n"
|
||||
.iter(),
|
||||
);
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"data");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"line");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(msg.eof());
|
||||
|
||||
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
assert_eq!(*req.method(), Method::POST);
|
||||
assert!(req.chunked().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload_chunks() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4\r\n1111\r\n");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"1111");
|
||||
|
||||
buf.extend(b"4\r\ndata\r");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"data");
|
||||
|
||||
buf.extend(b"\n4");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"\r");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
buf.extend(b"\n");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"li");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"li");
|
||||
|
||||
//trailers
|
||||
//buf.feed_data("test: test\r\n");
|
||||
//not_ready!(reader.parse(&mut buf, &mut readbuf));
|
||||
|
||||
buf.extend(b"ne\r\n0\r\n");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"ne");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"\r\n");
|
||||
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_chunked_payload_chunk_extension() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\
|
||||
\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(msg.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n")
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
|
||||
assert_eq!(chunk, Bytes::from_static(b"data"));
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
|
||||
assert_eq!(chunk, Bytes::from_static(b"line"));
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(msg.eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_response_http10_read_until_eof() {
|
||||
let mut buf = BytesMut::from("HTTP/1.0 200 Ok\r\n\r\ntest data");
|
||||
@ -1243,4 +957,84 @@ mod tests {
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"test data")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_multiple_content_length() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: 4\r\n\
|
||||
Content-Length: 2\r\n\
|
||||
\r\n\
|
||||
abcd",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_content_length_plus() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: +3\r\n\
|
||||
\r\n\
|
||||
000",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_unknown_transfer_encoding() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Transfer-Encoding: JUNK\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
5\r\n\
|
||||
hello\r\n\
|
||||
0",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_multiple_transfer_encoding() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: 51\r\n\
|
||||
Transfer-Encoding: identity\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
0\r\n\
|
||||
\r\n\
|
||||
GET /forbidden HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\r\n",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn transfer_encoding_agrees() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: 3\r\n\
|
||||
Transfer-Encoding: identity\r\n\
|
||||
\r\n\
|
||||
0\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"0\r\n")));
|
||||
}
|
||||
}
|
||||
|
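A small standalone illustration of why the explicit leading-'+' check added to the Content-Length handling earlier in this file is needed: Rust's integer `FromStr` accepts a leading plus sign, so "+3" would otherwise slip through as a valid length even though HTTP forbids it:

fn main() {
    // `u64::from_str` tolerates an optional leading '+', hence the dedicated
    // rejection of '+'-prefixed Content-Length values in the parser above.
    assert_eq!("+3".parse::<u64>(), Ok(3));
    assert!("3".parse::<u64>().is_ok());
}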
@ -515,14 +515,13 @@ where
|
||||
cx: &mut Context<'_>,
|
||||
) -> Result<(), DispatchError> {
|
||||
// Handle `EXPECT: 100-Continue` header
|
||||
let mut this = self.as_mut().project();
|
||||
if req.head().expect() {
|
||||
// set dispatcher state so the future is pinned.
|
||||
let mut this = self.as_mut().project();
|
||||
let task = this.flow.expect.call(req);
|
||||
this.state.set(State::ExpectCall(task));
|
||||
} else {
|
||||
// the same as above.
|
||||
let mut this = self.as_mut().project();
|
||||
let task = this.flow.service.call(req);
|
||||
this.state.set(State::ServiceCall(task));
|
||||
};
|
||||
@ -1061,7 +1060,7 @@ mod tests {
|
||||
fn stabilize_date_header(payload: &mut [u8]) {
|
||||
let mut from = 0;
|
||||
|
||||
while let Some(pos) = find_slice(&payload, b"date", from) {
|
||||
while let Some(pos) = find_slice(payload, b"date", from) {
|
||||
payload[(from + pos)..(from + pos + 35)]
|
||||
.copy_from_slice(b"date: Thu, 01 Jan 1970 12:34:56 UTC");
|
||||
from += 35;
|
||||
|
@ -81,6 +81,7 @@ pub(crate) trait MessageType: Sized {
|
||||
match length {
|
||||
BodySize::Stream => {
|
||||
if chunked {
|
||||
skip_len = true;
|
||||
if camel_case {
|
||||
dst.put_slice(b"\r\nTransfer-Encoding: chunked\r\n")
|
||||
} else {
|
||||
@ -174,7 +175,7 @@ pub(crate) trait MessageType: Sized {
|
||||
unsafe {
|
||||
if camel_case {
|
||||
// use Camel-Case headers
|
||||
write_camel_case(k, from_raw_parts_mut(buf, k_len));
|
||||
write_camel_case(k, buf, k_len);
|
||||
} else {
|
||||
write_data(k, buf, k_len);
|
||||
}
|
||||
@ -472,15 +473,22 @@ impl TransferEncoding {
|
||||
}
|
||||
|
||||
/// # Safety
|
||||
/// Callers must ensure that the given length matches given value length.
|
||||
/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is
|
||||
/// valid for writes of at least `len` bytes.
|
||||
unsafe fn write_data(value: &[u8], buf: *mut u8, len: usize) {
|
||||
debug_assert_eq!(value.len(), len);
|
||||
copy_nonoverlapping(value.as_ptr(), buf, len);
|
||||
}
|
||||
|
||||
fn write_camel_case(value: &[u8], buffer: &mut [u8]) {
|
||||
/// # Safety
|
||||
/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is
|
||||
/// valid for writes of at least `len` bytes.
|
||||
unsafe fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
|
||||
// first copy entire (potentially wrong) slice to output
|
||||
buffer[..value.len()].copy_from_slice(value);
|
||||
write_data(value, buf, len);
|
||||
|
||||
// SAFETY: We just initialized the buffer with `value`
|
||||
let buffer = from_raw_parts_mut(buf, len);
|
||||
|
||||
let mut iter = value.iter();
|
||||
|
||||
|
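As a plain, safe illustration of the Camel-Case rewrite that `write_camel_case` performs in place above; this allocating version is a guess at the intended transformation based on the function name and is not the crate's implementation:

fn camel_case(name: &str) -> String {
    let mut out = String::with_capacity(name.len());
    let mut upper = true;
    for ch in name.chars() {
        // Uppercase the first character and any character following a '-'.
        out.push(if upper { ch.to_ascii_uppercase() } else { ch });
        upper = ch == '-';
    }
    out
}

fn main() {
    assert_eq!(camel_case("content-length"), "Content-Length");
}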
@ -1,6 +1,8 @@
|
||||
//! HTTP/1 protocol implementation.
|
||||
|
||||
use bytes::{Bytes, BytesMut};
|
||||
|
||||
mod chunked;
|
||||
mod client;
|
||||
mod codec;
|
||||
mod decoder;
|
||||
|
@ -186,8 +186,7 @@ impl Inner {
|
||||
if self
|
||||
.task
|
||||
.as_ref()
|
||||
.map(|w| !cx.waker().will_wake(w))
|
||||
.unwrap_or(true)
|
||||
.map_or(true, |w| !cx.waker().will_wake(w))
|
||||
{
|
||||
self.task = Some(cx.waker().clone());
|
||||
}
|
||||
@ -199,8 +198,7 @@ impl Inner {
|
||||
if self
|
||||
.io_task
|
||||
.as_ref()
|
||||
.map(|w| !cx.waker().will_wake(w))
|
||||
.unwrap_or(true)
|
||||
.map_or(true, |w| !cx.waker().will_wake(w))
|
||||
{
|
||||
self.io_task = Some(cx.waker().clone());
|
||||
}
|
||||
|
@ -63,7 +63,6 @@ where
|
||||
.is_write_buf_full()
|
||||
{
|
||||
let next =
|
||||
// TODO: MSRV 1.51: poll_map_err
|
||||
match this.body.as_mut().as_pin_mut().unwrap().poll_next(cx) {
|
||||
Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)),
|
||||
Poll::Ready(Some(Err(err))) => {
|
||||
|
@ -249,7 +249,7 @@ impl HeaderMap {
|
||||
/// assert!(map.get("INVALID HEADER NAME").is_none());
|
||||
/// ```
|
||||
pub fn get(&self, key: impl AsHeaderName) -> Option<&HeaderValue> {
|
||||
self.get_value(key).map(|val| val.first())
|
||||
self.get_value(key).map(Value::first)
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the _first_ value associated a header name.
|
||||
@ -280,8 +280,8 @@ impl HeaderMap {
|
||||
/// ```
|
||||
pub fn get_mut(&mut self, key: impl AsHeaderName) -> Option<&mut HeaderValue> {
|
||||
match key.try_as_name(super::as_name::Seal).ok()? {
|
||||
Cow::Borrowed(name) => self.inner.get_mut(name).map(|v| v.first_mut()),
|
||||
Cow::Owned(name) => self.inner.get_mut(&name).map(|v| v.first_mut()),
|
||||
Cow::Borrowed(name) => self.inner.get_mut(name).map(Value::first_mut),
|
||||
Cow::Owned(name) => self.inner.get_mut(&name).map(Value::first_mut),
|
||||
}
|
||||
}
|
||||
|
||||
@ -684,7 +684,7 @@ impl<'a> Iterator for Iter<'a> {
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
// handle in-progress multi value lists first
|
||||
if let Some((ref name, ref mut vals)) = self.multi_inner {
|
||||
if let Some((name, ref mut vals)) = self.multi_inner {
|
||||
match vals.get(self.multi_idx) {
|
||||
Some(val) => {
|
||||
self.multi_idx += 1;
|
||||
|
@ -1,5 +1,6 @@
|
||||
use std::{convert::Infallible, str::FromStr};
|
||||
use std::{convert::TryFrom, str::FromStr};
|
||||
|
||||
use derive_more::{Display, Error};
|
||||
use http::header::InvalidHeaderValue;
|
||||
|
||||
use crate::{
|
||||
@ -8,8 +9,16 @@ use crate::{
|
||||
HttpMessage,
|
||||
};
|
||||
|
||||
/// Error returned when a content encoding is unknown.
|
||||
///
|
||||
/// Example: 'compress'
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "unsupported content encoding")]
|
||||
pub struct ContentEncodingParseError;
|
||||
|
||||
/// Represents a supported content encoding.
|
||||
#[derive(Copy, Clone, PartialEq, Debug)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
#[non_exhaustive]
|
||||
pub enum ContentEncoding {
|
||||
/// Automatically select encoding based on encoding negotiation.
|
||||
Auto,
|
||||
@ -37,7 +46,7 @@ impl ContentEncoding {
|
||||
matches!(self, ContentEncoding::Identity | ContentEncoding::Auto)
|
||||
}
|
||||
|
||||
/// Convert content encoding to string
|
||||
/// Convert content encoding to string.
|
||||
#[inline]
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
@ -48,18 +57,6 @@ impl ContentEncoding {
|
||||
ContentEncoding::Identity | ContentEncoding::Auto => "identity",
|
||||
}
|
||||
}
|
||||
|
||||
/// Default Q-factor (quality) value.
|
||||
#[inline]
|
||||
pub fn quality(self) -> f64 {
|
||||
match self {
|
||||
ContentEncoding::Br => 1.1,
|
||||
ContentEncoding::Gzip => 1.0,
|
||||
ContentEncoding::Deflate => 0.9,
|
||||
ContentEncoding::Identity | ContentEncoding::Auto => 0.1,
|
||||
ContentEncoding::Zstd => 0.0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ContentEncoding {
|
||||
@ -69,31 +66,33 @@ impl Default for ContentEncoding {
|
||||
}
|
||||
|
||||
impl FromStr for ContentEncoding {
|
||||
type Err = Infallible;
|
||||
type Err = ContentEncodingParseError;
|
||||
|
||||
fn from_str(val: &str) -> Result<Self, Self::Err> {
|
||||
Ok(Self::from(val))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for ContentEncoding {
|
||||
fn from(val: &str) -> ContentEncoding {
|
||||
let val = val.trim();
|
||||
|
||||
if val.eq_ignore_ascii_case("br") {
|
||||
ContentEncoding::Br
|
||||
Ok(ContentEncoding::Br)
|
||||
} else if val.eq_ignore_ascii_case("gzip") {
|
||||
ContentEncoding::Gzip
|
||||
Ok(ContentEncoding::Gzip)
|
||||
} else if val.eq_ignore_ascii_case("deflate") {
|
||||
ContentEncoding::Deflate
|
||||
Ok(ContentEncoding::Deflate)
|
||||
} else if val.eq_ignore_ascii_case("zstd") {
|
||||
ContentEncoding::Zstd
|
||||
Ok(ContentEncoding::Zstd)
|
||||
} else {
|
||||
ContentEncoding::default()
|
||||
Err(ContentEncodingParseError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for ContentEncoding {
|
||||
type Error = ContentEncodingParseError;
|
||||
|
||||
fn try_from(val: &str) -> Result<Self, Self::Error> {
|
||||
val.parse()
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoHeaderValue for ContentEncoding {
|
||||
type Error = InvalidHeaderValue;
|
||||
|
||||
|
@ -1,11 +1,14 @@
|
||||
use std::{
|
||||
cmp,
|
||||
convert::{TryFrom, TryInto},
|
||||
fmt, str,
|
||||
fmt,
|
||||
str::{self, FromStr},
|
||||
};
|
||||
|
||||
use derive_more::{Display, Error};
|
||||
|
||||
use crate::error::ParseError;
|
||||
|
||||
const MAX_QUALITY: u16 = 1000;
|
||||
const MAX_FLOAT_QUALITY: f32 = 1.0;
|
||||
|
||||
@ -113,12 +116,12 @@ impl<T: fmt::Display> fmt::Display for QualityItem<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: str::FromStr> str::FromStr for QualityItem<T> {
|
||||
type Err = crate::error::ParseError;
|
||||
impl<T: FromStr> FromStr for QualityItem<T> {
|
||||
type Err = ParseError;
|
||||
|
||||
fn from_str(qitem_str: &str) -> Result<QualityItem<T>, crate::error::ParseError> {
|
||||
fn from_str(qitem_str: &str) -> Result<Self, Self::Err> {
|
||||
if !qitem_str.is_ascii() {
|
||||
return Err(crate::error::ParseError::Header);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
// Set defaults used if parsing fails.
|
||||
@ -139,7 +142,7 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T> {
|
||||
if parts[0].len() < 2 {
|
||||
// Can't possibly be an attribute since an attribute needs at least a name followed
|
||||
// by an equals sign. And bare identifiers are forbidden.
|
||||
return Err(crate::error::ParseError::Header);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
let start = &parts[0][0..2];
|
||||
@ -148,25 +151,21 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T> {
|
||||
let q_val = &parts[0][2..];
|
||||
if q_val.len() > 5 {
|
||||
// longer than 5 indicates an over-precise q-factor
|
||||
return Err(crate::error::ParseError::Header);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
let q_value = q_val
|
||||
.parse::<f32>()
|
||||
.map_err(|_| crate::error::ParseError::Header)?;
|
||||
let q_value = q_val.parse::<f32>().map_err(|_| ParseError::Header)?;
|
||||
|
||||
if (0f32..=1f32).contains(&q_value) {
|
||||
quality = q_value;
|
||||
raw_item = parts[1];
|
||||
} else {
|
||||
return Err(crate::error::ParseError::Header);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let item = raw_item
|
||||
.parse::<T>()
|
||||
.map_err(|_| crate::error::ParseError::Header)?;
|
||||
let item = raw_item.parse::<T>().map_err(|_| ParseError::Header)?;
|
||||
|
||||
// we already checked above that the quality is within range
|
||||
Ok(QualityItem::new(item, Quality::from_f32(quality)))
|
||||
@ -224,7 +223,7 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
impl str::FromStr for Encoding {
|
||||
impl FromStr for Encoding {
|
||||
type Err = crate::error::ParseError;
|
||||
fn from_str(s: &str) -> Result<Encoding, crate::error::ParseError> {
|
||||
use Encoding::*;
|
||||
|
@ -1,18 +1,20 @@
|
||||
//! HTTP primitives for the Actix ecosystem.
|
||||
//!
|
||||
//! ## Crate Features
|
||||
//! | Feature | Functionality |
|
||||
//! | ---------------- | ----------------------------------------------------- |
|
||||
//! | `openssl` | TLS support via [OpenSSL]. |
|
||||
//! | `rustls` | TLS support via [rustls]. |
|
||||
//! | `compress` | Payload compression support. (Deflate, Gzip & Brotli) |
|
||||
//! | `trust-dns` | Use [trust-dns] as the client DNS resolver. |
|
||||
//! | Feature | Functionality |
|
||||
//! | ------------------- | ------------------------------------------- |
|
||||
//! | `openssl` | TLS support via [OpenSSL]. |
|
||||
//! | `rustls` | TLS support via [rustls]. |
|
||||
//! | `compress-brotli` | Payload compression support: Brotli. |
|
||||
//! | `compress-gzip` | Payload compression support: Deflate, Gzip. |
|
||||
//! | `compress-zstd` | Payload compression support: Zstd. |
|
||||
//! | `trust-dns` | Use [trust-dns] as the client DNS resolver. |
|
||||
//!
|
||||
//! [OpenSSL]: https://crates.io/crates/openssl
|
||||
//! [rustls]: https://crates.io/crates/rustls
|
||||
//! [trust-dns]: https://crates.io/crates/trust-dns
|
||||
|
||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
||||
#![deny(rust_2018_idioms, nonstandard_style, clippy::uninit_assumed_init)]
|
||||
#![allow(
|
||||
clippy::type_complexity,
|
||||
clippy::too_many_arguments,
|
||||
@ -25,14 +27,12 @@
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
pub mod body;
|
||||
mod builder;
|
||||
pub mod client;
|
||||
mod config;
|
||||
#[cfg(feature = "compress")]
|
||||
|
||||
#[cfg(feature = "__compress")]
|
||||
pub mod encoding;
|
||||
mod extensions;
|
||||
pub mod header;
|
||||
|
@ -1,110 +0,0 @@
|
||||
#[macro_export]
|
||||
#[doc(hidden)]
|
||||
macro_rules! downcast_get_type_id {
|
||||
() => {
|
||||
/// A helper method to get the type ID of the type
|
||||
/// this trait is implemented on.
|
||||
/// This method is unsafe to *implement*, since `downcast_ref` relies
|
||||
/// on the returned `TypeId` to perform a cast.
|
||||
///
|
||||
/// Unfortunately, Rust has no notion of a trait method that is
|
||||
/// unsafe to implement (marking it as `unsafe` makes it unsafe
|
||||
/// to *call*). As a workaround, we require this method
|
||||
/// to return a private type along with the `TypeId`. This
|
||||
/// private type (`PrivateHelper`) has a private constructor,
|
||||
/// making it impossible for safe code to construct outside of
|
||||
/// this module. This ensures that safe code cannot violate
|
||||
/// type-safety by implementing this method.
|
||||
///
|
||||
/// We also take `PrivateHelper` as a parameter, to ensure that
|
||||
/// safe code cannot obtain a `PrivateHelper` instance by
|
||||
/// delegating to an existing implementation of `__private_get_type_id__`
|
||||
#[doc(hidden)]
|
||||
fn __private_get_type_id__(
|
||||
&self,
|
||||
_: PrivateHelper,
|
||||
) -> (std::any::TypeId, PrivateHelper)
|
||||
where
|
||||
Self: 'static,
|
||||
{
|
||||
(std::any::TypeId::of::<Self>(), PrivateHelper(()))
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
//Generate implementation for dyn $name
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! downcast {
|
||||
($name:ident) => {
|
||||
/// A struct with a private constructor, for use with
|
||||
/// `__private_get_type_id__`. Its single field is private,
|
||||
/// ensuring that it can only be constructed from this module
|
||||
#[doc(hidden)]
|
||||
pub struct PrivateHelper(());
|
||||
|
||||
impl dyn $name + 'static {
|
||||
/// Downcasts generic body to a specific type.
|
||||
pub fn downcast_ref<T: $name + 'static>(&self) -> Option<&T> {
|
||||
if self.__private_get_type_id__(PrivateHelper(())).0
|
||||
== std::any::TypeId::of::<T>()
|
||||
{
|
||||
// SAFETY: external crates cannot override the default
|
||||
// implementation of `__private_get_type_id__`, since
|
||||
// it requires returning a private type. We can therefore
|
||||
// rely on the returned `TypeId`, which ensures that this
|
||||
// case is correct.
|
||||
unsafe { Some(&*(self as *const dyn $name as *const T)) }
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Downcasts a generic body to a mutable specific type.
|
||||
pub fn downcast_mut<T: $name + 'static>(&mut self) -> Option<&mut T> {
|
||||
if self.__private_get_type_id__(PrivateHelper(())).0
|
||||
== std::any::TypeId::of::<T>()
|
||||
{
|
||||
// SAFETY: external crates cannot override the default
|
||||
// implementation of `__private_get_type_id__`, since
|
||||
// it requires returning a private type. We can therefore
|
||||
// rely on the returned `TypeId`, which ensures that this
|
||||
// case is correct.
|
||||
unsafe {
|
||||
Some(&mut *(self as *const dyn $name as *const T as *mut T))
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#![allow(clippy::upper_case_acronyms)]
|
||||
|
||||
trait MB {
|
||||
downcast_get_type_id!();
|
||||
}
|
||||
|
||||
downcast!(MB);
|
||||
|
||||
impl MB for String {}
|
||||
impl MB for () {}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_any_casting() {
|
||||
let mut body = String::from("hello cast");
|
||||
let resp_body: &mut dyn MB = &mut body;
|
||||
let body = resp_body.downcast_ref::<String>().unwrap();
|
||||
assert_eq!(body, "hello cast");
|
||||
let body = &mut resp_body.downcast_mut::<String>().unwrap();
|
||||
body.push('!');
|
||||
let body = resp_body.downcast_ref::<String>().unwrap();
|
||||
assert_eq!(body, "hello cast!");
|
||||
let not_body = resp_body.downcast_ref::<()>();
|
||||
assert!(not_body.is_none());
|
||||
}
|
||||
}
|
@ -152,15 +152,16 @@ impl RequestHead {
|
||||
|
||||
/// Connection upgrade status
|
||||
pub fn upgrade(&self) -> bool {
|
||||
if let Some(hdr) = self.headers().get(header::CONNECTION) {
|
||||
if let Ok(s) = hdr.to_str() {
|
||||
s.to_ascii_lowercase().contains("upgrade")
|
||||
} else {
|
||||
false
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
self.headers()
|
||||
.get(header::CONNECTION)
|
||||
.map(|hdr| {
|
||||
if let Ok(s) = hdr.to_str() {
|
||||
s.to_ascii_lowercase().contains("upgrade")
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
@ -208,7 +209,7 @@ impl RequestHeadType {
|
||||
impl AsRef<RequestHead> for RequestHeadType {
|
||||
fn as_ref(&self) -> &RequestHead {
|
||||
match self {
|
||||
RequestHeadType::Owned(head) => &head,
|
||||
RequestHeadType::Owned(head) => head,
|
||||
RequestHeadType::Rc(head, _) => head.as_ref(),
|
||||
}
|
||||
}
|
||||
@ -308,13 +309,11 @@ impl ResponseHead {
|
||||
/// Get custom reason for the response
|
||||
#[inline]
|
||||
pub fn reason(&self) -> &str {
|
||||
if let Some(reason) = self.reason {
|
||||
reason
|
||||
} else {
|
||||
self.reason.unwrap_or_else(|| {
|
||||
self.status
|
||||
.canonical_reason()
|
||||
.unwrap_or("<unknown status code>")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[inline]
|
||||
@ -356,7 +355,7 @@ pub struct Message<T: Head> {
|
||||
impl<T: Head> Message<T> {
|
||||
/// Get new message from the pool of objects
|
||||
pub fn new() -> Self {
|
||||
T::with_pool(|p| p.get_message())
|
||||
T::with_pool(MessagePool::get_message)
|
||||
}
|
||||
}
|
||||
|
||||
@ -364,7 +363,7 @@ impl<T: Head> std::ops::Deref for Message<T> {
|
||||
type Target = T;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.head.as_ref()
|
||||
self.head.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -15,7 +15,7 @@ use crate::{
|
||||
HttpMessage,
|
||||
};
|
||||
|
||||
/// Request
|
||||
/// An HTTP request.
|
||||
pub struct Request<P = PayloadStream> {
|
||||
pub(crate) payload: Payload<P>,
|
||||
pub(crate) head: Message<RequestHead>,
|
||||
|
@ -3,6 +3,10 @@
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 0.4.0-beta.6 - 2021-09-09
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
|
||||
## 0.4.0-beta.5 - 2021-06-17
|
||||
* No notable changes.
|
||||
|
||||
|
@ -1,13 +1,11 @@
|
||||
[package]
|
||||
name = "actix-multipart"
|
||||
version = "0.4.0-beta.5"
|
||||
version = "0.4.0-beta.6"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Multipart form support for Actix Web"
|
||||
readme = "README.md"
|
||||
keywords = ["http", "web", "framework", "async", "futures"]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
documentation = "https://docs.rs/actix-multipart"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
@ -16,7 +14,7 @@ name = "actix_multipart"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
actix-web = { version = "4.0.0-beta.7", default-features = false }
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false }
|
||||
actix-utils = "3.0.0"
|
||||
|
||||
bytes = "1"
|
||||
@ -31,6 +29,6 @@ twoway = "0.2"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-http = "3.0.0-beta.7"
|
||||
actix-http = "3.0.0-beta.10"
|
||||
tokio = { version = "1", features = ["sync"] }
|
||||
tokio-stream = "0.1"
|
||||
|
@ -3,15 +3,15 @@
|
||||
> Multipart form support for Actix Web.
|
||||
|
||||
[](https://crates.io/crates/actix-multipart)
|
||||
[](https://docs.rs/actix-multipart/0.4.0-beta.5)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
|
||||
[](https://docs.rs/actix-multipart/0.4.0-beta.6)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-multipart/0.4.0-beta.5)
|
||||
[](https://deps.rs/crate/actix-multipart/0.4.0-beta.6)
|
||||
[](https://crates.io/crates/actix-multipart)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
## Documentation & Resources
|
||||
|
||||
- [API Documentation](https://docs.rs/actix-multipart)
|
||||
- [Chat on Gitter](https://gitter.im/actix/actix-web)
|
||||
- Minimum Supported Rust Version (MSRV): 1.46.0
|
||||
- Minimum Supported Rust Version (MSRV): 1.51.0
|
||||
|
131
actix-router/CHANGES.md
Normal file
@ -0,0 +1,131 @@
|
||||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 0.5.0-beta.2 - 2021-09-09
|
||||
* Introduce `ResourceDef::join`. [#380]
|
||||
* Disallow prefix routes with tail segments. [#379]
|
||||
* Enforce path separators on dynamic prefixes. [#378]
|
||||
* Improve malformed path error message. [#384]
|
||||
* Prefix segments now always end with a segment delimiter or end-of-input. [#2355]
|
||||
* Prefix segments with trailing slashes define a trailing empty segment. [#2355]
|
||||
* Support multi-pattern prefixes and joins. [#2356]
|
||||
* `ResourceDef::pattern` now returns the first pattern in multi-pattern resources. [#2356]
|
||||
* Support `build_resource_path` on multi-pattern resources. [#2356]
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
[#378]: https://github.com/actix/actix-net/pull/378
|
||||
[#379]: https://github.com/actix/actix-net/pull/379
|
||||
[#380]: https://github.com/actix/actix-net/pull/380
|
||||
[#384]: https://github.com/actix/actix-net/pull/384
|
||||
[#2355]: https://github.com/actix/actix-web/pull/2355
|
||||
[#2356]: https://github.com/actix/actix-web/pull/2356
|
||||
|
||||
|
||||
## 0.5.0-beta.1 - 2021-07-20
|
||||
* Fix a bug in multi-patterns where static patterns are interpreted as regex. [#366]
|
||||
* Introduce `ResourceDef::pattern_iter` to get an iterator over all patterns in a multi-pattern resource. [#373]
|
||||
* Fix segment interpolation leaving `Path` in unintended state after matching. [#368]
|
||||
* Fix `ResourceDef` `PartialEq` implementation. [#373]
|
||||
* Re-work `IntoPatterns` trait, adding a `Patterns` enum. [#372]
|
||||
* Implement `IntoPatterns` for `bytestring::ByteString`. [#372]
|
||||
* Rename `Path::{len => segment_count}` to be more descriptive of its purpose. [#370]
|
||||
* Rename `ResourceDef::{resource_path => resource_path_from_iter}`. [#371]
|
||||
* `ResourceDef::resource_path_from_iter` now takes an `IntoIterator`. [#373]
|
||||
* Rename `ResourceDef::{resource_path_named => resource_path_from_map}`. [#371]
|
||||
* Rename `ResourceDef::{is_prefix_match => find_match}`. [#373]
|
||||
* Rename `ResourceDef::{match_path => capture_match_info}`. [#373]
|
||||
* Rename `ResourceDef::{match_path_checked => capture_match_info_fn}`. [#373]
|
||||
* Remove `ResourceDef::name_mut` and introduce `ResourceDef::set_name`. [#373]
|
||||
* Rename `Router::{*_checked => *_fn}`. [#373]
|
||||
* Return type of `ResourceDef::name` is now `Option<&str>`. [#373]
|
||||
* Return type of `ResourceDef::pattern` is now `Option<&str>`. [#373]
|
||||
|
||||
[#368]: https://github.com/actix/actix-net/pull/368
|
||||
[#366]: https://github.com/actix/actix-net/pull/366
|
||||
|
||||
[#370]: https://github.com/actix/actix-net/pull/370
|
||||
[#371]: https://github.com/actix/actix-net/pull/371
|
||||
[#372]: https://github.com/actix/actix-net/pull/372
|
||||
[#373]: https://github.com/actix/actix-net/pull/373
|
||||
|
||||
|
||||
## 0.4.0 - 2021-06-06
|
||||
* When matching path parameters, `%25` is now kept in the percent-encoded form; no longer decoded to `%`. [#357]
|
||||
* Path tail patterns now match new lines (`\n`) in request URL. [#360]
|
||||
* Fixed a safety bug where `Path` could return a malformed string after percent decoding. [#359]
|
||||
* Methods `Path::{add, add_static}` now take `impl Into<Cow<'static, str>>`. [#345]
|
||||
|
||||
[#345]: https://github.com/actix/actix-net/pull/345
|
||||
[#357]: https://github.com/actix/actix-net/pull/357
|
||||
[#359]: https://github.com/actix/actix-net/pull/359
|
||||
[#360]: https://github.com/actix/actix-net/pull/360
|
||||
|
||||
|
||||
## 0.3.0 - 2019-12-31
|
||||
* Version was yanked previously. See https://crates.io/crates/actix-router/0.3.0
|
||||
|
||||
|
||||
## 0.2.7 - 2021-02-06
|
||||
* Add `Router::recognize_checked` [#247]
|
||||
|
||||
[#247]: https://github.com/actix/actix-net/pull/247
|
||||
|
||||
|
||||
## 0.2.6 - 2021-01-09
|
||||
* Use `bytestring` version range compatible with Bytes v1.0. [#246]
|
||||
|
||||
[#246]: https://github.com/actix/actix-net/pull/246
|
||||
|
||||
|
||||
## 0.2.5 - 2020-09-20
|
||||
* Fix `from_hex()` method
|
||||
|
||||
|
||||
## 0.2.4 - 2019-12-31
|
||||
* Add `ResourceDef::resource_path_named()` path generation method
|
||||
|
||||
|
||||
## 0.2.3 - 2019-12-25
|
||||
* Add impl `IntoPattern` for `&String`
|
||||
|
||||
|
||||
## 0.2.2 - 2019-12-25
|
||||
* Use `IntoPattern` for `RouterBuilder::path()`
|
||||
|
||||
|
||||
## 0.2.1 - 2019-12-25
|
||||
* Add `IntoPattern` trait
|
||||
* Add multi-pattern resources
|
||||
|
||||
|
||||
## 0.2.0 - 2019-12-07
|
||||
* Update http to 0.2
|
||||
* Update regex to 1.3
|
||||
* Use bytestring instead of string
|
||||
|
||||
|
||||
## 0.1.5 - 2019-05-15
|
||||
* Remove debug prints
|
||||
|
||||
|
||||
## 0.1.4 - 2019-05-15
|
||||
* Fix checked resource match
|
||||
|
||||
|
||||
## 0.1.3 - 2019-04-22
|
||||
* Added support for `remainder match` (i.e. "/path/{tail}*")
|
||||
|
||||
|
||||
## 0.1.2 - 2019-04-07
|
||||
* Export `Quoter` type
|
||||
* Allow to reset `Path` instance
|
||||
|
||||
|
||||
## 0.1.1 - 2019-04-03
|
||||
* Get dynamic segment by name instead of iterator.
|
||||
|
||||
|
||||
## 0.1.0 - 2019-03-09
|
||||
* Initial release
|
38
actix-router/Cargo.toml
Normal file
@ -0,0 +1,38 @@
|
||||
[package]
|
||||
name = "actix-router"
|
||||
version = "0.5.0-beta.2"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>",
|
||||
"Rob Ede <robjtede@icloud.com>",
|
||||
]
|
||||
description = "Resource path matching and router"
|
||||
keywords = ["actix", "router", "routing"]
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
name = "actix_router"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[features]
|
||||
default = ["http"]
|
||||
|
||||
[dependencies]
|
||||
bytestring = ">=0.1.5, <2"
|
||||
firestorm = "0.4"
|
||||
http = { version = "0.2.3", optional = true }
|
||||
log = "0.4"
|
||||
regex = "1.5"
|
||||
serde = "1"
|
||||
|
||||
[dev-dependencies]
|
||||
criterion = { version = "0.3", features = ["html_reports"] }
|
||||
firestorm = { version = "0.4", features = ["enable_system_time"] }
|
||||
http = "0.2.3"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
|
||||
[[bench]]
|
||||
name = "router"
|
||||
harness = false
|
1
actix-router/LICENSE-APACHE
Symbolic link
@ -0,0 +1 @@
|
||||
../LICENSE-APACHE
|
1
actix-router/LICENSE-MIT
Symbolic link
@ -0,0 +1 @@
|
||||
../LICENSE-MIT
|
194
actix-router/benches/router.rs
Normal file
@ -0,0 +1,194 @@
|
||||
//! Based on https://github.com/ibraheemdev/matchit/blob/master/benches/bench.rs
|
||||
|
||||
use criterion::{black_box, criterion_group, criterion_main, Criterion};
|
||||
|
||||
macro_rules! register {
|
||||
(colon) => {{
|
||||
register!(finish => ":p1", ":p2", ":p3", ":p4")
|
||||
}};
|
||||
(brackets) => {{
|
||||
register!(finish => "{p1}", "{p2}", "{p3}", "{p4}")
|
||||
}};
|
||||
(regex) => {{
|
||||
register!(finish => "(.*)", "(.*)", "(.*)", "(.*)")
|
||||
}};
|
||||
(finish => $p1:literal, $p2:literal, $p3:literal, $p4:literal) => {{
|
||||
let arr = [
|
||||
concat!("/authorizations"),
|
||||
concat!("/authorizations/", $p1),
|
||||
concat!("/applications/", $p1, "/tokens/", $p2),
|
||||
concat!("/events"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/events"),
|
||||
concat!("/networks/", $p1, "/", $p2, "/events"),
|
||||
concat!("/orgs/", $p1, "/events"),
|
||||
concat!("/users/", $p1, "/received_events"),
|
||||
concat!("/users/", $p1, "/received_events/public"),
|
||||
concat!("/users/", $p1, "/events"),
|
||||
concat!("/users/", $p1, "/events/public"),
|
||||
concat!("/users/", $p1, "/events/orgs/", $p2),
|
||||
concat!("/feeds"),
|
||||
concat!("/notifications"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/notifications"),
|
||||
concat!("/notifications/threads/", $p1),
|
||||
concat!("/notifications/threads/", $p1, "/subscription"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stargazers"),
|
||||
concat!("/users/", $p1, "/starred"),
|
||||
concat!("/user/starred"),
|
||||
concat!("/user/starred/", $p1, "/", $p2),
|
||||
concat!("/repos/", $p1, "/", $p2, "/subscribers"),
|
||||
concat!("/users/", $p1, "/subscriptions"),
|
||||
concat!("/user/subscriptions"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/subscription"),
|
||||
concat!("/user/subscriptions/", $p1, "/", $p2),
|
||||
concat!("/users/", $p1, "/gists"),
|
||||
concat!("/gists"),
|
||||
concat!("/gists/", $p1),
|
||||
concat!("/gists/", $p1, "/star"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/blobs/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/commits/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/refs"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/tags/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/trees/", $p3),
|
||||
concat!("/issues"),
|
||||
concat!("/user/issues"),
|
||||
concat!("/orgs/", $p1, "/issues"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/assignees"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/assignees/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/events"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/labels/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3),
|
||||
concat!("/emojis"),
|
||||
concat!("/gitignore/templates"),
|
||||
concat!("/gitignore/templates/", $p1),
|
||||
concat!("/meta"),
|
||||
concat!("/rate_limit"),
|
||||
concat!("/users/", $p1, "/orgs"),
|
||||
concat!("/user/orgs"),
|
||||
concat!("/orgs/", $p1),
|
||||
concat!("/orgs/", $p1, "/members"),
|
||||
concat!("/orgs/", $p1, "/members", $p2),
|
||||
concat!("/orgs/", $p1, "/public_members"),
|
||||
concat!("/orgs/", $p1, "/public_members/", $p2),
|
||||
concat!("/orgs/", $p1, "/teams"),
|
||||
concat!("/teams/", $p1),
|
||||
concat!("/teams/", $p1, "/members"),
|
||||
concat!("/teams/", $p1, "/members", $p2),
|
||||
concat!("/teams/", $p1, "/repos"),
|
||||
concat!("/teams/", $p1, "/repos/", $p2, "/", $p3),
|
||||
concat!("/user/teams"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/commits"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/files"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/merge"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/comments"),
|
||||
concat!("/user/repos"),
|
||||
concat!("/users/", $p1, "/repos"),
|
||||
concat!("/orgs/", $p1, "/repos"),
|
||||
concat!("/repositories"),
|
||||
concat!("/repos/", $p1, "/", $p2),
|
||||
concat!("/repos/", $p1, "/", $p2, "/contributors"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/languages"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/teams"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/tags"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/branches"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/branches/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/collaborators"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/collaborators/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/readme"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/keys"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/keys", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/downloads"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/downloads", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/forks"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/hooks"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/hooks", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3, "/assets"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/contributors"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/commit_activity"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/code_frequency"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/participation"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/punch_card"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/statuses/", $p3),
|
||||
concat!("/search/repositories"),
|
||||
concat!("/search/code"),
|
||||
concat!("/search/issues"),
|
||||
concat!("/search/users"),
|
||||
concat!("/legacy/issues/search/", $p1, "/", $p2, "/", $p3, "/", $p4),
|
||||
concat!("/legacy/repos/search/", $p1),
|
||||
concat!("/legacy/user/search/", $p1),
|
||||
concat!("/legacy/user/email/", $p1),
|
||||
concat!("/users/", $p1),
|
||||
concat!("/user"),
|
||||
concat!("/users"),
|
||||
concat!("/user/emails"),
|
||||
concat!("/users/", $p1, "/followers"),
|
||||
concat!("/user/followers"),
|
||||
concat!("/users/", $p1, "/following"),
|
||||
concat!("/user/following"),
|
||||
concat!("/user/following/", $p1),
|
||||
concat!("/users/", $p1, "/following", $p2),
|
||||
concat!("/users/", $p1, "/keys"),
|
||||
concat!("/user/keys"),
|
||||
concat!("/user/keys/", $p1),
|
||||
];
|
||||
std::array::IntoIter::new(arr)
|
||||
}};
|
||||
}
|
||||
|
||||
fn call() -> impl Iterator<Item = &'static str> {
|
||||
let arr = [
|
||||
"/authorizations",
|
||||
"/user/repos",
|
||||
"/repos/rust-lang/rust/stargazers",
|
||||
"/orgs/rust-lang/public_members/nikomatsakis",
|
||||
"/repos/rust-lang/rust/releases/1.51.0",
|
||||
];
|
||||
|
||||
std::array::IntoIter::new(arr)
|
||||
}
|
||||
|
||||
fn compare_routers(c: &mut Criterion) {
|
||||
let mut group = c.benchmark_group("Compare Routers");
|
||||
|
||||
let mut actix = actix_router::Router::<bool>::build();
|
||||
for route in register!(brackets) {
|
||||
actix.path(route, true);
|
||||
}
|
||||
let actix = actix.finish();
|
||||
group.bench_function("actix", |b| {
|
||||
b.iter(|| {
|
||||
for route in call() {
|
||||
let mut path = actix_router::Path::new(route);
|
||||
black_box(actix.recognize(&mut path).unwrap());
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
let regex_set = regex::RegexSet::new(register!(regex)).unwrap();
|
||||
group.bench_function("regex", |b| {
|
||||
b.iter(|| {
|
||||
for route in call() {
|
||||
black_box(regex_set.matches(route));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
group.finish();
|
||||
}
|
||||
|
||||
criterion_group!(benches, compare_routers);
|
||||
criterion_main!(benches);
|
169
actix-router/examples/flamegraph.rs
Normal file
@ -0,0 +1,169 @@
|
||||
macro_rules! register {
|
||||
(brackets) => {{
|
||||
register!(finish => "{p1}", "{p2}", "{p3}", "{p4}")
|
||||
}};
|
||||
(finish => $p1:literal, $p2:literal, $p3:literal, $p4:literal) => {{
|
||||
let arr = [
|
||||
concat!("/authorizations"),
|
||||
concat!("/authorizations/", $p1),
|
||||
concat!("/applications/", $p1, "/tokens/", $p2),
|
||||
concat!("/events"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/events"),
|
||||
concat!("/networks/", $p1, "/", $p2, "/events"),
|
||||
concat!("/orgs/", $p1, "/events"),
|
||||
concat!("/users/", $p1, "/received_events"),
|
||||
concat!("/users/", $p1, "/received_events/public"),
|
||||
concat!("/users/", $p1, "/events"),
|
||||
concat!("/users/", $p1, "/events/public"),
|
||||
concat!("/users/", $p1, "/events/orgs/", $p2),
|
||||
concat!("/feeds"),
|
||||
concat!("/notifications"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/notifications"),
|
||||
concat!("/notifications/threads/", $p1),
|
||||
concat!("/notifications/threads/", $p1, "/subscription"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stargazers"),
|
||||
concat!("/users/", $p1, "/starred"),
|
||||
concat!("/user/starred"),
|
||||
concat!("/user/starred/", $p1, "/", $p2),
|
||||
concat!("/repos/", $p1, "/", $p2, "/subscribers"),
|
||||
concat!("/users/", $p1, "/subscriptions"),
|
||||
concat!("/user/subscriptions"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/subscription"),
|
||||
concat!("/user/subscriptions/", $p1, "/", $p2),
|
||||
concat!("/users/", $p1, "/gists"),
|
||||
concat!("/gists"),
|
||||
concat!("/gists/", $p1),
|
||||
concat!("/gists/", $p1, "/star"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/blobs/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/commits/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/refs"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/tags/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/trees/", $p3),
|
||||
concat!("/issues"),
|
||||
concat!("/user/issues"),
|
||||
concat!("/orgs/", $p1, "/issues"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/assignees"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/assignees/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/events"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/labels/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3),
|
||||
concat!("/emojis"),
|
||||
concat!("/gitignore/templates"),
|
||||
concat!("/gitignore/templates/", $p1),
|
||||
concat!("/meta"),
|
||||
concat!("/rate_limit"),
|
||||
concat!("/users/", $p1, "/orgs"),
|
||||
concat!("/user/orgs"),
|
||||
concat!("/orgs/", $p1),
|
||||
concat!("/orgs/", $p1, "/members"),
|
||||
concat!("/orgs/", $p1, "/members", $p2),
|
||||
concat!("/orgs/", $p1, "/public_members"),
|
||||
concat!("/orgs/", $p1, "/public_members/", $p2),
|
||||
concat!("/orgs/", $p1, "/teams"),
|
||||
concat!("/teams/", $p1),
|
||||
concat!("/teams/", $p1, "/members"),
|
||||
concat!("/teams/", $p1, "/members", $p2),
|
||||
concat!("/teams/", $p1, "/repos"),
|
||||
concat!("/teams/", $p1, "/repos/", $p2, "/", $p3),
|
||||
concat!("/user/teams"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/commits"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/files"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/merge"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/comments"),
|
||||
concat!("/user/repos"),
|
||||
concat!("/users/", $p1, "/repos"),
|
||||
concat!("/orgs/", $p1, "/repos"),
|
||||
concat!("/repositories"),
|
||||
concat!("/repos/", $p1, "/", $p2),
|
||||
concat!("/repos/", $p1, "/", $p2, "/contributors"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/languages"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/teams"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/tags"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/branches"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/branches/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/collaborators"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/collaborators/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/readme"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/keys"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/keys", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/downloads"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/downloads", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/forks"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/hooks"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/hooks", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3, "/assets"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/contributors"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/commit_activity"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/code_frequency"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/participation"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/punch_card"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/statuses/", $p3),
|
||||
concat!("/search/repositories"),
|
||||
concat!("/search/code"),
|
||||
concat!("/search/issues"),
|
||||
concat!("/search/users"),
|
||||
concat!("/legacy/issues/search/", $p1, "/", $p2, "/", $p3, "/", $p4),
|
||||
concat!("/legacy/repos/search/", $p1),
|
||||
concat!("/legacy/user/search/", $p1),
|
||||
concat!("/legacy/user/email/", $p1),
|
||||
concat!("/users/", $p1),
|
||||
concat!("/user"),
|
||||
concat!("/users"),
|
||||
concat!("/user/emails"),
|
||||
concat!("/users/", $p1, "/followers"),
|
||||
concat!("/user/followers"),
|
||||
concat!("/users/", $p1, "/following"),
|
||||
concat!("/user/following"),
|
||||
concat!("/user/following/", $p1),
|
||||
concat!("/users/", $p1, "/following", $p2),
|
||||
concat!("/users/", $p1, "/keys"),
|
||||
concat!("/user/keys"),
|
||||
concat!("/user/keys/", $p1),
|
||||
];
|
||||
|
||||
arr.to_vec()
|
||||
}};
|
||||
}
|
||||
|
||||
static PATHS: [&str; 5] = [
|
||||
"/authorizations",
|
||||
"/user/repos",
|
||||
"/repos/rust-lang/rust/stargazers",
|
||||
"/orgs/rust-lang/public_members/nikomatsakis",
|
||||
"/repos/rust-lang/rust/releases/1.51.0",
|
||||
];
|
||||
|
||||
fn main() {
|
||||
let mut router = actix_router::Router::<bool>::build();
|
||||
|
||||
for route in register!(brackets) {
|
||||
router.path(route, true);
|
||||
}
|
||||
|
||||
let actix = router.finish();
|
||||
|
||||
if firestorm::enabled() {
|
||||
firestorm::bench("target", || {
|
||||
for &route in &PATHS {
|
||||
let mut path = actix_router::Path::new(route);
|
||||
actix.recognize(&mut path).unwrap();
|
||||
}
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
}
|
723
actix-router/src/de.rs
Normal file
@ -0,0 +1,723 @@
|
||||
use serde::de::{self, Deserializer, Error as DeError, Visitor};
|
||||
use serde::forward_to_deserialize_any;
|
||||
|
||||
use crate::path::{Path, PathIter};
|
||||
use crate::ResourcePath;
|
||||
|
||||
macro_rules! unsupported_type {
|
||||
($trait_fn:ident, $name:expr) => {
|
||||
fn $trait_fn<V>(self, _: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom(concat!(
|
||||
"unsupported type: ",
|
||||
$name
|
||||
)))
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! parse_single_value {
|
||||
($trait_fn:ident, $visit_fn:ident, $tp:tt) => {
|
||||
fn $trait_fn<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
if self.path.segment_count() != 1 {
|
||||
Err(de::value::Error::custom(
|
||||
format!(
|
||||
"wrong number of parameters: {} expected 1",
|
||||
self.path.segment_count()
|
||||
)
|
||||
.as_str(),
|
||||
))
|
||||
} else {
|
||||
let v = self.path[0].parse().map_err(|_| {
|
||||
de::value::Error::custom(format!(
|
||||
"can not parse {:?} to a {}",
|
||||
&self.path[0], $tp
|
||||
))
|
||||
})?;
|
||||
visitor.$visit_fn(v)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub struct PathDeserializer<'de, T: ResourcePath> {
|
||||
path: &'de Path<T>,
|
||||
}
|
||||
|
||||
impl<'de, T: ResourcePath + 'de> PathDeserializer<'de, T> {
|
||||
pub fn new(path: &'de Path<T>) -> Self {
|
||||
PathDeserializer { path }
|
||||
}
|
||||
}
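// `PathDeserializer` is normally used indirectly through `Path::load`, which just
// forwards to `PathDeserializer::new` (see `path.rs`). A sketch of that round trip,
// mirroring the tests at the bottom of this file:
//
//     #[derive(serde::Deserialize)]
//     struct Params {
//         key: String,
//         value: String,
//     }
//
//     let mut router = crate::Router::<()>::build();
//     router.path("/{key}/{value}/", ());
//     let router = router.finish();
//
//     let mut path = crate::Path::new("/name/user1/");
//     assert!(router.recognize(&mut path).is_some());
//
//     let params: Params = path.load().unwrap();
//     assert_eq!(params.key, "name");
//     assert_eq!(params.value, "user1");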
|
||||
|
||||
impl<'de, T: ResourcePath + 'de> Deserializer<'de> for PathDeserializer<'de, T> {
|
||||
type Error = de::value::Error;
|
||||
|
||||
fn deserialize_map<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_map(ParamsDeserializer {
|
||||
params: self.path.iter(),
|
||||
current: None,
|
||||
})
|
||||
}
|
||||
|
||||
fn deserialize_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
_: &'static [&'static str],
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
self.deserialize_map(visitor)
|
||||
}
|
||||
|
||||
fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_unit()
|
||||
}
|
||||
|
||||
fn deserialize_unit_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
self.deserialize_unit(visitor)
|
||||
}
|
||||
|
||||
fn deserialize_newtype_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_newtype_struct(self)
|
||||
}
|
||||
|
||||
fn deserialize_tuple<V>(self, len: usize, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
if self.path.segment_count() < len {
|
||||
Err(de::value::Error::custom(
|
||||
format!(
|
||||
"wrong number of parameters: {} expected {}",
|
||||
self.path.segment_count(),
|
||||
len
|
||||
)
|
||||
.as_str(),
|
||||
))
|
||||
} else {
|
||||
visitor.visit_seq(ParamsSeq {
|
||||
params: self.path.iter(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_tuple_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
len: usize,
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
if self.path.segment_count() < len {
|
||||
Err(de::value::Error::custom(
|
||||
format!(
|
||||
"wrong number of parameters: {} expected {}",
|
||||
self.path.segment_count(),
|
||||
len
|
||||
)
|
||||
.as_str(),
|
||||
))
|
||||
} else {
|
||||
visitor.visit_seq(ParamsSeq {
|
||||
params: self.path.iter(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_enum<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
_: &'static [&'static str],
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
if self.path.is_empty() {
|
||||
Err(de::value::Error::custom("expected at least one parameter"))
|
||||
} else {
|
||||
visitor.visit_enum(ValueEnum {
|
||||
value: &self.path[0],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_str<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
if self.path.segment_count() != 1 {
|
||||
Err(de::value::Error::custom(
|
||||
format!(
|
||||
"wrong number of parameters: {} expected 1",
|
||||
self.path.segment_count()
|
||||
)
|
||||
.as_str(),
|
||||
))
|
||||
} else {
|
||||
visitor.visit_str(&self.path[0])
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_seq(ParamsSeq {
|
||||
params: self.path.iter(),
|
||||
})
|
||||
}
|
||||
|
||||
unsupported_type!(deserialize_any, "'any'");
|
||||
unsupported_type!(deserialize_bytes, "bytes");
|
||||
unsupported_type!(deserialize_option, "Option<T>");
|
||||
unsupported_type!(deserialize_identifier, "identifier");
|
||||
unsupported_type!(deserialize_ignored_any, "ignored_any");
|
||||
|
||||
parse_single_value!(deserialize_bool, visit_bool, "bool");
|
||||
parse_single_value!(deserialize_i8, visit_i8, "i8");
|
||||
parse_single_value!(deserialize_i16, visit_i16, "i16");
|
||||
parse_single_value!(deserialize_i32, visit_i32, "i32");
|
||||
parse_single_value!(deserialize_i64, visit_i64, "i64");
|
||||
parse_single_value!(deserialize_u8, visit_u8, "u8");
|
||||
parse_single_value!(deserialize_u16, visit_u16, "u16");
|
||||
parse_single_value!(deserialize_u32, visit_u32, "u32");
|
||||
parse_single_value!(deserialize_u64, visit_u64, "u64");
|
||||
parse_single_value!(deserialize_f32, visit_f32, "f32");
|
||||
parse_single_value!(deserialize_f64, visit_f64, "f64");
|
||||
parse_single_value!(deserialize_string, visit_string, "String");
|
||||
parse_single_value!(deserialize_byte_buf, visit_string, "String");
|
||||
parse_single_value!(deserialize_char, visit_char, "char");
|
||||
}
|
||||
|
||||
struct ParamsDeserializer<'de, T: ResourcePath> {
|
||||
params: PathIter<'de, T>,
|
||||
current: Option<(&'de str, &'de str)>,
|
||||
}
|
||||
|
||||
impl<'de, T: ResourcePath> de::MapAccess<'de> for ParamsDeserializer<'de, T> {
|
||||
type Error = de::value::Error;
|
||||
|
||||
fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
|
||||
where
|
||||
K: de::DeserializeSeed<'de>,
|
||||
{
|
||||
self.current = self.params.next().map(|ref item| (item.0, item.1));
|
||||
match self.current {
|
||||
Some((key, _)) => Ok(Some(seed.deserialize(Key { key })?)),
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: de::DeserializeSeed<'de>,
|
||||
{
|
||||
if let Some((_, value)) = self.current.take() {
|
||||
seed.deserialize(Value { value })
|
||||
} else {
|
||||
Err(de::value::Error::custom("unexpected item"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Key<'de> {
|
||||
key: &'de str,
|
||||
}
|
||||
|
||||
impl<'de> Deserializer<'de> for Key<'de> {
|
||||
type Error = de::value::Error;
|
||||
|
||||
fn deserialize_identifier<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_str(self.key)
|
||||
}
|
||||
|
||||
fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("Unexpected"))
|
||||
}
|
||||
|
||||
forward_to_deserialize_any! {
|
||||
bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
|
||||
byte_buf option unit unit_struct newtype_struct seq tuple
|
||||
tuple_struct map struct enum ignored_any
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! parse_value {
|
||||
($trait_fn:ident, $visit_fn:ident, $tp:tt) => {
|
||||
fn $trait_fn<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
let v = self.value.parse().map_err(|_| {
|
||||
de::value::Error::custom(format!("can not parse {:?} to a {}", self.value, $tp))
|
||||
})?;
|
||||
visitor.$visit_fn(v)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
struct Value<'de> {
|
||||
value: &'de str,
|
||||
}
|
||||
|
||||
impl<'de> Deserializer<'de> for Value<'de> {
|
||||
type Error = de::value::Error;
|
||||
|
||||
parse_value!(deserialize_bool, visit_bool, "bool");
|
||||
parse_value!(deserialize_i8, visit_i8, "i8");
|
||||
parse_value!(deserialize_i16, visit_i16, "i16");
|
||||
parse_value!(deserialize_i32, visit_i32, "i32");
|
||||
parse_value!(deserialize_i64, visit_i64, "i64");
|
||||
parse_value!(deserialize_u8, visit_u8, "u8");
|
||||
parse_value!(deserialize_u16, visit_u16, "u16");
|
||||
parse_value!(deserialize_u32, visit_u32, "u32");
|
||||
parse_value!(deserialize_u64, visit_u64, "u64");
|
||||
parse_value!(deserialize_f32, visit_f32, "f32");
|
||||
parse_value!(deserialize_f64, visit_f64, "f64");
|
||||
parse_value!(deserialize_string, visit_string, "String");
|
||||
parse_value!(deserialize_byte_buf, visit_string, "String");
|
||||
parse_value!(deserialize_char, visit_char, "char");
|
||||
|
||||
fn deserialize_ignored_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_unit()
|
||||
}
|
||||
|
||||
fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_unit()
|
||||
}
|
||||
|
||||
fn deserialize_unit_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_unit()
|
||||
}
|
||||
|
||||
fn deserialize_bytes<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_borrowed_bytes(self.value.as_bytes())
|
||||
}
|
||||
|
||||
fn deserialize_str<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_borrowed_str(self.value)
|
||||
}
|
||||
|
||||
fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_some(self)
|
||||
}
|
||||
|
||||
fn deserialize_enum<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
_: &'static [&'static str],
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_enum(ValueEnum { value: self.value })
|
||||
}
|
||||
|
||||
fn deserialize_newtype_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
visitor: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_newtype_struct(self)
|
||||
}
|
||||
|
||||
fn deserialize_tuple<V>(self, _: usize, _: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("unsupported type: tuple"))
|
||||
}
|
||||
|
||||
fn deserialize_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
_: &'static [&'static str],
|
||||
_: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("unsupported type: struct"))
|
||||
}
|
||||
|
||||
fn deserialize_tuple_struct<V>(
|
||||
self,
|
||||
_: &'static str,
|
||||
_: usize,
|
||||
_: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("unsupported type: tuple struct"))
|
||||
}
|
||||
|
||||
unsupported_type!(deserialize_any, "any");
|
||||
unsupported_type!(deserialize_seq, "seq");
|
||||
unsupported_type!(deserialize_map, "map");
|
||||
unsupported_type!(deserialize_identifier, "identifier");
|
||||
}
|
||||
|
||||
struct ParamsSeq<'de, T: ResourcePath> {
|
||||
params: PathIter<'de, T>,
|
||||
}
|
||||
|
||||
impl<'de, T: ResourcePath> de::SeqAccess<'de> for ParamsSeq<'de, T> {
|
||||
type Error = de::value::Error;
|
||||
|
||||
fn next_element_seed<U>(&mut self, seed: U) -> Result<Option<U::Value>, Self::Error>
|
||||
where
|
||||
U: de::DeserializeSeed<'de>,
|
||||
{
|
||||
match self.params.next() {
|
||||
Some(item) => Ok(Some(seed.deserialize(Value { value: item.1 })?)),
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct ValueEnum<'de> {
|
||||
value: &'de str,
|
||||
}
|
||||
|
||||
impl<'de> de::EnumAccess<'de> for ValueEnum<'de> {
|
||||
type Error = de::value::Error;
|
||||
type Variant = UnitVariant;
|
||||
|
||||
fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error>
|
||||
where
|
||||
V: de::DeserializeSeed<'de>,
|
||||
{
|
||||
Ok((seed.deserialize(Key { key: self.value })?, UnitVariant))
|
||||
}
|
||||
}
|
||||
|
||||
struct UnitVariant;
|
||||
|
||||
impl<'de> de::VariantAccess<'de> for UnitVariant {
|
||||
type Error = de::value::Error;
|
||||
|
||||
fn unit_variant(self) -> Result<(), Self::Error> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn newtype_variant_seed<T>(self, _seed: T) -> Result<T::Value, Self::Error>
|
||||
where
|
||||
T: de::DeserializeSeed<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("not supported"))
|
||||
}
|
||||
|
||||
fn tuple_variant<V>(self, _len: usize, _visitor: V) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("not supported"))
|
||||
}
|
||||
|
||||
fn struct_variant<V>(
|
||||
self,
|
||||
_: &'static [&'static str],
|
||||
_: V,
|
||||
) -> Result<V::Value, Self::Error>
|
||||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
Err(de::value::Error::custom("not supported"))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use serde::{de, Deserialize};
|
||||
|
||||
use super::*;
|
||||
use crate::path::Path;
|
||||
use crate::router::Router;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct MyStruct {
|
||||
key: String,
|
||||
value: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Id {
|
||||
_id: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Test1(String, u32);
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Test2 {
|
||||
key: String,
|
||||
value: u32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, PartialEq)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
enum TestEnum {
|
||||
Val1,
|
||||
Val2,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Test3 {
|
||||
val: TestEnum,
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_request_extract() {
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{key}/{value}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/name/user1/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
|
||||
let s: MyStruct = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(s.key, "name");
|
||||
assert_eq!(s.value, "user1");
|
||||
|
||||
let s: (String, String) =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(s.0, "name");
|
||||
assert_eq!(s.1, "user1");
|
||||
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{key}/{value}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/name/32/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
|
||||
let s: Test1 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(s.0, "name");
|
||||
assert_eq!(s.1, 32);
|
||||
|
||||
let s: Test2 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(s.key, "name");
|
||||
assert_eq!(s.value, 32);
|
||||
|
||||
let s: (String, u8) =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(s.0, "name");
|
||||
assert_eq!(s.1, 32);
|
||||
|
||||
let res: Vec<String> =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(res[0], "name".to_owned());
|
||||
assert_eq!(res[1], "32".to_owned());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extract_path_single() {
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{value}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/32/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
let i: i8 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(i, 32);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extract_enum() {
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{val}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/val1/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
let i: TestEnum = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(i, TestEnum::Val1);
|
||||
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{val1}/{val2}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/val1/val2/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
let i: (TestEnum, TestEnum) =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(i, (TestEnum::Val1, TestEnum::Val2));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extract_enum_value() {
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{val}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/val1/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
let i: Test3 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
|
||||
assert_eq!(i.val, TestEnum::Val1);
|
||||
|
||||
let mut path = Path::new("/val3/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
let i: Result<Test3, de::value::Error> =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path));
|
||||
assert!(i.is_err());
|
||||
assert!(format!("{:?}", i).contains("unknown variant"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extract_errors() {
|
||||
let mut router = Router::<()>::build();
|
||||
router.path("/{value}/", ());
|
||||
let router = router.finish();
|
||||
|
||||
let mut path = Path::new("/name/");
|
||||
assert!(router.recognize(&mut path).is_some());
|
||||
|
||||
let s: Result<Test1, de::value::Error> =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path));
|
||||
assert!(s.is_err());
|
||||
assert!(format!("{:?}", s).contains("wrong number of parameters"));
|
||||
|
||||
let s: Result<Test2, de::value::Error> =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path));
|
||||
assert!(s.is_err());
|
||||
assert!(format!("{:?}", s).contains("can not parse"));
|
||||
|
||||
let s: Result<(String, String), de::value::Error> =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path));
|
||||
assert!(s.is_err());
|
||||
assert!(format!("{:?}", s).contains("wrong number of parameters"));
|
||||
|
||||
let s: Result<u32, de::value::Error> =
|
||||
de::Deserialize::deserialize(PathDeserializer::new(&path));
|
||||
assert!(s.is_err());
|
||||
assert!(format!("{:?}", s).contains("can not parse"));
|
||||
}
|
||||
|
||||
// #[test]
|
||||
// fn test_extract_path_decode() {
|
||||
// let mut router = Router::<()>::default();
|
||||
// router.register_resource(Resource::new(ResourceDef::new("/{value}/")));
|
||||
|
||||
// macro_rules! test_single_value {
|
||||
// ($value:expr, $expected:expr) => {{
|
||||
// let req = TestRequest::with_uri($value).finish();
|
||||
// let info = router.recognize(&req, &(), 0);
|
||||
// let req = req.with_route_info(info);
|
||||
// assert_eq!(
|
||||
// *Path::<String>::from_request(&req, &PathConfig::default()).unwrap(),
|
||||
// $expected
|
||||
// );
|
||||
// }};
|
||||
// }
|
||||
|
||||
// test_single_value!("/%25/", "%");
|
||||
// test_single_value!("/%40%C2%A3%24%25%5E%26%2B%3D/", "@£$%^&+=");
|
||||
// test_single_value!("/%2B/", "+");
|
||||
// test_single_value!("/%252B/", "%2B");
|
||||
// test_single_value!("/%2F/", "/");
|
||||
// test_single_value!("/%252F/", "%2F");
|
||||
// test_single_value!(
|
||||
// "/http%3A%2F%2Flocalhost%3A80%2Ffoo/",
|
||||
// "http://localhost:80/foo"
|
||||
// );
|
||||
// test_single_value!("/%2Fvar%2Flog%2Fsyslog/", "/var/log/syslog");
|
||||
// test_single_value!(
|
||||
// "/http%3A%2F%2Flocalhost%3A80%2Ffile%2F%252Fvar%252Flog%252Fsyslog/",
|
||||
// "http://localhost:80/file/%2Fvar%2Flog%2Fsyslog"
|
||||
// );
|
||||
|
||||
// let req = TestRequest::with_uri("/%25/7/?id=test").finish();
|
||||
|
||||
// let mut router = Router::<()>::default();
|
||||
// router.register_resource(Resource::new(ResourceDef::new("/{key}/{value}/")));
|
||||
// let info = router.recognize(&req, &(), 0);
|
||||
// let req = req.with_route_info(info);
|
||||
|
||||
// let s = Path::<Test2>::from_request(&req, &PathConfig::default()).unwrap();
|
||||
// assert_eq!(s.key, "%");
|
||||
// assert_eq!(s.value, 7);
|
||||
|
||||
// let s = Path::<(String, String)>::from_request(&req, &PathConfig::default()).unwrap();
|
||||
// assert_eq!(s.0, "%");
|
||||
// assert_eq!(s.1, "7");
|
||||
// }
|
||||
|
||||
// #[test]
|
||||
// fn test_extract_path_no_decode() {
|
||||
// let mut router = Router::<()>::default();
|
||||
// router.register_resource(Resource::new(ResourceDef::new("/{value}/")));
|
||||
|
||||
// let req = TestRequest::with_uri("/%25/").finish();
|
||||
// let info = router.recognize(&req, &(), 0);
|
||||
// let req = req.with_route_info(info);
|
||||
// assert_eq!(
|
||||
// *Path::<String>::from_request(&req, &&PathConfig::default().disable_decoding())
|
||||
// .unwrap(),
|
||||
// "%25"
|
||||
// );
|
||||
// }
|
||||
}
|
149
actix-router/src/lib.rs
Normal file
@ -0,0 +1,149 @@
|
||||
//! Resource path matching and router.
|
||||
|
||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||
|
||||
mod de;
|
||||
mod path;
|
||||
mod resource;
|
||||
mod router;
|
||||
|
||||
pub use self::de::PathDeserializer;
|
||||
pub use self::path::Path;
|
||||
pub use self::resource::ResourceDef;
|
||||
pub use self::router::{ResourceInfo, Router, RouterBuilder};
|
||||
|
||||
// TODO: this trait is necessary, document it
|
||||
// see impl Resource for ServiceRequest
|
||||
pub trait Resource<T: ResourcePath> {
|
||||
fn resource_path(&mut self) -> &mut Path<T>;
|
||||
}
|
||||
|
||||
pub trait ResourcePath {
|
||||
fn path(&self) -> &str;
|
||||
}
|
||||
|
||||
impl ResourcePath for String {
|
||||
fn path(&self) -> &str {
|
||||
self.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ResourcePath for &'a str {
|
||||
fn path(&self) -> &str {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl ResourcePath for bytestring::ByteString {
|
||||
fn path(&self) -> &str {
|
||||
&*self
|
||||
}
|
||||
}
|
||||
|
||||
/// One or many patterns.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum Patterns {
|
||||
Single(String),
|
||||
List(Vec<String>),
|
||||
}
|
||||
|
||||
impl Patterns {
|
||||
pub fn is_empty(&self) -> bool {
|
||||
match self {
|
||||
Patterns::Single(_) => false,
|
||||
Patterns::List(pats) => pats.is_empty(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper trait for types that can be converted to one or more path patterns.
|
||||
pub trait IntoPatterns {
|
||||
fn patterns(&self) -> Patterns;
|
||||
}
|
||||
|
||||
impl IntoPatterns for String {
|
||||
fn patterns(&self) -> Patterns {
|
||||
Patterns::Single(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoPatterns for &'a String {
|
||||
fn patterns(&self) -> Patterns {
|
||||
Patterns::Single((*self).clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoPatterns for &'a str {
|
||||
fn patterns(&self) -> Patterns {
|
||||
Patterns::Single((*self).to_owned())
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoPatterns for bytestring::ByteString {
|
||||
fn patterns(&self) -> Patterns {
|
||||
Patterns::Single(self.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoPatterns for Patterns {
|
||||
fn patterns(&self) -> Patterns {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: AsRef<str>> IntoPatterns for Vec<T> {
|
||||
fn patterns(&self) -> Patterns {
|
||||
let mut patterns = self.iter().map(|v| v.as_ref().to_owned());
|
||||
|
||||
match patterns.size_hint() {
|
||||
(1, _) => Patterns::Single(patterns.next().unwrap()),
|
||||
_ => Patterns::List(patterns.collect()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! array_patterns_single (($tp:ty) => {
|
||||
impl IntoPatterns for [$tp; 1] {
|
||||
fn patterns(&self) -> Patterns {
|
||||
Patterns::Single(self[0].to_owned())
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
macro_rules! array_patterns_multiple (($tp:ty, $str_fn:expr, $($num:tt) +) => {
|
||||
// for each array length specified in $num
|
||||
$(
|
||||
impl IntoPatterns for [$tp; $num] {
|
||||
fn patterns(&self) -> Patterns {
|
||||
Patterns::List(self.iter().map($str_fn).collect())
|
||||
}
|
||||
}
|
||||
)+
|
||||
});
|
||||
|
||||
array_patterns_single!(&str);
|
||||
array_patterns_multiple!(&str, |&v| v.to_owned(), 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16);
|
||||
|
||||
array_patterns_single!(String);
|
||||
array_patterns_multiple!(String, |v| v.clone(), 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16);
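// A short illustration of how the impls above resolve, using only items defined in
// this module (with the `IntoPatterns` trait in scope): single strings and one-element
// arrays become `Patterns::Single`, while longer arrays and `Vec`s become `Patterns::List`.
//
//     assert_eq!("/a".patterns(), Patterns::Single("/a".to_owned()));
//     assert_eq!(["/a"].patterns(), Patterns::Single("/a".to_owned()));
//     assert_eq!(
//         ["/a", "/b"].patterns(),
//         Patterns::List(vec!["/a".to_owned(), "/b".to_owned()])
//     );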
|
||||
|
||||
#[cfg(feature = "http")]
|
||||
mod url;
|
||||
|
||||
#[cfg(feature = "http")]
|
||||
pub use self::url::{Quoter, Url};
|
||||
|
||||
#[cfg(feature = "http")]
|
||||
mod http_impls {
|
||||
use http::Uri;
|
||||
|
||||
use super::ResourcePath;
|
||||
|
||||
impl ResourcePath for Uri {
|
||||
fn path(&self) -> &str {
|
||||
self.path()
|
||||
}
|
||||
}
|
||||
}
|
220
actix-router/src/path.rs
Normal file
@ -0,0 +1,220 @@
|
||||
use std::borrow::Cow;
|
||||
use std::ops::Index;
|
||||
|
||||
use firestorm::profile_method;
|
||||
use serde::de;
|
||||
|
||||
use crate::{de::PathDeserializer, Resource, ResourcePath};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) enum PathItem {
|
||||
Static(Cow<'static, str>),
|
||||
Segment(u16, u16),
|
||||
}
|
||||
|
||||
impl Default for PathItem {
|
||||
fn default() -> Self {
|
||||
Self::Static(Cow::Borrowed(""))
|
||||
}
|
||||
}
|
||||
|
||||
/// Resource path match information.
|
||||
///
|
||||
/// If the resource path contains variable patterns, `Path` stores them.
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct Path<T> {
|
||||
path: T,
|
||||
pub(crate) skip: u16,
|
||||
pub(crate) segments: Vec<(Cow<'static, str>, PathItem)>,
|
||||
}
|
||||
|
||||
impl<T: ResourcePath> Path<T> {
|
||||
pub fn new(path: T) -> Path<T> {
|
||||
Path {
|
||||
path,
|
||||
skip: 0,
|
||||
segments: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get reference to inner path instance.
|
||||
#[inline]
|
||||
pub fn get_ref(&self) -> &T {
|
||||
&self.path
|
||||
}
|
||||
|
||||
/// Get mutable reference to inner path instance.
|
||||
#[inline]
|
||||
pub fn get_mut(&mut self) -> &mut T {
|
||||
&mut self.path
|
||||
}
|
||||
|
||||
/// Returns the path, with the first `skip` characters removed.
|
||||
#[inline]
|
||||
pub fn path(&self) -> &str {
|
||||
profile_method!(path);
|
||||
|
||||
let skip = self.skip as usize;
|
||||
let path = self.path.path();
|
||||
if skip <= path.len() {
|
||||
&path[skip..]
|
||||
} else {
|
||||
""
|
||||
}
|
||||
}
|
||||
|
||||
/// Set new path.
|
||||
#[inline]
|
||||
pub fn set(&mut self, path: T) {
|
||||
self.skip = 0;
|
||||
self.path = path;
|
||||
self.segments.clear();
|
||||
}
|
||||
|
||||
/// Reset state.
|
||||
#[inline]
|
||||
pub fn reset(&mut self) {
|
||||
self.skip = 0;
|
||||
self.segments.clear();
|
||||
}
|
||||
|
||||
/// Skip first `n` chars in path.
|
||||
#[inline]
|
||||
pub fn skip(&mut self, n: u16) {
|
||||
self.skip += n;
|
||||
}
|
||||
|
||||
pub(crate) fn add(&mut self, name: impl Into<Cow<'static, str>>, value: PathItem) {
|
||||
profile_method!(add);
|
||||
|
||||
match value {
|
||||
PathItem::Static(s) => self.segments.push((name.into(), PathItem::Static(s))),
|
||||
PathItem::Segment(begin, end) => self.segments.push((
|
||||
name.into(),
|
||||
PathItem::Segment(self.skip + begin, self.skip + end),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn add_static(
|
||||
&mut self,
|
||||
name: impl Into<Cow<'static, str>>,
|
||||
value: impl Into<Cow<'static, str>>,
|
||||
) {
|
||||
self.segments
|
||||
.push((name.into(), PathItem::Static(value.into())));
|
||||
}
|
||||
|
||||
/// Check if there are any matched patterns.
|
||||
#[inline]
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.segments.is_empty()
|
||||
}
|
||||
|
||||
/// Returns number of interpolated segments.
|
||||
#[inline]
|
||||
pub fn segment_count(&self) -> usize {
|
||||
self.segments.len()
|
||||
}
|
||||
|
||||
/// Get matched parameter by name without type conversion
|
||||
pub fn get(&self, name: &str) -> Option<&str> {
|
||||
profile_method!(get);
|
||||
|
||||
for (seg_name, val) in self.segments.iter() {
|
||||
if name == seg_name {
|
||||
return match val {
|
||||
PathItem::Static(ref s) => Some(s),
|
||||
PathItem::Segment(s, e) => {
|
||||
Some(&self.path.path()[(*s as usize)..(*e as usize)])
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Get unprocessed part of the path
|
||||
pub fn unprocessed(&self) -> &str {
|
||||
&self.path.path()[(self.skip as usize)..]
|
||||
}
|
||||
|
||||
/// Get matched parameter by name.
|
||||
///
|
||||
/// If the keyed parameter is not available, an empty string is used as the default value.
|
||||
pub fn query(&self, key: &str) -> &str {
|
||||
profile_method!(query);
|
||||
|
||||
if let Some(s) = self.get(key) {
|
||||
s
|
||||
} else {
|
||||
""
|
||||
}
|
||||
}
|
||||
|
||||
/// Return iterator to items in parameter container.
|
||||
pub fn iter(&self) -> PathIter<'_, T> {
|
||||
PathIter {
|
||||
idx: 0,
|
||||
params: self,
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to deserialize matching parameters to a specified type `U`
|
||||
pub fn load<'de, U: serde::Deserialize<'de>>(&'de self) -> Result<U, de::value::Error> {
|
||||
profile_method!(load);
|
||||
de::Deserialize::deserialize(PathDeserializer::new(self))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct PathIter<'a, T> {
|
||||
idx: usize,
|
||||
params: &'a Path<T>,
|
||||
}
|
||||
|
||||
impl<'a, T: ResourcePath> Iterator for PathIter<'a, T> {
|
||||
type Item = (&'a str, &'a str);
|
||||
|
||||
#[inline]
|
||||
fn next(&mut self) -> Option<(&'a str, &'a str)> {
|
||||
if self.idx < self.params.segment_count() {
|
||||
let idx = self.idx;
|
||||
let res = match self.params.segments[idx].1 {
|
||||
PathItem::Static(ref s) => s,
|
||||
PathItem::Segment(s, e) => &self.params.path.path()[(s as usize)..(e as usize)],
|
||||
};
|
||||
self.idx += 1;
|
||||
return Some((&self.params.segments[idx].0, res));
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ResourcePath> Index<&'a str> for Path<T> {
|
||||
type Output = str;
|
||||
|
||||
fn index(&self, name: &'a str) -> &str {
|
||||
self.get(name)
|
||||
.expect("Value for parameter is not available")
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ResourcePath> Index<usize> for Path<T> {
|
||||
type Output = str;
|
||||
|
||||
fn index(&self, idx: usize) -> &str {
|
||||
match self.segments[idx].1 {
|
||||
PathItem::Static(ref s) => s,
|
||||
PathItem::Segment(s, e) => &self.path.path()[(s as usize)..(e as usize)],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ResourcePath> Resource<T> for Path<T> {
|
||||
fn resource_path(&mut self) -> &mut Self {
|
||||
self
|
||||
}
|
||||
}
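
For orientation, a minimal sketch of how the `Path` accessors above are used together with `ResourceDef`, mirroring the matching calls in the tests later in this diff; the pattern and values are illustrative:

```rust
use actix_router::{Path, ResourceDef};

// Match a pattern against a raw path and read the captured segment.
let resource = ResourceDef::new("/user/{id}");
let mut path = Path::new("/user/2345");
assert!(resource.capture_match_info(&mut path));

assert_eq!(path.get("id"), Some("2345")); // by-name lookup
assert_eq!(&path["id"], "2345");          // panicking Index access
assert_eq!(path.query("missing"), "");    // missing keys default to ""
```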
actix-router/src/resource.rs (new file, 1818 lines): diff suppressed because it is too large

actix-router/src/router.rs (new file, 281 lines)
@ -0,0 +1,281 @@
|
||||
use firestorm::profile_method;
|
||||
|
||||
use crate::{IntoPatterns, Resource, ResourceDef, ResourcePath};
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq)]
|
||||
pub struct ResourceId(pub u16);
|
||||
|
||||
/// Information about the current resource.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ResourceInfo {
|
||||
resource: ResourceId,
|
||||
}
|
||||
|
||||
/// Resource router.
|
||||
// T is the resource itself
|
||||
// U is any other data needed for routing like method guards
|
||||
pub struct Router<T, U = ()> {
|
||||
routes: Vec<(ResourceDef, T, Option<U>)>,
|
||||
}
|
||||
|
||||
impl<T, U> Router<T, U> {
|
||||
pub fn build() -> RouterBuilder<T, U> {
|
||||
RouterBuilder {
|
||||
resources: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn recognize<R, P>(&self, resource: &mut R) -> Option<(&T, ResourceId)>
|
||||
where
|
||||
R: Resource<P>,
|
||||
P: ResourcePath,
|
||||
{
|
||||
profile_method!(recognize);
|
||||
|
||||
for item in self.routes.iter() {
|
||||
if item.0.capture_match_info(resource.resource_path()) {
|
||||
return Some((&item.1, ResourceId(item.0.id())));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn recognize_mut<R, P>(&mut self, resource: &mut R) -> Option<(&mut T, ResourceId)>
|
||||
where
|
||||
R: Resource<P>,
|
||||
P: ResourcePath,
|
||||
{
|
||||
profile_method!(recognize_mut);
|
||||
|
||||
for item in self.routes.iter_mut() {
|
||||
if item.0.capture_match_info(resource.resource_path()) {
|
||||
return Some((&mut item.1, ResourceId(item.0.id())));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn recognize_fn<R, P, F>(&self, resource: &mut R, check: F) -> Option<(&T, ResourceId)>
|
||||
where
|
||||
F: Fn(&R, &Option<U>) -> bool,
|
||||
R: Resource<P>,
|
||||
P: ResourcePath,
|
||||
{
|
||||
profile_method!(recognize_checked);
|
||||
|
||||
for item in self.routes.iter() {
|
||||
if item.0.capture_match_info_fn(resource, &check, &item.2) {
|
||||
return Some((&item.1, ResourceId(item.0.id())));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn recognize_mut_fn<R, P, F>(
|
||||
&mut self,
|
||||
resource: &mut R,
|
||||
check: F,
|
||||
) -> Option<(&mut T, ResourceId)>
|
||||
where
|
||||
F: Fn(&R, &Option<U>) -> bool,
|
||||
R: Resource<P>,
|
||||
P: ResourcePath,
|
||||
{
|
||||
profile_method!(recognize_mut_checked);
|
||||
|
||||
for item in self.routes.iter_mut() {
|
||||
if item.0.capture_match_info_fn(resource, &check, &item.2) {
|
||||
return Some((&mut item.1, ResourceId(item.0.id())));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub struct RouterBuilder<T, U = ()> {
|
||||
resources: Vec<(ResourceDef, T, Option<U>)>,
|
||||
}
|
||||
|
||||
impl<T, U> RouterBuilder<T, U> {
|
||||
/// Register resource for specified path.
|
||||
pub fn path<P: IntoPatterns>(
|
||||
&mut self,
|
||||
path: P,
|
||||
resource: T,
|
||||
) -> &mut (ResourceDef, T, Option<U>) {
|
||||
profile_method!(path);
|
||||
|
||||
self.resources
|
||||
.push((ResourceDef::new(path), resource, None));
|
||||
self.resources.last_mut().unwrap()
|
||||
}
|
||||
|
||||
/// Register resource for specified path prefix.
|
||||
pub fn prefix(&mut self, prefix: &str, resource: T) -> &mut (ResourceDef, T, Option<U>) {
|
||||
profile_method!(prefix);
|
||||
|
||||
self.resources
|
||||
.push((ResourceDef::prefix(prefix), resource, None));
|
||||
self.resources.last_mut().unwrap()
|
||||
}
|
||||
|
||||
/// Register resource for ResourceDef
|
||||
pub fn rdef(&mut self, rdef: ResourceDef, resource: T) -> &mut (ResourceDef, T, Option<U>) {
|
||||
profile_method!(rdef);
|
||||
|
||||
self.resources.push((rdef, resource, None));
|
||||
self.resources.last_mut().unwrap()
|
||||
}
|
||||
|
||||
/// Finish configuration and create router instance.
|
||||
pub fn finish(self) -> Router<T, U> {
|
||||
Router {
|
||||
routes: self.resources,
|
||||
}
|
||||
}
|
||||
}
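
The `U` slot on each route holds auxiliary data (for example a method guard) that `recognize_fn` / `recognize_mut_fn` expose through the `check` closure. A small sketch, assuming the crate-root re-exports (`Router`, `Path`) and that `capture_match_info_fn` only records a match when the closure approves the candidate route:

```rust
use actix_router::{Path, Router};

// Two routes with the same pattern, distinguished by auxiliary data.
let mut builder = Router::<usize, &'static str>::build();
builder.path("/hello", 1).2 = Some("GET");
builder.path("/hello", 2).2 = Some("POST");
let router = builder.finish();

let mut path = Path::new("/hello");
// Only accept routes whose auxiliary data says "POST".
let found = router.recognize_fn(&mut path, |_, data| *data == Some("POST"));
assert_eq!(found.map(|(handler, _)| *handler), Some(2));
```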
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::path::Path;
|
||||
use crate::router::{ResourceId, Router};
|
||||
|
||||
#[allow(clippy::cognitive_complexity)]
|
||||
#[test]
|
||||
fn test_recognizer_1() {
|
||||
let mut router = Router::<usize>::build();
|
||||
router.path("/name", 10).0.set_id(0);
|
||||
router.path("/name/{val}", 11).0.set_id(1);
|
||||
router.path("/name/{val}/index.html", 12).0.set_id(2);
|
||||
router.path("/file/{file}.{ext}", 13).0.set_id(3);
|
||||
router.path("/v{val}/{val2}/index.html", 14).0.set_id(4);
|
||||
router.path("/v/{tail:.*}", 15).0.set_id(5);
|
||||
router.path("/test2/{test}.html", 16).0.set_id(6);
|
||||
router.path("/{test}/index.html", 17).0.set_id(7);
|
||||
let mut router = router.finish();
|
||||
|
||||
let mut path = Path::new("/unknown");
|
||||
assert!(router.recognize_mut(&mut path).is_none());
|
||||
|
||||
let mut path = Path::new("/name");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 10);
|
||||
assert_eq!(info, ResourceId(0));
|
||||
assert!(path.is_empty());
|
||||
|
||||
let mut path = Path::new("/name/value");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 11);
|
||||
assert_eq!(info, ResourceId(1));
|
||||
assert_eq!(path.get("val").unwrap(), "value");
|
||||
assert_eq!(&path["val"], "value");
|
||||
|
||||
let mut path = Path::new("/name/value2/index.html");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 12);
|
||||
assert_eq!(info, ResourceId(2));
|
||||
assert_eq!(path.get("val").unwrap(), "value2");
|
||||
|
||||
let mut path = Path::new("/file/file.gz");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 13);
|
||||
assert_eq!(info, ResourceId(3));
|
||||
assert_eq!(path.get("file").unwrap(), "file");
|
||||
assert_eq!(path.get("ext").unwrap(), "gz");
|
||||
|
||||
let mut path = Path::new("/vtest/ttt/index.html");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 14);
|
||||
assert_eq!(info, ResourceId(4));
|
||||
assert_eq!(path.get("val").unwrap(), "test");
|
||||
assert_eq!(path.get("val2").unwrap(), "ttt");
|
||||
|
||||
let mut path = Path::new("/v/blah-blah/index.html");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 15);
|
||||
assert_eq!(info, ResourceId(5));
|
||||
assert_eq!(path.get("tail").unwrap(), "blah-blah/index.html");
|
||||
|
||||
let mut path = Path::new("/test2/index.html");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 16);
|
||||
assert_eq!(info, ResourceId(6));
|
||||
assert_eq!(path.get("test").unwrap(), "index");
|
||||
|
||||
let mut path = Path::new("/bbb/index.html");
|
||||
let (h, info) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 17);
|
||||
assert_eq!(info, ResourceId(7));
|
||||
assert_eq!(path.get("test").unwrap(), "bbb");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_recognizer_2() {
|
||||
let mut router = Router::<usize>::build();
|
||||
router.path("/index.json", 10);
|
||||
router.path("/{source}.json", 11);
|
||||
let mut router = router.finish();
|
||||
|
||||
let mut path = Path::new("/index.json");
|
||||
let (h, _) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 10);
|
||||
|
||||
let mut path = Path::new("/test.json");
|
||||
let (h, _) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 11);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_recognizer_with_prefix() {
|
||||
let mut router = Router::<usize>::build();
|
||||
router.path("/name", 10).0.set_id(0);
|
||||
router.path("/name/{val}", 11).0.set_id(1);
|
||||
let mut router = router.finish();
|
||||
|
||||
let mut path = Path::new("/name");
|
||||
path.skip(5);
|
||||
assert!(router.recognize_mut(&mut path).is_none());
|
||||
|
||||
let mut path = Path::new("/test/name");
|
||||
path.skip(5);
|
||||
let (h, _) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 10);
|
||||
|
||||
let mut path = Path::new("/test/name/value");
|
||||
path.skip(5);
|
||||
let (h, id) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 11);
|
||||
assert_eq!(id, ResourceId(1));
|
||||
assert_eq!(path.get("val").unwrap(), "value");
|
||||
assert_eq!(&path["val"], "value");
|
||||
|
||||
// same patterns
|
||||
let mut router = Router::<usize>::build();
|
||||
router.path("/name", 10);
|
||||
router.path("/name/{val}", 11);
|
||||
let mut router = router.finish();
|
||||
|
||||
let mut path = Path::new("/name");
|
||||
path.skip(6);
|
||||
assert!(router.recognize_mut(&mut path).is_none());
|
||||
|
||||
let mut path = Path::new("/test2/name");
|
||||
path.skip(6);
|
||||
let (h, _) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 10);
|
||||
|
||||
let mut path = Path::new("/test2/name-test");
|
||||
path.skip(6);
|
||||
assert!(router.recognize_mut(&mut path).is_none());
|
||||
|
||||
let mut path = Path::new("/test2/name/ttt");
|
||||
path.skip(6);
|
||||
let (h, _) = router.recognize_mut(&mut path).unwrap();
|
||||
assert_eq!(*h, 11);
|
||||
assert_eq!(&path["val"], "ttt");
|
||||
}
|
||||
}
actix-router/src/url.rs (new file, 288 lines)
@ -0,0 +1,288 @@
|
||||
use crate::ResourcePath;
|
||||
|
||||
#[allow(dead_code)]
|
||||
const GEN_DELIMS: &[u8] = b":/?#[]@";
|
||||
#[allow(dead_code)]
|
||||
const SUB_DELIMS_WITHOUT_QS: &[u8] = b"!$'()*,";
|
||||
#[allow(dead_code)]
|
||||
const SUB_DELIMS: &[u8] = b"!$'()*,+?=;";
|
||||
#[allow(dead_code)]
|
||||
const RESERVED: &[u8] = b":/?#[]@!$'()*,+?=;";
|
||||
#[allow(dead_code)]
|
||||
const UNRESERVED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
|
||||
ABCDEFGHIJKLMNOPQRSTUVWXYZ
|
||||
1234567890
|
||||
-._~";
|
||||
const ALLOWED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
|
||||
ABCDEFGHIJKLMNOPQRSTUVWXYZ
|
||||
1234567890
|
||||
-._~
|
||||
!$'()*,";
|
||||
const QS: &[u8] = b"+&=;b";
|
||||
|
||||
#[inline]
|
||||
fn bit_at(array: &[u8], ch: u8) -> bool {
|
||||
array[(ch >> 3) as usize] & (1 << (ch & 7)) != 0
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn set_bit(array: &mut [u8], ch: u8) {
|
||||
array[(ch >> 3) as usize] |= 1 << (ch & 7)
|
||||
}
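
The two helpers above treat a 16-byte array as a 128-bit set indexed by ASCII value; a quick sketch of that behaviour:

```rust
// 16 bytes * 8 bits = one flag per ASCII byte in 0..=127.
let mut table = [0u8; 16];
set_bit(&mut table, b'/');

assert!(bit_at(&table, b'/'));
assert!(!bit_at(&table, b'a')); // untouched bytes stay unset
```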
|
||||
|
||||
thread_local! {
|
||||
static DEFAULT_QUOTER: Quoter = Quoter::new(b"@:", b"%/+");
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Debug)]
|
||||
pub struct Url {
|
||||
uri: http::Uri,
|
||||
path: Option<String>,
|
||||
}
|
||||
|
||||
impl Url {
|
||||
pub fn new(uri: http::Uri) -> Url {
|
||||
let path = DEFAULT_QUOTER.with(|q| q.requote(uri.path().as_bytes()));
|
||||
|
||||
Url { uri, path }
|
||||
}
|
||||
|
||||
pub fn with_quoter(uri: http::Uri, quoter: &Quoter) -> Url {
|
||||
Url {
|
||||
path: quoter.requote(uri.path().as_bytes()),
|
||||
uri,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn uri(&self) -> &http::Uri {
|
||||
&self.uri
|
||||
}
|
||||
|
||||
pub fn path(&self) -> &str {
|
||||
if let Some(ref s) = self.path {
|
||||
s
|
||||
} else {
|
||||
self.uri.path()
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn update(&mut self, uri: &http::Uri) {
|
||||
self.uri = uri.clone();
|
||||
self.path = DEFAULT_QUOTER.with(|q| q.requote(uri.path().as_bytes()));
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn update_with_quoter(&mut self, uri: &http::Uri, quoter: &Quoter) {
|
||||
self.uri = uri.clone();
|
||||
self.path = quoter.requote(uri.path().as_bytes());
|
||||
}
|
||||
}
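
A short sketch of the default re-quoting that `Url::new` applies, mirroring the `test_parse_url` case later in this file; the path is illustrative:

```rust
use std::convert::TryFrom;

use actix_router::Url;
use http::Uri;

let uri = Uri::try_from("/user/qwe%25rty").unwrap();
let url = Url::new(uri);

// '%25' decodes to '%', which is protected, so it stays percent-encoded.
assert_eq!(url.path(), "/user/qwe%25rty");
```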
|
||||
|
||||
impl ResourcePath for Url {
|
||||
#[inline]
|
||||
fn path(&self) -> &str {
|
||||
self.path()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Quoter {
|
||||
safe_table: [u8; 16],
|
||||
protected_table: [u8; 16],
|
||||
}
|
||||
|
||||
impl Quoter {
|
||||
pub fn new(safe: &[u8], protected: &[u8]) -> Quoter {
|
||||
let mut q = Quoter {
|
||||
safe_table: [0; 16],
|
||||
protected_table: [0; 16],
|
||||
};
|
||||
|
||||
// prepare safe table
|
||||
for i in 0..128 {
|
||||
if ALLOWED.contains(&i) {
|
||||
set_bit(&mut q.safe_table, i);
|
||||
}
|
||||
if QS.contains(&i) {
|
||||
set_bit(&mut q.safe_table, i);
|
||||
}
|
||||
}
|
||||
|
||||
for ch in safe {
|
||||
set_bit(&mut q.safe_table, *ch)
|
||||
}
|
||||
|
||||
// prepare protected table
|
||||
for ch in protected {
|
||||
set_bit(&mut q.safe_table, *ch);
|
||||
set_bit(&mut q.protected_table, *ch);
|
||||
}
|
||||
|
||||
q
|
||||
}
|
||||
|
||||
pub fn requote(&self, val: &[u8]) -> Option<String> {
|
||||
let mut has_pct = 0;
|
||||
let mut pct = [b'%', 0, 0];
|
||||
let mut idx = 0;
|
||||
let mut cloned: Option<Vec<u8>> = None;
|
||||
|
||||
let len = val.len();
|
||||
while idx < len {
|
||||
let ch = val[idx];
|
||||
|
||||
if has_pct != 0 {
|
||||
pct[has_pct] = val[idx];
|
||||
has_pct += 1;
|
||||
if has_pct == 3 {
|
||||
has_pct = 0;
|
||||
let buf = cloned.as_mut().unwrap();
|
||||
|
||||
if let Some(ch) = restore_ch(pct[1], pct[2]) {
|
||||
if ch < 128 {
|
||||
if bit_at(&self.protected_table, ch) {
|
||||
buf.extend_from_slice(&pct);
|
||||
idx += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if bit_at(&self.safe_table, ch) {
|
||||
buf.push(ch);
|
||||
idx += 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
buf.push(ch);
|
||||
} else {
|
||||
buf.extend_from_slice(&pct[..]);
|
||||
}
|
||||
}
|
||||
} else if ch == b'%' {
|
||||
has_pct = 1;
|
||||
if cloned.is_none() {
|
||||
let mut c = Vec::with_capacity(len);
|
||||
c.extend_from_slice(&val[..idx]);
|
||||
cloned = Some(c);
|
||||
}
|
||||
} else if let Some(ref mut cloned) = cloned {
|
||||
cloned.push(ch)
|
||||
}
|
||||
idx += 1;
|
||||
}
|
||||
|
||||
cloned.map(|data| String::from_utf8_lossy(&data).into_owned())
|
||||
}
|
||||
}
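
A sketch of `requote` using the same tables as `DEFAULT_QUOTER`: protected bytes stay percent-encoded, other percent sequences are decoded, and `None` means the input needed no changes.

```rust
let quoter = Quoter::new(b"@:", b"%/+");

// '%2F' decodes to '/', which is protected, so it is kept encoded;
// '%20' decodes to a plain space and is unescaped.
assert_eq!(
    quoter.requote(b"/a%2Fb%20c").as_deref(),
    Some("/a%2Fb c")
);

// Nothing to do: no percent sequences at all.
assert_eq!(quoter.requote(b"/plain"), None);
```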
|
||||
|
||||
#[inline]
|
||||
fn from_hex(v: u8) -> Option<u8> {
|
||||
if (b'0'..=b'9').contains(&v) {
|
||||
Some(v - 0x30) // ord('0') == 0x30
|
||||
} else if (b'A'..=b'F').contains(&v) {
|
||||
Some(v - 0x41 + 10) // ord('A') == 0x41
|
||||
} else if (b'a'..=b'f').contains(&v) {
|
||||
Some(v - 0x61 + 10) // ord('a') == 0x61
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn restore_ch(d1: u8, d2: u8) -> Option<u8> {
|
||||
from_hex(d1).and_then(|d1| from_hex(d2).map(move |d2| d1 << 4 | d2))
|
||||
}
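
For example, the two hex digits of `%2F` recombine into the byte for `/`:

```rust
assert_eq!(from_hex(b'2'), Some(0x2));
assert_eq!(from_hex(b'F'), Some(0xF));
assert_eq!(restore_ch(b'2', b'F'), Some(0x2F)); // b'/'
assert_eq!(restore_ch(b'2', b'G'), None);       // 'G' is not a hex digit
```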
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use http::Uri;
|
||||
use std::convert::TryFrom;
|
||||
|
||||
use super::*;
|
||||
use crate::{Path, ResourceDef};
|
||||
|
||||
const PROTECTED: &[u8] = b"%/+";
|
||||
|
||||
fn match_url(pattern: &'static str, url: impl AsRef<str>) -> Path<Url> {
|
||||
let re = ResourceDef::new(pattern);
|
||||
let uri = Uri::try_from(url.as_ref()).unwrap();
|
||||
let mut path = Path::new(Url::new(uri));
|
||||
assert!(re.capture_match_info(&mut path));
|
||||
path
|
||||
}
|
||||
|
||||
fn percent_encode(data: &[u8]) -> String {
|
||||
data.iter().map(|c| format!("%{:02X}", c)).collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_url() {
|
||||
let re = "/user/{id}/test";
|
||||
|
||||
let path = match_url(re, "/user/2345/test");
|
||||
assert_eq!(path.get("id").unwrap(), "2345");
|
||||
|
||||
// "%25" should never be decoded into '%' to guarantee the output is a valid
|
||||
// percent-encoded format
|
||||
let path = match_url(re, "/user/qwe%25/test");
|
||||
assert_eq!(path.get("id").unwrap(), "qwe%25");
|
||||
|
||||
let path = match_url(re, "/user/qwe%25rty/test");
|
||||
assert_eq!(path.get("id").unwrap(), "qwe%25rty");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_protected_chars() {
|
||||
let encoded = percent_encode(PROTECTED);
|
||||
let path = match_url("/user/{id}/test", format!("/user/{}/test", encoded));
|
||||
assert_eq!(path.get("id").unwrap(), &encoded);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_non_protecteed_ascii() {
|
||||
let nonprotected_ascii = ('\u{0}'..='\u{7F}')
|
||||
.filter(|&c| c.is_ascii() && !PROTECTED.contains(&(c as u8)))
|
||||
.collect::<String>();
|
||||
let encoded = percent_encode(nonprotected_ascii.as_bytes());
|
||||
let path = match_url("/user/{id}/test", format!("/user/{}/test", encoded));
|
||||
assert_eq!(path.get("id").unwrap(), &nonprotected_ascii);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid_utf8_multibyte() {
|
||||
let test = ('\u{FF00}'..='\u{FFFF}').collect::<String>();
|
||||
let encoded = percent_encode(test.as_bytes());
|
||||
let path = match_url("/a/{id}/b", format!("/a/{}/b", &encoded));
|
||||
assert_eq!(path.get("id").unwrap(), &test);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_utf8() {
|
||||
let invalid_utf8 = percent_encode((0x80..=0xff).collect::<Vec<_>>().as_slice());
|
||||
let uri = Uri::try_from(format!("/{}", invalid_utf8)).unwrap();
|
||||
let path = Path::new(Url::new(uri));
|
||||
|
||||
// We should always get a valid utf8 string
|
||||
assert!(String::from_utf8(path.path().as_bytes().to_owned()).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_from_hex() {
|
||||
let hex = b"0123456789abcdefABCDEF";
|
||||
|
||||
for i in 0..256 {
|
||||
let c = i as u8;
|
||||
if hex.contains(&c) {
|
||||
assert!(from_hex(c).is_some())
|
||||
} else {
|
||||
assert!(from_hex(c).is_none())
|
||||
}
|
||||
}
|
||||
|
||||
let expected = [
|
||||
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 10, 11, 12, 13, 14, 15,
|
||||
];
|
||||
for i in 0..hex.len() {
|
||||
assert_eq!(from_hex(hex[i]).unwrap(), expected[i]);
|
||||
}
|
||||
}
|
||||
}
|
@ -3,6 +3,14 @@
## Unreleased - 2021-xx-xx


## 0.1.0-beta.4 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.


## 0.1.0-beta.3 - 2021-06-20
* No significant changes from `0.1.0-beta.2`.


## 0.1.0-beta.2 - 2021-04-17
* No significant changes from `0.1.0-beta.1`.

@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-test"
|
||||
version = "0.1.0-beta.2"
|
||||
version = "0.1.0-beta.4"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Rob Ede <robjtede@icloud.com>",
|
||||
@ -20,13 +20,13 @@ openssl = ["tls-openssl", "actix-http/openssl"]
|
||||
|
||||
[dependencies]
|
||||
actix-codec = "0.4.0"
|
||||
actix-http = "3.0.0-beta.7"
|
||||
actix-http-test = { version = "3.0.0-beta.4", features = [] }
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-http-test = "3.0.0-beta.5"
|
||||
actix-service = "2.0.0"
|
||||
actix-utils = "3.0.0"
|
||||
actix-web = { version = "4.0.0-beta.7", default-features = false, features = ["cookies"] }
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false, features = ["cookies"] }
|
||||
actix-rt = "2.1"
|
||||
awc = { version = "3.0.0-beta.6", default-features = false, features = ["cookies"] }
|
||||
awc = { version = "3.0.0-beta.8", default-features = false, features = ["cookies"] }
|
||||
|
||||
futures-core = { version = "0.3.7", default-features = false, features = ["std"] }
|
||||
futures-util = { version = "0.3.7", default-features = false, features = [] }
|
||||
|
@ -3,6 +3,16 @@
## Unreleased - 2021-xx-xx


## 4.0.0-beta.7 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.


## 4.0.0-beta.6 - 2021-06-26
* Update `actix` to `0.12`. [#2277]

[#2277]: https://github.com/actix/actix-web/pull/2277


## 4.0.0-beta.5 - 2021-06-17
* No notable changes.

@ -1,13 +1,11 @@
|
||||
[package]
|
||||
name = "actix-web-actors"
|
||||
version = "4.0.0-beta.5"
|
||||
version = "4.0.0-beta.7"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Actix actors support for Actix Web"
|
||||
readme = "README.md"
|
||||
keywords = ["actix", "http", "web", "framework", "async"]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
documentation = "https://docs.rs/actix-web-actors/"
|
||||
repository = "https://github.com/actix/actix-web"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
@ -16,10 +14,10 @@ name = "actix_web_actors"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
actix = { version = "0.11.0-beta.3", default-features = false }
|
||||
actix = { version = "0.12.0", default-features = false }
|
||||
actix-codec = "0.4.0"
|
||||
actix-http = "3.0.0-beta.7"
|
||||
actix-web = { version = "4.0.0-beta.7", default-features = false }
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false }
|
||||
|
||||
bytes = "1"
|
||||
bytestring = "1"
|
||||
@ -29,8 +27,8 @@ tokio = { version = "1", features = ["sync"] }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-test = "0.1.0-beta.2"
|
||||
actix-test = "0.1.0-beta.3"
|
||||
|
||||
awc = { version = "3.0.0-beta.6", default-features = false }
|
||||
awc = { version = "3.0.0-beta.8", default-features = false }
|
||||
env_logger = "0.8"
|
||||
futures-util = { version = "0.3.7", default-features = false }
|
||||
|
@ -3,16 +3,15 @@
|
||||
> Actix actors support for Actix Web.
|
||||
|
||||
[](https://crates.io/crates/actix-web-actors)
|
||||
[](https://docs.rs/actix-web-actors/4.0.0-beta.5)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
|
||||
[](https://docs.rs/actix-web-actors/4.0.0-beta.7)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-web-actors/4.0.0-beta.5)
|
||||
[](https://deps.rs/crate/actix-web-actors/4.0.0-beta.7)
|
||||
[](https://crates.io/crates/actix-web-actors)
|
||||
[](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
## Documentation & Resources
|
||||
|
||||
- [API Documentation](https://docs.rs/actix-web-actors)
|
||||
- [Chat on Gitter](https://gitter.im/actix/actix-web)
|
||||
- Minimum supported Rust version: 1.46 or later
|
||||
- Minimum supported Rust version: 1.51 or later
|
||||
|
@ -3,6 +3,13 @@
## Unreleased - 2021-xx-xx


## 0.5.0-beta.4 - 2021-09-09
* In routing macros, paths are now validated at compile time. [#2350]
* Minimum supported Rust version (MSRV) is now 1.51.

[#2350]: https://github.com/actix/actix-web/pull/2350


## 0.5.0-beta.3 - 2021-06-17
* No notable changes.

@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-web-codegen"
|
||||
version = "0.5.0-beta.3"
|
||||
version = "0.5.0-beta.4"
|
||||
description = "Routing and runtime macros for Actix Web"
|
||||
readme = "README.md"
|
||||
homepage = "https://actix.rs"
|
||||
@ -17,12 +17,13 @@ proc-macro = true
|
||||
quote = "1"
|
||||
syn = { version = "1", features = ["full", "parsing"] }
|
||||
proc-macro2 = "1"
|
||||
actix-router = "0.5.0-beta.2"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-test = "0.1.0-beta.2"
|
||||
actix-test = "0.1.0-beta.3"
|
||||
actix-utils = "3.0.0"
|
||||
actix-web = "4.0.0-beta.7"
|
||||
actix-web = "4.0.0-beta.9"
|
||||
|
||||
futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] }
|
||||
trybuild = "1"
|
||||
|
@ -3,19 +3,18 @@
|
||||
> Routing and runtime macros for Actix Web.
|
||||
|
||||
[](https://crates.io/crates/actix-web-codegen)
|
||||
[](https://docs.rs/actix-web-codegen/0.5.0-beta.3)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
|
||||
[](https://docs.rs/actix-web-codegen/0.5.0-beta.4)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.3)
|
||||
[](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.4)
|
||||
[](https://crates.io/crates/actix-web-codegen)
|
||||
[](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
## Documentation & Resources
|
||||
|
||||
- [API Documentation](https://docs.rs/actix-web-codegen)
|
||||
- [Chat on Gitter](https://gitter.im/actix/actix-web)
|
||||
- Minimum supported Rust version: 1.46 or later.
|
||||
- Minimum supported Rust version: 1.51 or later.
|
||||
|
||||
## Compile Testing
|
||||
|
||||
|
@ -3,10 +3,11 @@ extern crate proc_macro;
|
||||
use std::collections::HashSet;
|
||||
use std::convert::TryFrom;
|
||||
|
||||
use actix_router::ResourceDef;
|
||||
use proc_macro::TokenStream;
|
||||
use proc_macro2::{Span, TokenStream as TokenStream2};
|
||||
use quote::{format_ident, quote, ToTokens, TokenStreamExt};
|
||||
use syn::{parse_macro_input, AttributeArgs, Ident, NestedMeta};
|
||||
use syn::{parse_macro_input, AttributeArgs, Ident, LitStr, NestedMeta};
|
||||
|
||||
enum ResourceType {
|
||||
Async,
|
||||
@ -101,6 +102,7 @@ impl Args {
|
||||
match arg {
|
||||
NestedMeta::Lit(syn::Lit::Str(lit)) => match path {
|
||||
None => {
|
||||
let _ = ResourceDef::new(lit.value());
|
||||
path = Some(lit);
|
||||
}
|
||||
_ => {
|
||||
@ -227,8 +229,7 @@ impl Route {
|
||||
format!(
|
||||
r#"invalid service definition, expected #[{}("<some path>")]"#,
|
||||
method
|
||||
.map(|it| it.as_str())
|
||||
.unwrap_or("route")
|
||||
.map_or("route", |it| it.as_str())
|
||||
.to_ascii_lowercase()
|
||||
),
|
||||
));
|
||||
@ -298,7 +299,7 @@ impl ToTokens for Route {
|
||||
} = self;
|
||||
let resource_name = resource_name
|
||||
.as_ref()
|
||||
.map_or_else(|| name.to_string(), |n| n.value());
|
||||
.map_or_else(|| name.to_string(), LitStr::value);
|
||||
let method_guards = {
|
||||
let mut others = methods.iter();
|
||||
// unwrapping since length is checked to be at least one
|
||||
|
@ -1,4 +1,4 @@
|
||||
#[rustversion::stable(1.46)] // MSRV
|
||||
#[rustversion::stable(1.51)] // MSRV
|
||||
#[test]
|
||||
fn compile_macros() {
|
||||
let t = trybuild::TestCases::new();
|
||||
@ -10,6 +10,7 @@ fn compile_macros() {
|
||||
t.compile_fail("tests/trybuild/route-missing-method-fail.rs");
|
||||
t.compile_fail("tests/trybuild/route-duplicate-method-fail.rs");
|
||||
t.compile_fail("tests/trybuild/route-unexpected-method-fail.rs");
|
||||
t.compile_fail("tests/trybuild/route-malformed-path-fail.rs");
|
||||
|
||||
t.pass("tests/trybuild/docstring-ok.rs");
|
||||
}
|
||||
|
@ -0,0 +1,33 @@
use actix_web_codegen::get;

#[get("/{")]
async fn zero() -> &'static str {
    "malformed resource def"
}

#[get("/{foo")]
async fn one() -> &'static str {
    "malformed resource def"
}

#[get("/{}")]
async fn two() -> &'static str {
    "malformed resource def"
}

#[get("/*")]
async fn three() -> &'static str {
    "malformed resource def"
}

#[get("/{tail:\\d+}*")]
async fn four() -> &'static str {
    "malformed resource def"
}

#[get("/{a}/{b}/{c}/{d}/{e}/{f}/{g}/{h}/{i}/{j}/{k}/{l}/{m}/{n}/{o}/{p}/{q}")]
async fn five() -> &'static str {
    "malformed resource def"
}

fn main() {}
@ -0,0 +1,42 @@
|
||||
error: custom attribute panicked
|
||||
--> $DIR/route-malformed-path-fail.rs:3:1
|
||||
|
|
||||
3 | #[get("/{")]
|
||||
| ^^^^^^^^^^^^
|
||||
|
|
||||
= help: message: pattern "{" contains malformed dynamic segment
|
||||
|
||||
error: custom attribute panicked
|
||||
--> $DIR/route-malformed-path-fail.rs:8:1
|
||||
|
|
||||
8 | #[get("/{foo")]
|
||||
| ^^^^^^^^^^^^^^^
|
||||
|
|
||||
= help: message: pattern "{foo" contains malformed dynamic segment
|
||||
|
||||
error: custom attribute panicked
|
||||
--> $DIR/route-malformed-path-fail.rs:13:1
|
||||
|
|
||||
13 | #[get("/{}")]
|
||||
| ^^^^^^^^^^^^^
|
||||
|
|
||||
= help: message: Wrong path pattern: "/{}" regex parse error:
|
||||
((?s-m)^/(?P<>[^/]+))$
|
||||
^
|
||||
error: empty capture group name
|
||||
|
||||
error: custom attribute panicked
|
||||
--> $DIR/route-malformed-path-fail.rs:23:1
|
||||
|
|
||||
23 | #[get("/{tail:\\d+}*")]
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
= help: message: custom regex is not supported for tail match
|
||||
|
||||
error: custom attribute panicked
|
||||
--> $DIR/route-malformed-path-fail.rs:28:1
|
||||
|
|
||||
28 | #[get("/{a}/{b}/{c}/{d}/{e}/{f}/{g}/{h}/{i}/{j}/{k}/{l}/{m}/{n}/{o}/{p}/{q}")]
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
= help: message: Only 16 dynamic segments are allowed, provided: 17
|
@ -3,6 +3,20 @@
## Unreleased - 2021-xx-xx


## 3.0.0-beta.8 - 2021-09-09
### Changed
* Send headers within the redirect requests. [#2310]

[#2310]: https://github.com/actix/actix-web/pull/2310


## 3.0.0-beta.7 - 2021-06-26
### Changed
* Change compression algorithm features flags. [#2250]

[#2250]: https://github.com/actix/actix-web/pull/2250


## 3.0.0-beta.6 - 2021-06-17
* No significant changes since 3.0.0-beta.5.

@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "awc"
|
||||
version = "3.0.0-beta.6"
|
||||
version = "3.0.0-beta.8"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"fakeshadow <24548779@qq.com>",
|
||||
@ -24,10 +24,10 @@ path = "src/lib.rs"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
# features that docs.rs will build with
|
||||
features = ["openssl", "rustls", "compress", "cookies"]
|
||||
features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
|
||||
|
||||
[features]
|
||||
default = ["compress", "cookies"]
|
||||
default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
|
||||
|
||||
# openssl
|
||||
openssl = ["tls-openssl", "actix-http/openssl"]
|
||||
@ -35,8 +35,12 @@ openssl = ["tls-openssl", "actix-http/openssl"]
|
||||
# rustls
|
||||
rustls = ["tls-rustls", "actix-http/rustls"]
|
||||
|
||||
# content-encoding support
|
||||
compress = ["actix-http/compress"]
|
||||
# Brotli algorithm content-encoding support
|
||||
compress-brotli = ["actix-http/compress-brotli", "__compress"]
|
||||
# Gzip and deflate algorithms content-encoding support
|
||||
compress-gzip = ["actix-http/compress-gzip", "__compress"]
|
||||
# Zstd algorithm content-encoding support
|
||||
compress-zstd = ["actix-http/compress-zstd", "__compress"]
|
||||
|
||||
# cookie parsing and cookie jar
|
||||
cookies = ["cookie"]
|
||||
@ -44,14 +48,19 @@ cookies = ["cookie"]
|
||||
# trust-dns as dns resolver
|
||||
trust-dns = ["actix-http/trust-dns"]
|
||||
|
||||
# Internal (PRIVATE!) features used to aid testing and checking feature status.
# Don't rely on these whatsoever. They may disappear at any time.
|
||||
__compress = []
|
||||
|
||||
[dependencies]
|
||||
actix-codec = "0.4.0"
|
||||
actix-service = "2.0.0"
|
||||
actix-http = "3.0.0-beta.7"
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-rt = { version = "2.1", default-features = false }
|
||||
|
||||
base64 = "0.13"
|
||||
bytes = "1"
|
||||
cfg-if = "1"
|
||||
cookie = { version = "0.15", features = ["percent-encode"], optional = true }
|
||||
derive_more = "0.99.5"
|
||||
futures-core = { version = "0.3.7", default-features = false }
|
||||
@ -68,13 +77,13 @@ tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
|
||||
tls-rustls = { version = "0.19.0", package = "rustls", optional = true, features = ["dangerous_configuration"] }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-web = { version = "4.0.0-beta.7", features = ["openssl"] }
|
||||
actix-http = { version = "3.0.0-beta.7", features = ["openssl"] }
|
||||
actix-http-test = { version = "3.0.0-beta.4", features = ["openssl"] }
|
||||
actix-web = { version = "4.0.0-beta.9", features = ["openssl"] }
|
||||
actix-http = { version = "3.0.0-beta.10", features = ["openssl"] }
|
||||
actix-http-test = { version = "3.0.0-beta.5", features = ["openssl"] }
|
||||
actix-utils = "3.0.0"
|
||||
actix-server = "2.0.0-beta.3"
|
||||
actix-tls = { version = "3.0.0-beta.5", features = ["openssl", "rustls"] }
|
||||
actix-test = { version = "0.1.0-beta.2", features = ["openssl", "rustls"] }
|
||||
actix-test = { version = "0.1.0-beta.3", features = ["openssl", "rustls"] }
|
||||
|
||||
brotli2 = "0.3.2"
|
||||
env_logger = "0.8"
|
||||
|
@ -3,17 +3,16 @@
|
||||
> Async HTTP and WebSocket client library.
|
||||
|
||||
[](https://crates.io/crates/awc)
|
||||
[](https://docs.rs/awc/3.0.0-beta.6)
|
||||
[](https://docs.rs/awc/3.0.0-beta.8)
|
||||

|
||||
[](https://deps.rs/crate/awc/3.0.0-beta.6)
|
||||
[](https://gitter.im/actix/actix-web?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://deps.rs/crate/awc/3.0.0-beta.8)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
## Documentation & Resources
|
||||
|
||||
- [API Documentation](https://docs.rs/awc)
|
||||
- [Example Project](https://github.com/actix/examples/tree/HEAD/security/awc_https)
|
||||
- [Chat on Gitter](https://gitter.im/actix/actix-web)
|
||||
- Minimum Supported Rust Version (MSRV): 1.46.0
|
||||
- Minimum Supported Rust Version (MSRV): 1.51.0
|
||||
|
||||
## Example
|
||||
|
||||
|
@ -2,17 +2,19 @@ use std::error::Error as StdError;
|
||||
|
||||
#[actix_web::main]
|
||||
async fn main() -> Result<(), Box<dyn StdError>> {
|
||||
std::env::set_var("RUST_LOG", "actix_http=trace");
|
||||
std::env::set_var("RUST_LOG", "client=trace,awc=trace,actix_http=trace");
|
||||
env_logger::init();
|
||||
|
||||
let client = awc::Client::new();
|
||||
|
||||
// Create request builder, configure request and send
|
||||
let mut response = client
|
||||
let request = client
|
||||
.get("https://www.rust-lang.org/")
|
||||
.append_header(("User-Agent", "Actix-web"))
|
||||
.send()
|
||||
.await?;
|
||||
.append_header(("User-Agent", "Actix-web"));
|
||||
|
||||
println!("Request: {:?}", request);
|
||||
|
||||
let mut response = request.send().await?;
|
||||
|
||||
// server http response
|
||||
println!("Response: {:?}", response);
|
||||
|
@ -1,7 +1,6 @@
|
||||
//! `awc` is an HTTP and WebSocket client library built on the Actix ecosystem.
|
||||
//!
|
||||
//! ## Making a GET request
|
||||
//!
|
||||
//! # Making a GET request
|
||||
//! ```no_run
|
||||
//! # #[actix_rt::main]
|
||||
//! # async fn main() -> Result<(), awc::error::SendRequestError> {
|
||||
@ -16,10 +15,8 @@
|
||||
//! # }
|
||||
//! ```
|
||||
//!
|
||||
//! ## Making POST requests
|
||||
//!
|
||||
//! ### Raw body contents
|
||||
//!
|
||||
//! # Making POST requests
|
||||
//! ## Raw body contents
|
||||
//! ```no_run
|
||||
//! # #[actix_rt::main]
|
||||
//! # async fn main() -> Result<(), awc::error::SendRequestError> {
|
||||
@ -31,8 +28,7 @@
|
||||
//! # }
|
||||
//! ```
|
||||
//!
|
||||
//! ### Forms
|
||||
//!
|
||||
//! ## Forms
|
||||
//! ```no_run
|
||||
//! # #[actix_rt::main]
|
||||
//! # async fn main() -> Result<(), awc::error::SendRequestError> {
|
||||
@ -46,8 +42,7 @@
|
||||
//! # }
|
||||
//! ```
|
||||
//!
|
||||
//! ### JSON
|
||||
//!
|
||||
//! ## JSON
|
||||
//! ```no_run
|
||||
//! # #[actix_rt::main]
|
||||
//! # async fn main() -> Result<(), awc::error::SendRequestError> {
|
||||
@ -64,8 +59,24 @@
|
||||
//! # }
|
||||
//! ```
|
||||
//!
|
||||
//! ## WebSocket support
|
||||
//! # Response Compression
|
||||
//! All [official][iana-encodings] and common content encoding codecs are optionally supported.
|
||||
//!
|
||||
//! The `Accept-Encoding` header will automatically be populated with enabled codecs and added to
|
||||
//! outgoing requests, allowing servers to select their `Content-Encoding` accordingly.
|
||||
//!
|
||||
//! Feature flags enable these codecs according to the table below. By default, all `compress-*`
|
||||
//! features are enabled.
|
||||
//!
|
||||
//! | Feature | Codecs |
|
||||
//! | ----------------- | ------------- |
|
||||
//! | `compress-brotli` | brotli |
|
||||
//! | `compress-gzip` | gzip, deflate |
|
||||
//! | `compress-zstd` | zstd |
|
||||
//!
|
||||
//! [iana-encodings]: https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
|
||||
//!
|
||||
//! # WebSocket support
|
||||
//! ```no_run
|
||||
//! # #[actix_rt::main]
|
||||
//! # async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
@ -128,6 +139,9 @@ pub use self::sender::SendClientRequest;
|
||||
|
||||
/// An asynchronous HTTP and WebSocket client.
|
||||
///
|
||||
/// You should take care to create, at most, one `Client` per thread. Otherwise, expect higher CPU
|
||||
/// and memory usage.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// use awc::Client;
|
||||
@ -136,10 +150,10 @@ pub use self::sender::SendClientRequest;
|
||||
/// async fn main() {
|
||||
/// let mut client = Client::default();
|
||||
///
|
||||
/// let res = client.get("http://www.rust-lang.org") // <- Create request builder
|
||||
/// .insert_header(("User-Agent", "Actix-web"))
|
||||
/// .send() // <- Send HTTP request
|
||||
/// .await; // <- send request and wait for response
|
||||
/// let res = client.get("http://www.rust-lang.org")
|
||||
/// .insert_header(("User-Agent", "my-app/1.2"))
|
||||
/// .send()
|
||||
/// .await;
|
||||
///
|
||||
/// println!("Response: {:?}", res);
|
||||
/// }
|
||||
|
@ -85,10 +85,12 @@ where
|
||||
let max_redirect_times = self.max_redirect_times;
|
||||
|
||||
// backup the uri and method for reuse schema and authority.
|
||||
let (uri, method) = match head {
|
||||
RequestHeadType::Owned(ref head) => (head.uri.clone(), head.method.clone()),
|
||||
let (uri, method, headers) = match head {
|
||||
RequestHeadType::Owned(ref head) => {
|
||||
(head.uri.clone(), head.method.clone(), head.headers.clone())
|
||||
}
|
||||
RequestHeadType::Rc(ref head, ..) => {
|
||||
(head.uri.clone(), head.method.clone())
|
||||
(head.uri.clone(), head.method.clone(), head.headers.clone())
|
||||
}
|
||||
};
|
||||
|
||||
@ -104,6 +106,7 @@ where
|
||||
max_redirect_times,
|
||||
uri: Some(uri),
|
||||
method: Some(method),
|
||||
headers: Some(headers),
|
||||
body: body_opt,
|
||||
addr,
|
||||
connector: Some(connector),
|
||||
@ -127,9 +130,10 @@ pin_project_lite::pin_project! {
|
||||
max_redirect_times: u8,
|
||||
uri: Option<Uri>,
|
||||
method: Option<Method>,
|
||||
headers: Option<header::HeaderMap>,
|
||||
body: Option<Bytes>,
|
||||
addr: Option<SocketAddr>,
|
||||
connector: Option<Rc<S>>
|
||||
connector: Option<Rc<S>>,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -148,6 +152,7 @@ where
|
||||
max_redirect_times,
|
||||
uri,
|
||||
method,
|
||||
headers,
|
||||
body,
|
||||
addr,
|
||||
connector,
|
||||
@ -156,79 +161,60 @@ where
|
||||
StatusCode::MOVED_PERMANENTLY
|
||||
| StatusCode::FOUND
|
||||
| StatusCode::SEE_OTHER
|
||||
| StatusCode::TEMPORARY_REDIRECT
|
||||
| StatusCode::PERMANENT_REDIRECT
|
||||
if *max_redirect_times > 0 =>
|
||||
{
|
||||
let org_uri = uri.take().unwrap();
|
||||
// rebuild uri from the location header value.
|
||||
let uri = rebuild_uri(&res, org_uri)?;
|
||||
let is_redirect = res.head().status == StatusCode::TEMPORARY_REDIRECT
|
||||
|| res.head().status == StatusCode::PERMANENT_REDIRECT;
|
||||
|
||||
// reset method
|
||||
let method = method.take().unwrap();
|
||||
let method = match method {
|
||||
Method::GET | Method::HEAD => method,
|
||||
_ => Method::GET,
|
||||
};
|
||||
let prev_uri = uri.take().unwrap();
|
||||
|
||||
// rebuild uri from the location header value.
|
||||
let next_uri = build_next_uri(&res, &prev_uri)?;
|
||||
|
||||
// take ownership of states that could be reused
|
||||
let addr = addr.take();
|
||||
let connector = connector.take();
|
||||
let mut max_redirect_times = *max_redirect_times;
|
||||
|
||||
// use a new request head.
|
||||
let mut head = RequestHead::default();
|
||||
head.uri = uri.clone();
|
||||
head.method = method.clone();
|
||||
|
||||
let head = RequestHeadType::Owned(head);
|
||||
|
||||
max_redirect_times -= 1;
|
||||
|
||||
let fut = connector
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
// remove body
|
||||
.call(ConnectRequest::Client(head, Body::None, addr));
|
||||
|
||||
self.set(RedirectServiceFuture::Client {
|
||||
fut,
|
||||
max_redirect_times,
|
||||
uri: Some(uri),
|
||||
method: Some(method),
|
||||
// body is dropped on 301,302,303
|
||||
body: None,
|
||||
addr,
|
||||
connector,
|
||||
});
|
||||
|
||||
self.poll(cx)
|
||||
}
|
||||
StatusCode::TEMPORARY_REDIRECT | StatusCode::PERMANENT_REDIRECT
|
||||
if *max_redirect_times > 0 =>
|
||||
{
|
||||
let org_uri = uri.take().unwrap();
|
||||
// rebuild uri from the location header value.
|
||||
let uri = rebuild_uri(&res, org_uri)?;
|
||||
|
||||
// try to reuse body
|
||||
let body = body.take();
|
||||
let body_new = match body {
|
||||
Some(ref bytes) => Body::Bytes(bytes.clone()),
|
||||
// TODO: should this be Body::Empty or Body::None.
|
||||
_ => Body::Empty,
|
||||
// reset method
|
||||
let method = if is_redirect {
|
||||
method.take().unwrap()
|
||||
} else {
|
||||
let method = method.take().unwrap();
|
||||
match method {
|
||||
Method::GET | Method::HEAD => method,
|
||||
_ => Method::GET,
|
||||
}
|
||||
};
|
||||
|
||||
let addr = addr.take();
|
||||
let method = method.take().unwrap();
|
||||
let connector = connector.take();
|
||||
let mut max_redirect_times = *max_redirect_times;
|
||||
let mut body = body.take();
|
||||
let body_new = if is_redirect {
|
||||
// try to reuse body
|
||||
match body {
|
||||
Some(ref bytes) => Body::Bytes(bytes.clone()),
|
||||
// TODO: should this be Body::Empty or Body::None.
|
||||
_ => Body::Empty,
|
||||
}
|
||||
} else {
|
||||
body = None;
|
||||
// remove body
|
||||
Body::None
|
||||
};
|
||||
|
||||
let mut headers = headers.take().unwrap();
|
||||
|
||||
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
|
||||
|
||||
// use a new request head.
|
||||
let mut head = RequestHead::default();
|
||||
head.uri = uri.clone();
|
||||
head.uri = next_uri.clone();
|
||||
head.method = method.clone();
|
||||
head.headers = headers.clone();
|
||||
|
||||
let head = RequestHeadType::Owned(head);
|
||||
|
||||
let mut max_redirect_times = *max_redirect_times;
|
||||
max_redirect_times -= 1;
|
||||
|
||||
let fut = connector
|
||||
@ -239,8 +225,9 @@ where
|
||||
self.set(RedirectServiceFuture::Client {
|
||||
fut,
|
||||
max_redirect_times,
|
||||
uri: Some(uri),
|
||||
uri: Some(next_uri),
|
||||
method: Some(method),
|
||||
headers: Some(headers),
|
||||
body,
|
||||
addr,
|
||||
connector,
|
||||
@ -256,7 +243,7 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
fn rebuild_uri(res: &ClientResponse, org_uri: Uri) -> Result<Uri, SendRequestError> {
|
||||
fn build_next_uri(res: &ClientResponse, prev_uri: &Uri) -> Result<Uri, SendRequestError> {
|
||||
let uri = res
|
||||
.headers()
|
||||
.get(header::LOCATION)
|
||||
@ -266,8 +253,8 @@ fn rebuild_uri(res: &ClientResponse, org_uri: Uri) -> Result<Uri, SendRequestErr
|
||||
.map_err(|e| SendRequestError::Url(InvalidUrl::HttpError(e.into())))?;
|
||||
if uri.scheme().is_none() || uri.authority().is_none() {
|
||||
let uri = Uri::builder()
|
||||
.scheme(org_uri.scheme().cloned().unwrap())
|
||||
.authority(org_uri.authority().cloned().unwrap())
|
||||
.scheme(prev_uri.scheme().cloned().unwrap())
|
||||
.authority(prev_uri.authority().cloned().unwrap())
|
||||
.path_and_query(value.as_bytes())
|
||||
.build()?;
|
||||
Ok::<_, SendRequestError>(uri)
|
||||
@ -281,12 +268,25 @@ fn rebuild_uri(res: &ClientResponse, org_uri: Uri) -> Result<Uri, SendRequestErr
|
||||
Ok(uri)
|
||||
}
|
||||
|
||||
fn remove_sensitive_headers(headers: &mut header::HeaderMap, prev_uri: &Uri, next_uri: &Uri) {
|
||||
if next_uri.host() != prev_uri.host()
|
||||
|| next_uri.port() != prev_uri.port()
|
||||
|| next_uri.scheme() != prev_uri.scheme()
|
||||
{
|
||||
headers.remove(header::COOKIE);
|
||||
headers.remove(header::AUTHORIZATION);
|
||||
headers.remove(header::PROXY_AUTHORIZATION);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use actix_web::{web, App, Error, HttpResponse};
|
||||
use actix_web::{web, App, Error, HttpRequest, HttpResponse};
|
||||
|
||||
use super::*;
|
||||
use crate::http::HeaderValue;
|
||||
use crate::ClientBuilder;
|
||||
use std::str::FromStr;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_basic_redirect() {
|
||||
@ -347,4 +347,239 @@ mod tests {
|
||||
|
||||
assert_eq!(res.status().as_u16(), 302);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_redirect_status_kind_307_308() {
|
||||
let srv = actix_test::start(|| {
|
||||
async fn root() -> HttpResponse {
|
||||
HttpResponse::TemporaryRedirect()
|
||||
.append_header(("location", "/test"))
|
||||
.finish()
|
||||
}
|
||||
|
||||
async fn test(req: HttpRequest, body: Bytes) -> HttpResponse {
|
||||
if req.method() == Method::POST && !body.is_empty() {
|
||||
HttpResponse::Ok().finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
App::new()
|
||||
.service(web::resource("/").route(web::to(root)))
|
||||
.service(web::resource("/test").route(web::to(test)))
|
||||
});
|
||||
|
||||
let res = srv.post("/").send_body("Hello").await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_redirect_status_kind_301_302_303() {
|
||||
let srv = actix_test::start(|| {
|
||||
async fn root() -> HttpResponse {
|
||||
HttpResponse::Found()
|
||||
.append_header(("location", "/test"))
|
||||
.finish()
|
||||
}
|
||||
|
||||
async fn test(req: HttpRequest, body: Bytes) -> HttpResponse {
|
||||
if (req.method() == Method::GET || req.method() == Method::HEAD)
|
||||
&& body.is_empty()
|
||||
{
|
||||
HttpResponse::Ok().finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
App::new()
|
||||
.service(web::resource("/").route(web::to(root)))
|
||||
.service(web::resource("/test").route(web::to(test)))
|
||||
});
|
||||
|
||||
let res = srv.post("/").send_body("Hello").await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
|
||||
let res = srv.post("/").send().await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_redirect_headers() {
|
||||
let srv = actix_test::start(|| {
|
||||
async fn root(req: HttpRequest) -> HttpResponse {
|
||||
if req
|
||||
.headers()
|
||||
.get("custom")
|
||||
.unwrap_or(&HeaderValue::from_str("").unwrap())
|
||||
== "value"
|
||||
{
|
||||
HttpResponse::Found()
|
||||
.append_header(("location", "/test"))
|
||||
.finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
async fn test(req: HttpRequest) -> HttpResponse {
|
||||
if req
|
||||
.headers()
|
||||
.get("custom")
|
||||
.unwrap_or(&HeaderValue::from_str("").unwrap())
|
||||
== "value"
|
||||
{
|
||||
HttpResponse::Ok().finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
App::new()
|
||||
.service(web::resource("/").route(web::to(root)))
|
||||
.service(web::resource("/test").route(web::to(test)))
|
||||
});
|
||||
|
||||
let client = ClientBuilder::new()
|
||||
.header("custom", "value")
|
||||
.disable_redirects()
|
||||
.finish();
|
||||
let res = client.get(srv.url("/")).send().await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 302);
|
||||
|
||||
let client = ClientBuilder::new().header("custom", "value").finish();
|
||||
let res = client.get(srv.url("/")).send().await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
|
||||
let client = ClientBuilder::new().finish();
|
||||
let res = client
|
||||
.get(srv.url("/"))
|
||||
.insert_header(("custom", "value"))
|
||||
.send()
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_redirect_cross_origin_headers() {
|
||||
// defining two services to have two different origins
|
||||
let srv2 = actix_test::start(|| {
|
||||
async fn root(req: HttpRequest) -> HttpResponse {
|
||||
if req.headers().get(header::AUTHORIZATION).is_none() {
|
||||
HttpResponse::Ok().finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
App::new().service(web::resource("/").route(web::to(root)))
|
||||
});
|
||||
let srv2_port: u16 = srv2.addr().port();
|
||||
|
||||
let srv1 = actix_test::start(move || {
|
||||
async fn root(req: HttpRequest) -> HttpResponse {
|
||||
let port = *req.app_data::<u16>().unwrap();
|
||||
if req.headers().get(header::AUTHORIZATION).is_some() {
|
||||
HttpResponse::Found()
|
||||
.append_header((
|
||||
"location",
|
||||
format!("http://localhost:{}/", port).as_str(),
|
||||
))
|
||||
.finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
async fn test1(req: HttpRequest) -> HttpResponse {
|
||||
if req.headers().get(header::AUTHORIZATION).is_some() {
|
||||
HttpResponse::Found()
|
||||
.append_header(("location", "/test2"))
|
||||
.finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
async fn test2(req: HttpRequest) -> HttpResponse {
|
||||
if req.headers().get(header::AUTHORIZATION).is_some() {
|
||||
HttpResponse::Ok().finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
App::new()
|
||||
.app_data(srv2_port)
|
||||
.service(web::resource("/").route(web::to(root)))
|
||||
.service(web::resource("/test1").route(web::to(test1)))
|
||||
.service(web::resource("/test2").route(web::to(test2)))
|
||||
});
|
||||
|
||||
// send a request across origins (http://srv1/ redirecting to http://srv2/), so sensitive headers should be removed
|
||||
let client = ClientBuilder::new()
|
||||
.header(header::AUTHORIZATION, "auth_key_value")
|
||||
.finish();
|
||||
let res = client.get(srv1.url("/")).send().await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
|
||||
// send a request within the same origin (http://srv1/test1 redirecting to http://srv1/test2), so no headers should be removed
|
||||
let res = client.get(srv1.url("/test1")).send().await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_remove_sensitive_headers() {
|
||||
fn gen_headers() -> header::HeaderMap {
|
||||
let mut headers = header::HeaderMap::new();
|
||||
headers.insert(header::USER_AGENT, HeaderValue::from_str("value").unwrap());
|
||||
headers.insert(
|
||||
header::AUTHORIZATION,
|
||||
HeaderValue::from_str("value").unwrap(),
|
||||
);
|
||||
headers.insert(
|
||||
header::PROXY_AUTHORIZATION,
|
||||
HeaderValue::from_str("value").unwrap(),
|
||||
);
|
||||
headers.insert(header::COOKIE, HeaderValue::from_str("value").unwrap());
|
||||
headers
|
||||
}
|
||||
|
||||
// Same origin
|
||||
let prev_uri = Uri::from_str("https://host/path1").unwrap();
|
||||
let next_uri = Uri::from_str("https://host/path2").unwrap();
|
||||
let mut headers = gen_headers();
|
||||
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
|
||||
assert_eq!(headers.len(), 4);
|
||||
|
||||
// different schema
|
||||
let prev_uri = Uri::from_str("http://host/").unwrap();
|
||||
let next_uri = Uri::from_str("https://host/").unwrap();
|
||||
let mut headers = gen_headers();
|
||||
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
|
||||
assert_eq!(headers.len(), 1);
|
||||
|
||||
// different host
|
||||
let prev_uri = Uri::from_str("https://host1/").unwrap();
|
||||
let next_uri = Uri::from_str("https://host2/").unwrap();
|
||||
let mut headers = gen_headers();
|
||||
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
|
||||
assert_eq!(headers.len(), 1);
|
||||
|
||||
// different port
|
||||
let prev_uri = Uri::from_str("https://host:12/").unwrap();
|
||||
let next_uri = Uri::from_str("https://host:23/").unwrap();
|
||||
let mut headers = gen_headers();
|
||||
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
|
||||
assert_eq!(headers.len(), 1);
|
||||
|
||||
// different everything!
|
||||
let prev_uri = Uri::from_str("http://host1:12/path1").unwrap();
|
||||
let next_uri = Uri::from_str("https://host2:23/path2").unwrap();
|
||||
let mut headers = gen_headers();
|
||||
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
|
||||
assert_eq!(headers.len(), 1);
|
||||
}
|
||||
}
|
||||
|
@ -8,7 +8,7 @@ use actix_http::{
|
||||
body::Body,
|
||||
http::{
|
||||
header::{self, IntoHeaderPair},
|
||||
uri, ConnectionType, Error as HttpError, HeaderMap, HeaderValue, Method, Uri, Version,
|
||||
ConnectionType, Error as HttpError, HeaderMap, HeaderValue, Method, Uri, Version,
|
||||
},
|
||||
RequestHead,
|
||||
};
|
||||
@ -22,11 +22,6 @@ use crate::{
|
||||
ClientConfig,
|
||||
};
|
||||
|
||||
#[cfg(feature = "compress")]
|
||||
const HTTPS_ENCODING: &str = "br, gzip, deflate";
|
||||
#[cfg(not(feature = "compress"))]
|
||||
const HTTPS_ENCODING: &str = "br";
|
||||
|
||||
/// An HTTP Client request builder
|
||||
///
|
||||
/// This type can be used to construct an instance of `ClientRequest` through a
|
||||
@ -480,22 +475,44 @@ impl ClientRequest {
|
||||
|
||||
let mut slf = self;
|
||||
|
||||
// Set the Accept-Encoding header depending on which features are enabled.
// If decompression is not requested, we cannot know which encodings the caller
// supports, so we do not guess an Accept-Encoding value.
|
||||
if slf.response_decompress {
|
||||
let https = slf
|
||||
.head
|
||||
.uri
|
||||
.scheme()
|
||||
.map(|s| s == &uri::Scheme::HTTPS)
|
||||
.unwrap_or(true);
|
||||
// Build the Accept-Encoding value from the compression algorithms awc is built with.
|
||||
#[allow(clippy::vec_init_then_push)]
|
||||
#[cfg(feature = "__compress")]
|
||||
let accept_encoding = {
|
||||
let mut encoding = vec![];
|
||||
|
||||
if https {
|
||||
slf = slf.insert_header_if_none((header::ACCEPT_ENCODING, HTTPS_ENCODING));
|
||||
} else {
|
||||
#[cfg(feature = "compress")]
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
{
|
||||
slf = slf.insert_header_if_none((header::ACCEPT_ENCODING, "gzip, deflate"));
|
||||
encoding.push("br");
|
||||
}
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
{
|
||||
encoding.push("gzip");
|
||||
encoding.push("deflate");
|
||||
}
|
||||
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
encoding.push("zstd");
|
||||
|
||||
assert!(
|
||||
!encoding.is_empty(),
|
||||
"encoding can not be empty unless __compress feature has been explicitly enabled"
|
||||
);
|
||||
|
||||
encoding.join(", ")
|
||||
};
|
||||
|
||||
// Otherwise tell the server that we do not support any compression algorithm,
// by explicitly asking for the identity encoding.
|
||||
#[cfg(not(feature = "__compress"))]
|
||||
let accept_encoding = "identity";
|
||||
|
||||
slf = slf.insert_header_if_none((header::ACCEPT_ENCODING, accept_encoding));
|
||||
}
|
||||
|
||||
Ok(slf)
|
||||
|
@ -22,7 +22,7 @@ use derive_more::From;
use futures_core::Stream;
use serde::Serialize;

#[cfg(feature = "compress")]
#[cfg(feature = "__compress")]
use actix_http::{encoding::Decoder, http::header::ContentEncoding, Payload, PayloadStream};

use crate::{
@ -91,7 +91,7 @@ impl SendClientRequest {
}
}

#[cfg(feature = "compress")]
#[cfg(feature = "__compress")]
impl Future for SendClientRequest {
type Output = Result<ClientResponse<Decoder<Payload<PayloadStream>>>, SendRequestError>;

@ -131,7 +131,7 @@ impl Future for SendClientRequest {
}
}

#[cfg(not(feature = "compress"))]
#[cfg(not(feature = "__compress"))]
impl Future for SendClientRequest {
type Output = Result<ClientResponse, SendRequestError>;

@ -517,7 +517,7 @@ mod tests {
"test-origin"
);
assert_eq!(req.max_size, 100);
assert_eq!(req.server_mode, true);
assert!(req.server_mode);
assert_eq!(req.protocols, Some("v1,v2".to_string()));
assert_eq!(
req.head.headers.get(header::CONTENT_TYPE).unwrap(),
@ -1,6 +1,5 @@
use std::{future::Future, time::Instant};

use actix_http::Response;
use actix_utils::future::{ready, Ready};
use actix_web::http::StatusCode;
use actix_web::test::TestRequest;
@ -24,11 +23,11 @@ struct StringResponder(String);

impl FutureResponder for StringResponder {
type Error = Error;
type Future = Ready<Result<Response, Self::Error>>;
type Future = Ready<Result<HttpResponse, Self::Error>>;

fn future_respond_to(self, _: &HttpRequest) -> Self::Future {
// this is default builder for string response in both new and old responder trait.
ready(Ok(Response::build(StatusCode::OK)
ready(Ok(HttpResponse::build(StatusCode::OK)
.content_type("text/plain; charset=utf-8")
.body(self.0)))
}
@ -37,7 +36,7 @@ impl FutureResponder for StringResponder {
impl<T> FutureResponder for OptionResponder<T>
where
T: FutureResponder,
T::Future: Future<Output = Result<Response, Error>>,
T::Future: Future<Output = Result<HttpResponse, Error>>,
{
type Error = Error;
type Future = Either<T::Future, Ready<Result<HttpResponse, Self::Error>>>;
@ -52,7 +51,7 @@ where

impl Responder for StringResponder {
fn respond_to(self, _: &HttpRequest) -> HttpResponse {
Response::build(StatusCode::OK)
HttpResponse::build(StatusCode::OK)
.content_type("text/plain; charset=utf-8")
.body(self.0)
}
@ -62,7 +61,7 @@ impl<T: Responder> Responder for OptionResponder<T> {
fn respond_to(self, req: &HttpRequest) -> HttpResponse {
match self.0 {
Some(t) => t.respond_to(req),
None => Response::from_error(error::ErrorInternalServerError("err")),
None => HttpResponse::from_error(error::ErrorInternalServerError("err")),
}
}
}
@ -51,9 +51,8 @@ where
fut.await.unwrap();
}
});
let elapsed = start.elapsed();
// check that at least first request succeeded
elapsed
start.elapsed()
})
});
}
@ -93,9 +92,8 @@ fn async_web_service(c: &mut Criterion) {
fut.await.unwrap();
}
});
let elapsed = start.elapsed();
// check that at least first request succeeded
elapsed
start.elapsed()
})
});
}
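Both benchmark hunks above simply return `start.elapsed()` from the timing closure instead of binding it first. For context, a minimal Criterion `iter_custom` benchmark has the same shape; this is an illustrative sketch, not one of the repository's benches:

```rust
use std::time::Instant;

use criterion::{black_box, Criterion};

fn bench_example(c: &mut Criterion) {
    c.bench_function("example", |b| {
        // `iter_custom` expects the closure to return the measured `Duration`,
        // so the elapsed time can be returned directly without a binding.
        b.iter_custom(|iters| {
            let start = Instant::now();
            for _ in 0..iters {
                black_box(40 + 2);
            }
            start.elapsed()
        })
    });
}
```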
@ -1 +1 @@
msrv = "1.46"
msrv = "1.51"
@ -4,7 +4,7 @@ digraph {
subgraph cluster_net {
label="actix-net"
"actix-codec" "actix-macros" "actix-rt" "actix-server" "actix-service"
"actix-tls" "actix-tracing" "actix-utils" "actix-router"
"actix-tls" "actix-tracing" "actix-utils"
}

subgraph cluster_other {
@ -25,7 +25,6 @@ digraph {
"actix-tls" -> { "tokio-util" }[color="#009900"]
"actix-server" -> { "actix-service" "actix-rt" "actix-utils" "tokio" }
"actix-rt" -> { "actix-macros" "tokio" }
"actix-router" -> { "bytestring" }

"local-channel" -> { "local-waker" }

@ -10,6 +10,7 @@ digraph {
"web-actors"
"web-codegen"
"http-test"
"router"

{ rank=same; "multipart" "web-actors" "http-test" };
{ rank=same; "files" "awc" "web" };
@ -36,7 +37,7 @@ digraph {
"rt" -> { "macros" }

{ rank=same; "utils" "codec" };
{ rank=same; "rt" "macros" "service" "router" };
{ rank=same; "rt" "macros" "service" };

// actix

@ -10,9 +10,10 @@ digraph {
"actix-web-codegen"
"actix-http-test"
"actix-test"
"actix-router"
}

"actix-web" -> { "actix-web-codegen" "actix-http" }
"actix-web" -> { "actix-web-codegen" "actix-http" "actix-router" }
"awc" -> { "actix-http" }
"actix-web-actors" -> { "actix" "actix-web" "actix-http" }
"actix-multipart" -> { "actix-web" }
src/app.rs
@ -43,13 +43,14 @@ impl App<AppEntry, Body> {
/// Create application builder. Application can be configured with a builder-like pattern.
#[allow(clippy::new_without_default)]
pub fn new() -> Self {
let fref = Rc::new(RefCell::new(None));
let factory_ref = Rc::new(RefCell::new(None));

App {
endpoint: AppEntry::new(fref.clone()),
endpoint: AppEntry::new(factory_ref.clone()),
data_factories: Vec::new(),
services: Vec::new(),
default: None,
factory_ref: fref,
factory_ref,
external: Vec::new(),
extensions: Extensions::new(),
_phantom: PhantomData,
@ -68,43 +69,83 @@ where
InitError = (),
>,
{
/// Set application data. Application data could be accessed
/// by using `Data<T>` extractor where `T` is data type.
/// Set application (root level) data.
///
/// **Note**: HTTP server accepts an application factory rather than
/// an application instance. Http server constructs an application
/// instance for each thread, thus application data must be constructed
/// multiple times. If you want to share data between different
/// threads, a shared object should be used, e.g. `Arc`. Internally `Data` type
/// uses `Arc` so data could be created outside of app factory and clones could
/// be stored via `App::app_data()` method.
/// Application data stored with `App::app_data()` method is available through the
/// [`HttpRequest::app_data`](crate::HttpRequest::app_data) method at runtime.
///
/// # [`Data<T>`]
/// Any [`Data<T>`] type added here can utilize it's extractor implementation in handlers.
/// Types not wrapped in `Data<T>` cannot use this extractor. See [its docs](Data<T>) for more
/// about its usage and patterns.
///
/// ```
/// use std::cell::Cell;
/// use actix_web::{web, App, HttpResponse, Responder};
/// use actix_web::{web, App, HttpRequest, HttpResponse, Responder};
///
/// struct MyData {
/// counter: Cell<usize>,
/// count: std::cell::Cell<usize>,
/// }
///
/// async fn index(data: web::Data<MyData>) -> impl Responder {
/// data.counter.set(data.counter.get() + 1);
/// HttpResponse::Ok()
/// async fn handler(req: HttpRequest, counter: web::Data<MyData>) -> impl Responder {
/// // note this cannot use the Data<T> extractor because it was not added with it
/// let incr = *req.app_data::<usize>().unwrap();
/// assert_eq!(incr, 3);
///
/// // update counter using other value from app data
/// counter.count.set(counter.count.get() + incr);
///
/// HttpResponse::Ok().body(counter.count.get().to_string())
/// }
///
/// let app = App::new()
/// .data(MyData{ counter: Cell::new(0) })
/// .service(
/// web::resource("/index.html").route(
/// web::get().to(index)));
/// let app = App::new().service(
/// web::resource("/")
/// .app_data(3usize)
/// .app_data(web::Data::new(MyData { count: Default::default() }))
/// .route(web::get().to(handler))
/// );
/// ```
///
/// # Shared Mutable State
/// [`HttpServer::new`](crate::HttpServer::new) accepts an application factory rather than an
/// application instance; the factory closure is called on each worker thread independently.
/// Therefore, if you want to share a data object between different workers, a shareable object
/// needs to be created first, outside the `HttpServer::new` closure and cloned into it.
/// [`Data<T>`] is an example of such a sharable object.
///
/// ```ignore
/// let counter = web::Data::new(AppStateWithCounter {
/// counter: Mutex::new(0),
/// });
///
/// HttpServer::new(move || {
/// // move counter object into the closure and clone for each worker
///
/// App::new()
/// .app_data(counter.clone())
/// .route("/", web::get().to(handler))
/// })
/// ```
pub fn app_data<U: 'static>(mut self, ext: U) -> Self {
self.extensions.insert(ext);
self
}

/// Add application (root) data after wrapping in `Data<T>`.
///
/// Deprecated in favor of [`app_data`](Self::app_data).
#[deprecated(since = "4.0.0", note = "Use `.app_data(Data::new(val))` instead.")]
pub fn data<U: 'static>(self, data: U) -> Self {
self.app_data(Data::new(data))
}

/// Set application data factory. This function is
/// similar to `.data()` but it accepts data factory. Data object get
/// constructed asynchronously during application initialization.
/// Add application data factory. This function is similar to `.data()` but it accepts a
/// "data factory". Data values are constructed asynchronously during application
/// initialization, before the server starts accepting requests.
#[deprecated(
since = "4.0.0",
note = "Construct data value before starting server and use `.app_data(Data::new(val))` instead."
)]
pub fn data_factory<F, Out, D, E>(mut self, data: F) -> Self
where
F: Fn() -> Out + 'static,
@ -133,18 +174,6 @@ where
self
}

/// Set application level arbitrary data item.
///
/// Application data stored with `App::app_data()` method is available
/// via `HttpRequest::app_data()` method at runtime.
///
/// This method could be used for storing `Data<T>` as well, in that case
/// data could be accessed by using `Data<T>` extractor.
pub fn app_data<U: 'static>(mut self, ext: U) -> Self {
self.extensions.insert(ext);
self
}

/// Run external configuration as part of the application building
/// process
///
@ -305,7 +334,7 @@ where
U: AsRef<str>,
{
let mut rdef = ResourceDef::new(url.as_ref());
*rdef.name_mut() = name.as_ref().to_string();
rdef.set_name(name.as_ref());
self.external.push(rdef);
self
}
@ -518,6 +547,8 @@ mod tests {
assert_eq!(resp.status(), StatusCode::CREATED);
}

// allow deprecated App::data
#[allow(deprecated)]
#[actix_rt::test]
async fn test_data_factory() {
let srv = init_service(
@ -541,6 +572,8 @@ mod tests {
assert_eq!(resp.status(), StatusCode::INTERNAL_SERVER_ERROR);
}

// allow deprecated App::data
#[allow(deprecated)]
#[actix_rt::test]
async fn test_data_factory_errors() {
let srv = try_init_service(
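The new "Shared Mutable State" doc section above is marked ```ignore``` and only outlines the pattern. A complete, runnable version is sketched below; `AppStateWithCounter` and `handler` are illustrative names rather than part of this diff:

```rust
use std::sync::Mutex;

use actix_web::{web, App, HttpServer, Responder};

struct AppStateWithCounter {
    counter: Mutex<i32>,
}

async fn handler(data: web::Data<AppStateWithCounter>) -> impl Responder {
    let mut counter = data.counter.lock().unwrap();
    *counter += 1;
    format!("request number {}", counter)
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // created once, outside the factory closure, then cloned into each worker
    let counter = web::Data::new(AppStateWithCounter { counter: Mutex::new(0) });

    HttpServer::new(move || {
        App::new()
            .app_data(counter.clone())
            .route("/", web::get().to(handler))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```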
@ -1,22 +1,22 @@
use std::cell::RefCell;
use std::rc::Rc;
use std::{cell::RefCell, mem, rc::Rc};

use actix_http::{Extensions, Request};
use actix_router::{Path, ResourceDef, Router, Url};
use actix_service::boxed::{self, BoxService, BoxServiceFactory};
use actix_service::{fn_service, Service, ServiceFactory};
use actix_service::{
boxed::{self, BoxService, BoxServiceFactory},
fn_service, Service, ServiceFactory,
};
use futures_core::future::LocalBoxFuture;
use futures_util::future::join_all;

use crate::data::FnDataFactory;
use crate::error::Error;
use crate::guard::Guard;
use crate::request::{HttpRequest, HttpRequestPool};
use crate::rmap::ResourceMap;
use crate::service::{AppServiceFactory, ServiceRequest, ServiceResponse};
use crate::{
config::{AppConfig, AppService},
HttpResponse,
data::FnDataFactory,
guard::Guard,
request::{HttpRequest, HttpRequestPool},
rmap::ResourceMap,
service::{AppServiceFactory, ServiceRequest, ServiceResponse},
Error, HttpResponse,
};

type Guards = Vec<Box<dyn Guard>>;
@ -75,11 +75,11 @@ where
let mut config = AppService::new(config, default.clone());

// register services
std::mem::take(&mut *self.services.borrow_mut())
mem::take(&mut *self.services.borrow_mut())
.into_iter()
.for_each(|mut srv| srv.register(&mut config));

let mut rmap = ResourceMap::new(ResourceDef::new(""));
let mut rmap = ResourceMap::new(ResourceDef::prefix(""));

let (config, services) = config.into_services();

@ -98,13 +98,13 @@ where
});

// external resources
for mut rdef in std::mem::take(&mut *self.external.borrow_mut()) {
for mut rdef in mem::take(&mut *self.external.borrow_mut()) {
rmap.add(&mut rdef, None);
}

// complete ResourceMap tree creation
let rmap = Rc::new(rmap);
rmap.finish(rmap.clone());
ResourceMap::finish(&rmap);

// construct all async data factory futures
let factory_futs = join_all(self.async_data_factories.iter().map(|f| f()));
@ -131,9 +131,9 @@ where
let service = endpoint_fut.await?;

// populate app data container from (async) data factories.
async_data_factories.iter().for_each(|factory| {
for factory in &async_data_factories {
factory.create(&mut app_data);
});
}

Ok(AppInitService {
service,
@ -144,7 +144,9 @@ where
}
}

/// Service that takes a [`Request`] and delegates to a service that take a [`ServiceRequest`].
/// The [`Service`] that is passed to `actix-http`'s server builder.
///
/// Wraps a service receiving a [`ServiceRequest`] into one receiving a [`Request`].
pub struct AppInitService<T, B>
where
T: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
@ -275,6 +277,7 @@ impl ServiceFactory<ServiceRequest> for AppRoutingFactory {
}
}

/// The Actix Web router default entry point.
pub struct AppRouting {
router: Router<HttpService, Guards>,
default: HttpService,
@ -288,7 +291,7 @@ impl Service<ServiceRequest> for AppRouting {
actix_service::always_ready!();

fn call(&self, mut req: ServiceRequest) -> Self::Future {
let res = self.router.recognize_checked(&mut req, |req, guards| {
let res = self.router.recognize_fn(&mut req, |req, guards| {
if let Some(ref guards) = guards {
for f in guards {
if !f.check(req.head()) {
@ -349,6 +352,8 @@ mod tests {
}
}

// allow deprecated App::data
#[allow(deprecated)]
#[actix_rt::test]
async fn test_drop_data() {
let data = Arc::new(AtomicBool::new(false));
@ -62,6 +62,8 @@ impl AppService {
(self.config, self.services)
}

/// Clones inner config and default service, returning new `AppService` with empty service list
/// marked as non-root.
pub(crate) fn clone_config(&self) -> Self {
AppService {
config: self.config.clone(),
@ -71,12 +73,12 @@ impl AppService {
}
}

/// Service configuration
/// Returns reference to configuration.
pub fn config(&self) -> &AppConfig {
&self.config
}

/// Default resource
/// Returns default handler factory.
pub fn default_service(&self) -> Rc<HttpNewService> {
self.default.clone()
}
@ -92,9 +94,9 @@ impl AppService {
F: IntoServiceFactory<S, ServiceRequest>,
S: ServiceFactory<
ServiceRequest,
Config = (),
Response = ServiceResponse,
Error = Error,
Config = (),
InitError = (),
> + 'static,
{
@ -116,6 +118,7 @@ impl AppConfig {
AppConfig { secure, host, addr }
}

/// Needed in actix-test crate. Semver exempt.
#[doc(hidden)]
pub fn __priv_test_new(secure: bool, host: String, addr: SocketAddr) -> Self {
AppConfig::new(secure, host, addr)
@ -141,6 +144,11 @@ impl AppConfig {
pub fn local_addr(&self) -> SocketAddr {
self.addr
}

#[cfg(test)]
pub(crate) fn set_host(&mut self, host: &str) {
self.host = host.to_owned();
}
}

impl Default for AppConfig {
@ -191,6 +199,7 @@ impl ServiceConfig {
/// Add shared app data item.
///
/// Counterpart to [`App::data()`](crate::App::data).
#[deprecated(since = "4.0.0", note = "Use `.app_data(Data::new(val))` instead.")]
pub fn data<U: 'static>(&mut self, data: U) -> &mut Self {
self.app_data(Data::new(data));
self
@ -240,7 +249,7 @@ impl ServiceConfig {
U: AsRef<str>,
{
let mut rdef = ResourceDef::new(url.as_ref());
*rdef.name_mut() = name.as_ref().to_string();
rdef.set_name(name.as_ref());
self.external.push(rdef);
self
}
@ -256,6 +265,8 @@ mod tests {
use crate::test::{call_service, init_service, read_body, TestRequest};
use crate::{web, App, HttpRequest, HttpResponse};

// allow deprecated `ServiceConfig::data`
#[allow(deprecated)]
#[actix_rt::test]
async fn test_data() {
let cfg = |cfg: &mut ServiceConfig| {
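The `ServiceConfig` hunks above deprecate `.data()` in favour of `.app_data(Data::new(..))`. A small, assumed usage sketch of how such a config function is typically plugged in via `App::configure` (`my_config` and the route are illustrative, not part of the diff):

```rust
use actix_web::{web, App, HttpResponse};

fn my_config(cfg: &mut web::ServiceConfig) {
    // replaces the deprecated `cfg.data(..)` helper
    cfg.app_data(web::Data::new(String::from("shared value")));
    cfg.route("/ping", web::get().to(|| async { HttpResponse::Ok().body("pong") }));
}

fn build() {
    // normally wrapped in `HttpServer::new(|| ...)`
    let _app = App::new().configure(my_config);
}
```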
src/data.rs
@ -36,6 +36,11 @@ pub(crate) type FnDataFactory =
/// If route data is not set for a handler, using `Data<T>` extractor would cause *Internal
/// Server Error* response.
///
// TODO: document `dyn T` functionality through converting an Arc
// TODO: note equivalence of req.app_data<Data<T>> and Data<T> extractor
// TODO: note that data must be inserted using Data<T> in order to extract it
///
/// # Examples
/// ```
/// use std::sync::Mutex;
/// use actix_web::{web, App, HttpResponse, Responder};
@ -154,6 +159,8 @@ mod tests {
web, App, HttpResponse,
};

// allow deprecated App::data
#[allow(deprecated)]
#[actix_rt::test]
async fn test_data_extractor() {
let srv = init_service(App::new().data("TEST".to_string()).service(
@ -221,6 +228,8 @@ mod tests {
assert_eq!(resp.status(), StatusCode::INTERNAL_SERVER_ERROR);
}

// allow deprecated App::data
#[allow(deprecated)]
#[actix_rt::test]
async fn test_route_data_extractor() {
let srv = init_service(
@ -250,6 +259,8 @@ mod tests {
assert_eq!(resp.status(), StatusCode::INTERNAL_SERVER_ERROR);
}

// allow deprecated App::data
#[allow(deprecated)]
#[actix_rt::test]
async fn test_override_data() {
let srv =
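The TODO notes added above point out that a value is only reachable through the `Data<T>` extractor if it was registered wrapped in `Data<T>` (directly or via the deprecated `.data()` helper). A short illustrative sketch of that distinction; the handler and values are hypothetical:

```rust
use actix_web::{web, App, HttpResponse, Responder};

async fn handler(value: web::Data<u32>) -> impl Responder {
    HttpResponse::Ok().body(value.to_string())
}

fn build() {
    // extractable: registered wrapped in Data<u32>
    let _ok = App::new()
        .app_data(web::Data::new(29u32))
        .route("/", web::get().to(handler));

    // NOT extractable as Data<u32>: registered as a bare u32, so the handler
    // above would respond with 500 Internal Server Error.
    let _not_ok = App::new()
        .app_data(29u32)
        .route("/", web::get().to(handler));
}
```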
src/dev.rs (new file)
@ -0,0 +1,106 @@
//! Lower-level types and re-exports.
//!
//! Most users will not have to interact with the types in this module, but it is useful for those
//! writing extractors, middleware and libraries, or interacting with the service API directly.

pub use crate::config::{AppConfig, AppService};
#[doc(hidden)]
pub use crate::handler::Handler;
pub use crate::info::{ConnectionInfo, PeerAddr};
pub use crate::rmap::ResourceMap;
pub use crate::service::{HttpServiceFactory, ServiceRequest, ServiceResponse, WebService};

pub use crate::types::form::UrlEncoded;
pub use crate::types::json::JsonBody;
pub use crate::types::readlines::Readlines;

pub use actix_http::body::{AnyBody, Body, BodySize, MessageBody, ResponseBody, SizedStream};

#[cfg(feature = "__compress")]
pub use actix_http::encoding::Decoder as Decompress;
pub use actix_http::{Extensions, Payload, PayloadStream, RequestHead, Response, ResponseHead};
pub use actix_router::{Path, ResourceDef, ResourcePath, Url};
pub use actix_server::Server;
pub use actix_service::{
always_ready, fn_factory, fn_service, forward_ready, Service, ServiceFactory, Transform,
};

use crate::http::header::ContentEncoding;
use actix_http::ResponseBuilder;

use actix_router::Patterns;

pub(crate) fn ensure_leading_slash(mut patterns: Patterns) -> Patterns {
match &mut patterns {
Patterns::Single(pat) => {
if !pat.is_empty() && !pat.starts_with('/') {
pat.insert(0, '/');
};
}
Patterns::List(pats) => {
for pat in pats {
if !pat.is_empty() && !pat.starts_with('/') {
pat.insert(0, '/');
};
}
}
}

patterns
}
struct Enc(ContentEncoding);

/// Helper trait that allows to set specific encoding for response.
pub trait BodyEncoding {
/// Get content encoding
fn get_encoding(&self) -> Option<ContentEncoding>;

/// Set content encoding
///
/// Must be used with [`crate::middleware::Compress`] to take effect.
fn encoding(&mut self, encoding: ContentEncoding) -> &mut Self;
}

impl BodyEncoding for ResponseBuilder {
fn get_encoding(&self) -> Option<ContentEncoding> {
self.extensions().get::<Enc>().map(|enc| enc.0)
}

fn encoding(&mut self, encoding: ContentEncoding) -> &mut Self {
self.extensions_mut().insert(Enc(encoding));
self
}
}

impl<B> BodyEncoding for Response<B> {
fn get_encoding(&self) -> Option<ContentEncoding> {
self.extensions().get::<Enc>().map(|enc| enc.0)
}

fn encoding(&mut self, encoding: ContentEncoding) -> &mut Self {
self.extensions_mut().insert(Enc(encoding));
self
}
}

impl BodyEncoding for crate::HttpResponseBuilder {
fn get_encoding(&self) -> Option<ContentEncoding> {
self.extensions().get::<Enc>().map(|enc| enc.0)
}

fn encoding(&mut self, encoding: ContentEncoding) -> &mut Self {
self.extensions_mut().insert(Enc(encoding));
self
}
}

impl<B> BodyEncoding for crate::HttpResponse<B> {
fn get_encoding(&self) -> Option<ContentEncoding> {
self.extensions().get::<Enc>().map(|enc| enc.0)
}

fn encoding(&mut self, encoding: ContentEncoding) -> &mut Self {
self.extensions_mut().insert(Enc(encoding));
self
}
}
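A hedged usage sketch for the `BodyEncoding` trait introduced in `dev.rs` above; the requested encoding only takes effect when the `Compress` middleware is installed and the default compression features are enabled (the handler and app wiring are illustrative):

```rust
use actix_web::dev::BodyEncoding;
use actix_web::http::header::ContentEncoding;
use actix_web::{middleware, web, App, HttpResponse, Responder};

async fn handler() -> impl Responder {
    let mut res = HttpResponse::Ok().body("hello");
    // ask the Compress middleware (when installed) to gzip this response
    res.encoding(ContentEncoding::Gzip);
    res
}

fn build() {
    let _app = App::new()
        .wrap(middleware::Compress::default())
        .route("/", web::get().to(handler));
}
```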
@ -1,6 +1,4 @@
#[macro_export]
#[doc(hidden)]
macro_rules! __downcast_get_type_id {
macro_rules! downcast_get_type_id {
() => {
/// A helper method to get the type ID of the type
/// this trait is implemented on.
@ -30,10 +28,8 @@ macro_rules! __downcast_get_type_id {
};
}

//Generate implementation for dyn $name
#[doc(hidden)]
#[macro_export]
macro_rules! __downcast_dyn {
// Generate implementation for dyn $name
macro_rules! downcast_dyn {
($name:ident) => {
/// A struct with a private constructor, for use with
/// `__private_get_type_id__`. Its single field is private,
@ -80,15 +76,17 @@ macro_rules! __downcast_dyn {
};
}

pub(crate) use {downcast_dyn, downcast_get_type_id};

#[cfg(test)]
mod tests {
#![allow(clippy::upper_case_acronyms)]

trait MB {
__downcast_get_type_id!();
downcast_get_type_id!();
}

__downcast_dyn!(MB);
downcast_dyn!(MB);

impl MB for String {}
impl MB for () {}
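The macros above generate a `TypeId`-based downcast for trait objects; the diff only changes their visibility from exported macros to crate-private ones. A hand-written sketch of the underlying pattern, independent of the macros (all names are illustrative):

```rust
use std::any::TypeId;

trait Reportable: 'static {
    // Default method: each implementor reports its own concrete TypeId.
    fn type_id_of_self(&self) -> TypeId {
        TypeId::of::<Self>()
    }
}

impl dyn Reportable {
    fn downcast_ref<T: Reportable>(&self) -> Option<&T> {
        if self.type_id_of_self() == TypeId::of::<T>() {
            // SAFETY: the TypeId check above guarantees the concrete type is T.
            Some(unsafe { &*(self as *const dyn Reportable as *const T) })
        } else {
            None
        }
    }
}

struct MyError;
impl Reportable for MyError {}

fn demo() {
    let err: Box<dyn Reportable> = Box::new(MyError);
    assert!(err.downcast_ref::<MyError>().is_some());
}
```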
@ -18,6 +18,7 @@ mod response_error;
pub use self::error::Error;
pub use self::internal::*;
pub use self::response_error::ResponseError;
pub(crate) use macros::{downcast_dyn, downcast_get_type_id};

/// A convenience [`Result`](std::result::Result) for Actix Web operations.
///
@ -9,7 +9,7 @@ use std::{
use actix_http::{body::AnyBody, header, Response, StatusCode};
use bytes::BytesMut;

use crate::{__downcast_dyn, __downcast_get_type_id};
use crate::error::{downcast_dyn, downcast_get_type_id};
use crate::{helpers, HttpResponse};

/// Errors that can generate responses.
@ -41,10 +41,10 @@ pub trait ResponseError: fmt::Debug + fmt::Display {
res.set_body(AnyBody::from(buf))
}

__downcast_get_type_id!();
downcast_get_type_id!();
}

__downcast_dyn!(ResponseError);
downcast_dyn!(ResponseError);

impl ResponseError for Box<dyn StdError + 'static> {}

@ -57,6 +57,10 @@ impl ResponseError for serde::de::value::Error {
}
}

impl ResponseError for serde_json::Error {}

impl ResponseError for serde_urlencoded::ser::Error {}

impl ResponseError for std::str::Utf8Error {
fn status_code(&self) -> StatusCode {
StatusCode::BAD_REQUEST
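For context on the `ResponseError` trait shown above, a minimal sketch of implementing it for a custom error type; the default `error_response` body is derived from the `Display` output, so only `status_code` is overridden here (the error type is illustrative):

```rust
use std::fmt;

use actix_web::{http::StatusCode, ResponseError};

#[derive(Debug)]
struct TimeoutError;

impl fmt::Display for TimeoutError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "operation timed out")
    }
}

impl ResponseError for TimeoutError {
    // map this error to a 504 response; the body defaults to the Display text
    fn status_code(&self) -> StatusCode {
        StatusCode::GATEWAY_TIMEOUT
    }
}
```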
Some files were not shown because too many files have changed in this diff.