Mirror of https://github.com/fafhrd91/actix-web, synced 2025-07-03 09:36:36 +02:00

Compare commits (154 commits): codegen-v0 ... http-test-
Commit SHAs:

ce6d520215 3e25742a41 20f4cfe6b5 6408291ab0 8d260e599f 14bcf72ec1 6485434a33
16c7c16463 9b0fdca6e9 8759d79b03 c0d5d7bdb5 40eab1f091 75517cce82 9b51624b27
8e2ae8cd40 9a2f8450e0 23ef51609e f7d629a61a e0845d9ad9 2f79daec16 f3f41a0cc7
987067698b b62f1b4ef7 df5257c373 226ea696ce e524fc86ea 7e990e423f 8f9a12ed5d
c6eba2da9b 06c7945801 0dba6310c6 f7d7d92984 3d6ea7fe9b 8dbf7da89f de92b3be2e
5d0e8138ee 6b7196225e 265fa0d050 062127a210 3926416580 43671ae4aa 264a703d94
498fb954b3 2253eae2bb 8e76a1c775 dce57a79c9 6a5b370206 b1c85ba85b 9aab911600
017e40f733 45592b37b6 8abcb94512 f2cacc4c9d 56b9c0d08e de9e41484a 2fed978597
40048a5811 e942d3e3b1 09cffc093c c58f287044 7b27493e4c 478b33b8a3 592b40f914
fe5279c77a 80d222aa78 a03a2a0076 745e738955 1fd90f0b10 a35804b89f 5611b98c0d
dce9438518 be986d96b3 8ddb24b49b 87f627cd5d 03456b8a33 8c2fad3164 62fbd225bc
0fa4d999d9 da4c849f62 49cd303c3b 955c3ac0c4 56e5c19b85 3f03af1c59 25c0673278
e7a05f9892 2f13e5f675 9f964751f6 fcca515387 075932d823 cb379c0e0c d4a5d450de
542200cbc2 d0c08dbb7d d0b5fb18d2 12fb3412a5 2665357a0c 693271e571 10ef9b0751
ce00c88963 75e6ffb057 ad38973767 1c1d6477ef 53509a5361 a6f27baff1 218e34ee17
11bfa84926 5aa6f713c7 151a15da74 1ce58ecb30 f940653981 b291e29882 f843776f36
52f7d96358 51e573b888 38e015432b f5895d5eff a0c4bf8d1b 594e3a6ef1 a808a26d8c
de62e8b025 3486edabcf 4c59a34513 1b706b3069 a9f445875a e0f02c1d9e 092dbba5b9
ff4b2d251f 98faa61afe 3f2db9e75c 074d18209d 593fbde46a 161861997c 3d621677a5
0c144054cb b0fbe0dfd8 b653bf557f 1d1a65282f b0a363a7ae b4d3c2394d 5ca42df89a
fc5ecdc30b 7fe800c3ff 075df88a07 391d8a744a 5b6cb681b9 0957ec40b4 ccf430d74a
c84c1f0f15 e9279dfbb8 a68239adaa 40a4b1ccd5 7f5a8c0851 bcdde1d4ea 30aa64ea32
@@ -6,9 +6,12 @@ lint-all = "clippy --workspace --all-features --tests --examples --bins -- -Dcli
ci-check-min = "hack --workspace check --no-default-features"
ci-check-default = "hack --workspace check"
ci-check-default-tests = "check --workspace --tests"
ci-check-all-feature-powerset="hack --workspace --feature-powerset --skip=__compress,io-uring check"
ci-check-all-feature-powerset="hack --workspace --feature-powerset --skip=__compress,experimental-io-uring check"
ci-check-all-feature-powerset-linux="hack --workspace --feature-powerset --skip=__compress check"

# testing
ci-doctest-default = "test --workspace --doc --no-fail-fast -- --nocapture"
ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture"

# compile docs as docs.rs would
# RUSTDOCFLAGS="--cfg=docsrs" cargo +nightly doc --no-deps --workspace
@ -1,4 +1,4 @@
|
||||
name: CI (master only)
|
||||
name: CI (post-merge)
|
||||
|
||||
on:
|
||||
push:
|
||||
@ -23,6 +23,7 @@ jobs:
|
||||
CI: 1
|
||||
CARGO_INCREMENTAL: 0
|
||||
VCPKGRS_DYNAMIC: 1
|
||||
CARGO_UNSTABLE_SPARSE_REGISTRY: true
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
@ -44,18 +45,15 @@ jobs:
|
||||
profile: minimal
|
||||
override: true
|
||||
|
||||
- name: Install cargo-hack
|
||||
uses: taiki-e/install-action@cargo-hack
|
||||
|
||||
- name: Generate Cargo.lock
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: generate-lockfile }
|
||||
- name: Cache Dependencies
|
||||
uses: Swatinem/rust-cache@v1.2.0
|
||||
|
||||
- name: Install cargo-hack
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: install
|
||||
args: cargo-hack
|
||||
|
||||
- name: check minimal
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: ci-check-min }
|
||||
@ -78,23 +76,19 @@ jobs:
|
||||
cargo test --lib --tests -p=actix-multipart --all-features
|
||||
cargo test --lib --tests -p=actix-web-actors --all-features
|
||||
|
||||
- name: tests (io-uring)
|
||||
if: matrix.target.os == 'ubuntu-latest'
|
||||
timeout-minutes: 60
|
||||
run: >
|
||||
sudo bash -c "ulimit -Sl 512
|
||||
&& ulimit -Hl 512
|
||||
&& PATH=$PATH:/usr/share/rust/.cargo/bin
|
||||
&& RUSTUP_TOOLCHAIN=${{ matrix.version }} cargo test --lib --tests -p=actix-files --all-features"
|
||||
|
||||
- name: Clear the cargo caches
|
||||
run: |
|
||||
cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
|
||||
cargo install cargo-cache --version 0.8.2 --no-default-features --features ci-autoclean
|
||||
cargo-cache
|
||||
|
||||
ci_feature_powerset_check:
|
||||
name: Verify Feature Combinations
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
env:
|
||||
CI: 1
|
||||
CARGO_INCREMENTAL: 0
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
@ -105,18 +99,15 @@ jobs:
|
||||
profile: minimal
|
||||
override: true
|
||||
|
||||
- name: Install cargo-hack
|
||||
uses: taiki-e/install-action@cargo-hack
|
||||
|
||||
- name: Generate Cargo.lock
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: generate-lockfile }
|
||||
- name: Cache Dependencies
|
||||
uses: Swatinem/rust-cache@v1.2.0
|
||||
|
||||
- name: Install cargo-hack
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: install
|
||||
args: cargo-hack
|
||||
|
||||
- name: check feature combinations
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: ci-check-all-feature-powerset }
|
||||
@ -125,29 +116,35 @@ jobs:
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: ci-check-all-feature-powerset-linux }
|
||||
|
||||
coverage:
|
||||
name: coverage
|
||||
nextest:
|
||||
name: nextest
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
env:
|
||||
CI: 1
|
||||
CARGO_INCREMENTAL: 0
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install stable
|
||||
- name: Install Rust
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable-x86_64-unknown-linux-gnu
|
||||
toolchain: stable
|
||||
profile: minimal
|
||||
override: true
|
||||
|
||||
- name: Install nextest
|
||||
uses: taiki-e/install-action@nextest
|
||||
|
||||
- name: Generate Cargo.lock
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: generate-lockfile }
|
||||
- name: Cache Dependencies
|
||||
uses: Swatinem/rust-cache@v1.2.0
|
||||
uses: Swatinem/rust-cache@v1.3.0
|
||||
|
||||
- name: Generate coverage file
|
||||
run: |
|
||||
cargo install cargo-tarpaulin --vers "^0.13"
|
||||
cargo tarpaulin --workspace --features=rustls,openssl --out Xml --verbose
|
||||
- name: Upload to Codecov
|
||||
uses: codecov/codecov-action@v1
|
||||
with: { file: cobertura.xml }
|
||||
- name: Test with cargo-nextest
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: nextest
|
||||
args: run
|
50 .github/workflows/ci.yml (vendored)
@ -16,7 +16,7 @@ jobs:
|
||||
- { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
|
||||
- { name: Windows, os: windows-2022, triple: x86_64-pc-windows-msvc }
|
||||
version:
|
||||
- 1.54.0 # MSRV
|
||||
- 1.57.0 # MSRV
|
||||
- stable
|
||||
|
||||
name: ${{ matrix.target.name }} / ${{ matrix.version }}
|
||||
@ -47,18 +47,22 @@ jobs:
|
||||
profile: minimal
|
||||
override: true
|
||||
|
||||
- name: Install cargo-hack
|
||||
uses: taiki-e/install-action@cargo-hack
|
||||
|
||||
- name: workaround MSRV issues
|
||||
if: matrix.version != 'stable'
|
||||
run: |
|
||||
cargo install cargo-edit --version=0.8.0
|
||||
cargo add const-str@0.3 --dev -p=actix-web
|
||||
cargo add const-str@0.3 --dev -p=awc
|
||||
|
||||
- name: Generate Cargo.lock
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: generate-lockfile }
|
||||
- name: Cache Dependencies
|
||||
uses: Swatinem/rust-cache@v1.2.0
|
||||
|
||||
- name: Install cargo-hack
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: install
|
||||
args: cargo-hack
|
||||
|
||||
- name: check minimal
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: ci-check-min }
|
||||
@ -81,19 +85,37 @@ jobs:
|
||||
cargo test --lib --tests -p=actix-multipart --all-features
|
||||
cargo test --lib --tests -p=actix-web-actors --all-features
|
||||
|
||||
- name: Clear the cargo caches
|
||||
run: |
|
||||
cargo install cargo-cache --version 0.8.2 --no-default-features --features ci-autoclean
|
||||
cargo-cache
|
||||
|
||||
io-uring:
|
||||
name: io-uring tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable-x86_64-unknown-linux-gnu
|
||||
profile: minimal
|
||||
override: true
|
||||
|
||||
- name: Generate Cargo.lock
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: generate-lockfile }
|
||||
- name: Cache Dependencies
|
||||
uses: Swatinem/rust-cache@v1.3.0
|
||||
|
||||
- name: tests (io-uring)
|
||||
if: matrix.target.os == 'ubuntu-latest'
|
||||
timeout-minutes: 60
|
||||
run: >
|
||||
sudo bash -c "ulimit -Sl 512
|
||||
&& ulimit -Hl 512
|
||||
&& PATH=$PATH:/usr/share/rust/.cargo/bin
|
||||
&& RUSTUP_TOOLCHAIN=${{ matrix.version }} cargo test --lib --tests -p=actix-files --all-features"
|
||||
|
||||
- name: Clear the cargo caches
|
||||
run: |
|
||||
cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
|
||||
cargo-cache
|
||||
&& RUSTUP_TOOLCHAIN=stable cargo test --lib --tests -p=actix-files --all-features"
|
||||
|
||||
rustdoc:
|
||||
name: doc tests
|
||||
|
36 .github/workflows/coverage.yml (vendored, new file)
@@ -0,0 +1,36 @@
# disabled because `cargo tarpaulin` currently segfaults

name: Coverage

on:
  push:
    branches: [master]

jobs:
  # job currently (1st Feb 2022) segfaults
  coverage:
    name: coverage
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Install stable
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable-x86_64-unknown-linux-gnu
          profile: minimal
          override: true

      - name: Generate Cargo.lock
        uses: actions-rs/cargo@v1
        with: { command: generate-lockfile }
      - name: Cache Dependencies
        uses: Swatinem/rust-cache@v1.2.0

      - name: Generate coverage file
        run: |
          cargo install cargo-tarpaulin --vers "^0.13"
          cargo tarpaulin --workspace --features=rustls,openssl --out Xml --verbose
      - name: Upload to Codecov
        uses: codecov/codecov-action@v1
        with: { file: cobertura.xml }
3 .prettierrc.json (new file)
@@ -0,0 +1,3 @@
{
  "proseWrap": "never"
}
1033 CHANGES.md (file diff suppressed because it is too large)
149 Cargo.toml
@ -1,36 +1,6 @@
|
||||
[package]
|
||||
name = "actix-web"
|
||||
version = "4.0.0-rc.1"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Rob Ede <robjtede@icloud.com>",
|
||||
]
|
||||
description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
|
||||
keywords = ["actix", "http", "web", "framework", "async"]
|
||||
categories = [
|
||||
"network-programming",
|
||||
"asynchronous",
|
||||
"web-programming::http-server",
|
||||
"web-programming::websocket"
|
||||
]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
# features that docs.rs will build with
|
||||
features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies", "secure-cookies"]
|
||||
rustdoc-args = ["--cfg", "docsrs"]
|
||||
|
||||
[lib]
|
||||
name = "actix_web"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[workspace]
|
||||
resolver = "2"
|
||||
members = [
|
||||
".",
|
||||
"actix-files",
|
||||
"actix-http-test",
|
||||
"actix-http",
|
||||
@ -39,93 +9,10 @@ members = [
|
||||
"actix-test",
|
||||
"actix-web-actors",
|
||||
"actix-web-codegen",
|
||||
"actix-web",
|
||||
"awc",
|
||||
]
|
||||
|
||||
[features]
|
||||
default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
|
||||
|
||||
# Brotli algorithm content-encoding support
|
||||
compress-brotli = ["actix-http/compress-brotli", "__compress"]
|
||||
# Gzip and deflate algorithms content-encoding support
|
||||
compress-gzip = ["actix-http/compress-gzip", "__compress"]
|
||||
# Zstd algorithm content-encoding support
|
||||
compress-zstd = ["actix-http/compress-zstd", "__compress"]
|
||||
|
||||
# support for cookies
|
||||
cookies = ["cookie"]
|
||||
|
||||
# secure cookies feature
|
||||
secure-cookies = ["cookie/secure"]
|
||||
|
||||
# openssl
|
||||
openssl = ["actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]
|
||||
|
||||
# rustls
|
||||
rustls = ["actix-http/rustls", "actix-tls/accept", "actix-tls/rustls"]
|
||||
|
||||
# Internal (PRIVATE!) features used to aid testing and checking feature status.
|
||||
# Don't rely on these whatsoever. They may disappear at anytime.
|
||||
__compress = []
|
||||
|
||||
# io-uring feature is only available on Linux.
|
||||
experimental-io-uring = ["actix-server/io-uring"]
|
||||
|
||||
[dependencies]
|
||||
actix-codec = "0.4.1"
|
||||
actix-macros = "0.2.3"
|
||||
actix-rt = "2.6"
|
||||
actix-server = "2"
|
||||
actix-service = "2.0.0"
|
||||
actix-utils = "3.0.0"
|
||||
actix-tls = { version = "3.0.0", default-features = false, optional = true }
|
||||
|
||||
actix-http = { version = "3.0.0-rc.1", features = ["http2", "ws"] }
|
||||
actix-router = "0.5.0-rc.3"
|
||||
actix-web-codegen = "0.5.0-rc.2"
|
||||
|
||||
ahash = "0.7"
|
||||
bytes = "1"
|
||||
cfg-if = "1"
|
||||
cookie = { version = "0.16", features = ["percent-encode"], optional = true }
|
||||
derive_more = "0.99.5"
|
||||
encoding_rs = "0.8"
|
||||
futures-core = { version = "0.3.7", default-features = false }
|
||||
futures-util = { version = "0.3.7", default-features = false }
|
||||
itoa = "1"
|
||||
language-tags = "0.3"
|
||||
once_cell = "1.5"
|
||||
log = "0.4"
|
||||
mime = "0.3"
|
||||
pin-project-lite = "0.2.7"
|
||||
regex = "1.4"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
serde_urlencoded = "0.7"
|
||||
smallvec = "1.6.1"
|
||||
socket2 = "0.4.0"
|
||||
time = { version = "0.3", default-features = false, features = ["formatting"] }
|
||||
url = "2.1"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-files = "0.6.0-beta.16"
|
||||
actix-test = { version = "0.1.0-beta.12", features = ["openssl", "rustls"] }
|
||||
awc = { version = "3.0.0-beta.20", features = ["openssl"] }
|
||||
|
||||
brotli = "3.3.3"
|
||||
const-str = "0.3"
|
||||
criterion = { version = "0.3", features = ["html_reports"] }
|
||||
env_logger = "0.9"
|
||||
flate2 = "1.0.13"
|
||||
futures-util = { version = "0.3.7", default-features = false, features = ["std"] }
|
||||
rand = "0.8"
|
||||
rcgen = "0.8"
|
||||
rustls-pemfile = "0.2"
|
||||
static_assertions = "1"
|
||||
tls-openssl = { package = "openssl", version = "0.10.9" }
|
||||
tls-rustls = { package = "rustls", version = "0.20.0" }
|
||||
zstd = "0.9"
|
||||
|
||||
[profile.dev]
|
||||
# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.
|
||||
debug = 0
|
||||
@ -142,7 +29,7 @@ actix-http-test = { path = "actix-http-test" }
|
||||
actix-multipart = { path = "actix-multipart" }
|
||||
actix-router = { path = "actix-router" }
|
||||
actix-test = { path = "actix-test" }
|
||||
actix-web = { path = "." }
|
||||
actix-web = { path = "actix-web" }
|
||||
actix-web-actors = { path = "actix-web-actors" }
|
||||
actix-web-codegen = { path = "actix-web-codegen" }
|
||||
awc = { path = "awc" }
|
||||
@ -155,35 +42,3 @@ awc = { path = "awc" }
|
||||
# actix-utils = { path = "../actix-net/actix-utils" }
|
||||
# actix-tls = { path = "../actix-net/actix-tls" }
|
||||
# actix-server = { path = "../actix-net/actix-server" }
|
||||
|
||||
[[test]]
|
||||
name = "test_server"
|
||||
required-features = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
|
||||
|
||||
[[test]]
|
||||
name = "compression"
|
||||
required-features = ["compress-brotli", "compress-gzip", "compress-zstd"]
|
||||
|
||||
[[example]]
|
||||
name = "basic"
|
||||
required-features = ["compress-gzip"]
|
||||
|
||||
[[example]]
|
||||
name = "uds"
|
||||
required-features = ["compress-gzip"]
|
||||
|
||||
[[example]]
|
||||
name = "on-connect"
|
||||
required-features = []
|
||||
|
||||
[[bench]]
|
||||
name = "server"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "service"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "responder"
|
||||
harness = false
|
||||
|
677 MIGRATION.md
@ -1,677 +0,0 @@
|
||||
## Unreleased
|
||||
|
||||
- `NormalizePath::default()` now strips trailing slashes. This was previously documented to be
  the case in v3 but the behavior now matches. The effect is that routes defined with trailing
  slashes become inaccessible when using `NormalizePath::default()`, so calling
  `NormalizePath::default()` will log a warning. It is advised that the `new` method be used
  instead.
|
||||
|
||||
Before: `#[get("/test/")]`
|
||||
After: `#[get("/test")]`
|
||||
|
||||
Alternatively, explicitly require trailing slashes: `NormalizePath::new(TrailingSlash::Always)`.
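  A short sketch of both options (assuming the 4.x constructors `NormalizePath::trim()` and
  `NormalizePath::new()`):

  ```rust
  use actix_web::{middleware::{NormalizePath, TrailingSlash}, App};

  fn apps() {
      // strip trailing slashes (matches the new default behavior, without the warning)
      let _trimmed = App::new().wrap(NormalizePath::trim());

      // or keep routes that were defined with trailing slashes reachable
      let _always = App::new().wrap(NormalizePath::new(TrailingSlash::Always));
  }
  ```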
|
||||
|
||||
- The `type Config` of `FromRequest` was removed.
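  For illustration, a minimal extractor against the current trait shape (the `ClientIp` type
  here is hypothetical); note the absence of `type Config`:

  ```rust
  use std::future::{ready, Ready};

  use actix_web::{dev::Payload, error::ErrorBadRequest, Error, FromRequest, HttpRequest};

  struct ClientIp(String);

  impl FromRequest for ClientIp {
      type Error = Error;
      type Future = Ready<Result<Self, Self::Error>>;
      // no `type Config` any more; extractor configuration is read via `req.app_data()` instead

      fn from_request(req: &HttpRequest, _payload: &mut Payload) -> Self::Future {
          ready(
              req.peer_addr()
                  .map(|addr| ClientIp(addr.ip().to_string()))
                  .ok_or_else(|| ErrorBadRequest("peer address unknown")),
          )
      }
  }
  ```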
|
||||
|
||||
- The `compress` feature flag has been split per supported algorithm (brotli, gzip, zstd). By
  default, all compression algorithms are enabled. To select which algorithms
  `middleware::Compress` includes, use the following flags:
|
||||
- `compress-brotli`
|
||||
- `compress-gzip`
|
||||
- `compress-zstd`
|
||||
  If you have specified dedicated `actix-web` features in your `Cargo.toml` and you still want
  compression enabled, change the feature selection as shown below:
|
||||
|
||||
Before: `"compress"`
|
||||
After: `"compress-brotli", "compress-gzip", "compress-zstd"`
|
||||
|
||||
|
||||
## 3.0.0
|
||||
|
||||
- The return type for `ServiceRequest::app_data::<T>()` was changed from returning a `Data<T>` to
|
||||
simply a `T`. To access a `Data<T>` use `ServiceRequest::app_data::<Data<T>>()`.
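  Sketched against the post-change signature (`app_data::<T>()` yielding `Option<&T>`):

  ```rust
  use actix_web::{dev::ServiceRequest, web::Data};

  struct AppState { requests_seen: u64 }

  fn peek(req: &ServiceRequest) {
      // a bare value registered with `App::app_data(AppState { .. })` comes back as `&AppState`
      let _plain: Option<&AppState> = req.app_data::<AppState>();

      // shared state registered as `App::app_data(Data::new(AppState { .. }))`
      // must be requested as `Data<AppState>` explicitly
      let _shared: Option<&Data<AppState>> = req.app_data::<Data<AppState>>();
  }
  ```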
|
||||
|
||||
- Cookie handling has been offloaded to the `cookie` crate:
|
||||
* `USERINFO_ENCODE_SET` is no longer exposed. Percent-encoding is still supported; check docs.
|
||||
* Some types now require lifetime parameters.
|
||||
|
||||
- The time crate was updated to `v0.2`, a major breaking change to the time crate, which affects
|
||||
any `actix-web` method previously expecting a time v0.1 input.
|
||||
|
||||
- Setting a cookie's SameSite property, explicitly, to `SameSite::None` will now
|
||||
result in `SameSite=None` being sent with the response Set-Cookie header.
|
||||
To create a cookie without a SameSite attribute, remove any calls setting same_site.
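  A small sketch (assuming the `cookie` crate API re-exported at `actix_web::cookie`):

  ```rust
  use actix_web::cookie::{Cookie, SameSite};

  fn main() {
      // explicitly setting SameSite::None now results in `SameSite=None` on the wire
      let explicit = Cookie::build("session", "abc123")
          .same_site(SameSite::None)
          .finish();
      println!("{}", explicit);

      // omit the attribute entirely by never calling `same_site`
      let unset = Cookie::build("session", "abc123").finish();
      println!("{}", unset);
  }
  ```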
|
||||
|
||||
- Support for Actors messages was moved to the `actix-http` crate and is enabled with the
  `actors` feature.
|
||||
|
||||
- The `content_length` function was removed from `actix-http`. You can set `Content-Length` by
  setting the response body as usual or by calling the `no_chunking` function.
|
||||
|
||||
- `BodySize::Sized64` variant has been removed. `BodySize::Sized` now receives a
|
||||
`u64` instead of a `usize`.
|
||||
|
||||
- Code that was using `path.<index>` to access a `web::Path<(A, B, C)>`s elements now needs to use
|
||||
destructuring or `.into_inner()`. For example:
|
||||
|
||||
```rust
|
||||
// Previously:
|
||||
async fn some_route(path: web::Path<(String, String)>) -> String {
|
||||
format!("Hello, {} {}", path.0, path.1)
|
||||
}
|
||||
|
||||
// Now (this also worked before):
|
||||
async fn some_route(path: web::Path<(String, String)>) -> String {
|
||||
let (first_name, last_name) = path.into_inner();
|
||||
format!("Hello, {} {}", first_name, last_name)
|
||||
}
|
||||
// Or (this wasn't previously supported):
|
||||
async fn some_route(web::Path((first_name, last_name)): web::Path<(String, String)>) -> String {
|
||||
format!("Hello, {} {}", first_name, last_name)
|
||||
}
|
||||
```
|
||||
|
||||
- `middleware::NormalizePath` can now also be configured to trim trailing slashes instead of always keeping one.
|
||||
It will need `middleware::normalize::TrailingSlash` when being constructed with `NormalizePath::new(...)`,
|
||||
or for an easier migration you can replace `wrap(middleware::NormalizePath)` with `wrap(middleware::NormalizePath::new(TrailingSlash::MergeOnly))`.
|
||||
|
||||
- `HttpServer::maxconn` is renamed to the more expressive `HttpServer::max_connections`.
|
||||
|
||||
- `HttpServer::maxconnrate` is renamed to the more expressive `HttpServer::max_connection_rate`.
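  Roughly, with example values (assuming the 3.x+ builder API):

  ```rust
  use actix_web::{App, HttpServer};

  #[actix_web::main]
  async fn main() -> std::io::Result<()> {
      HttpServer::new(|| App::new())
          .max_connections(25_000)   // was `maxconn`
          .max_connection_rate(256)  // was `maxconnrate`
          .bind(("127.0.0.1", 8080))?
          .run()
          .await
  }
  ```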
|
||||
|
||||
|
||||
## 2.0.0
|
||||
|
||||
- `HttpServer::start()` renamed to `HttpServer::run()`. It is also possible to `.await` the
  result of `run()`; in that case, it awaits server exit.
|
||||
|
||||
- `App::register_data()` renamed to `App::app_data()` and accepts any type `T: 'static`.
|
||||
Stored data is available via `HttpRequest::app_data()` method at runtime.
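  Roughly, using the names that later releases kept:

  ```rust
  use actix_web::{web, App, HttpRequest, HttpResponse, HttpServer};

  async fn show(req: HttpRequest) -> HttpResponse {
      // anything registered with `App::app_data()` can be read back as `&T`
      match req.app_data::<String>() {
          Some(greeting) => HttpResponse::Ok().body(greeting.clone()),
          None => HttpResponse::InternalServerError().finish(),
      }
  }

  #[actix_web::main]
  async fn main() -> std::io::Result<()> {
      HttpServer::new(|| {
          App::new()
              .app_data(String::from("hello")) // previously `register_data`
              .route("/", web::get().to(show))
      })
      .bind(("127.0.0.1", 8080))?
      .run()
      .await
  }
  ```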
|
||||
|
||||
- Extractor configuration must be registered with `App::app_data()` instead of `App::data()`
|
||||
|
||||
- Sync handlers have been removed and the `.to_async()` method has been renamed to `.to()`.
  Replace `fn` with `async fn` to convert a sync handler to an async one.
|
||||
|
||||
- `actix_http_test::TestServer` moved to the `actix_web::test` module. To start a test server,
  use the `test::start()` or `test_start_with_config()` methods.
|
||||
|
||||
- The `ResponseError` trait has been refactored. `ResponseError::error_response()` renders the
  HTTP response.
|
||||
|
||||
- Feature `rust-tls` renamed to `rustls`
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
actix-web = { version = "2.0.0", features = ["rust-tls"] }
|
||||
```
|
||||
|
||||
use
|
||||
|
||||
```rust
|
||||
actix-web = { version = "2.0.0", features = ["rustls"] }
|
||||
```
|
||||
|
||||
- Feature `ssl` renamed to `openssl`
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
actix-web = { version = "2.0.0", features = ["ssl"] }
|
||||
```
|
||||
|
||||
use
|
||||
|
||||
```rust
|
||||
actix-web = { version = "2.0.0", features = ["openssl"] }
|
||||
```
|
||||
- `Cors` builder now requires that you call `.finish()` to construct the middleware
|
||||
|
||||
## 1.0.1
|
||||
|
||||
- Cors middleware has been moved to `actix-cors` crate
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
use actix_web::middleware::cors::Cors;
|
||||
```
|
||||
|
||||
use
|
||||
|
||||
```rust
|
||||
use actix_cors::Cors;
|
||||
```
|
||||
|
||||
- Identity middleware has been moved to `actix-identity` crate
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
use actix_web::middleware::identity::{Identity, CookieIdentityPolicy, IdentityService};
|
||||
```
|
||||
|
||||
use
|
||||
|
||||
```rust
|
||||
use actix_identity::{Identity, CookieIdentityPolicy, IdentityService};
|
||||
```
|
||||
|
||||
|
||||
## 1.0.0
|
||||
|
||||
- Extractor configuration. In version 1.0 this is handled with the new `Data` mechanism for both setting and retrieving the configuration
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
|
||||
#[derive(Default)]
|
||||
struct ExtractorConfig {
|
||||
config: String,
|
||||
}
|
||||
|
||||
impl FromRequest for YourExtractor {
|
||||
type Config = ExtractorConfig;
|
||||
type Result = Result<YourExtractor, Error>;
|
||||
|
||||
fn from_request(req: &HttpRequest, cfg: &Self::Config) -> Self::Result {
|
||||
println!("use the config: {:?}", cfg.config);
|
||||
...
|
||||
}
|
||||
}
|
||||
|
||||
App::new().resource("/route_with_config", |r| {
|
||||
r.post().with_config(handler_fn, |cfg| {
|
||||
cfg.0.config = "test".to_string();
|
||||
})
|
||||
})
|
||||
|
||||
```
|
||||
|
||||
use the HttpRequest to get the configuration like any other `Data` with `req.app_data::<C>()` and set it with the `data()` method on the `resource`
|
||||
|
||||
```rust
|
||||
#[derive(Default)]
|
||||
struct ExtractorConfig {
|
||||
config: String,
|
||||
}
|
||||
|
||||
impl FromRequest for YourExtractor {
|
||||
type Error = Error;
|
||||
type Future = Result<Self, Self::Error>;
|
||||
type Config = ExtractorConfig;
|
||||
|
||||
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
|
||||
let cfg = req.app_data::<ExtractorConfig>();
|
||||
println!("config data?: {:?}", cfg.unwrap().role);
|
||||
...
|
||||
}
|
||||
}
|
||||
|
||||
App::new().service(
|
||||
resource("/route_with_config")
|
||||
.data(ExtractorConfig {
|
||||
config: "test".to_string(),
|
||||
})
|
||||
.route(post().to(handler_fn)),
|
||||
)
|
||||
```
|
||||
|
||||
- Resource registration. 1.0 version uses generalized resource
|
||||
registration via `.service()` method.
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
App.new().resource("/welcome", |r| r.f(welcome))
|
||||
```
|
||||
|
||||
use App's or Scope's `.service()` method. `.service()` method accepts
|
||||
object that implements `HttpServiceFactory` trait. By default
|
||||
actix-web provides `Resource` and `Scope` services.
|
||||
|
||||
```rust
|
||||
App.new().service(
|
||||
web::resource("/welcome")
|
||||
.route(web::get().to(welcome))
|
||||
.route(web::post().to(post_handler))
|
||||
```
|
||||
|
||||
- Scope registration.
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
let app = App::new().scope("/{project_id}", |scope| {
|
||||
scope
|
||||
.resource("/path1", |r| r.f(|_| HttpResponse::Ok()))
|
||||
.resource("/path2", |r| r.f(|_| HttpResponse::Ok()))
|
||||
.resource("/path3", |r| r.f(|_| HttpResponse::MethodNotAllowed()))
|
||||
});
|
||||
```
|
||||
|
||||
use `.service()` for registration and `web::scope()` as scope object factory.
|
||||
|
||||
```rust
|
||||
let app = App::new().service(
|
||||
web::scope("/{project_id}")
|
||||
.service(web::resource("/path1").to(|| HttpResponse::Ok()))
|
||||
.service(web::resource("/path2").to(|| HttpResponse::Ok()))
|
||||
.service(web::resource("/path3").to(|| HttpResponse::MethodNotAllowed()))
|
||||
);
|
||||
```
|
||||
|
||||
- `.with()`, `.with_async()` registration methods have been renamed to `.to()` and `.to_async()`.
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
App.new().resource("/welcome", |r| r.with(welcome))
|
||||
```
|
||||
|
||||
use `.to()` or `.to_async()` methods
|
||||
|
||||
```rust
|
||||
App.new().service(web::resource("/welcome").to(welcome))
|
||||
```
|
||||
|
||||
- Passing arguments to handler with extractors, multiple arguments are allowed
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
fn welcome((body, req): (Bytes, HttpRequest)) -> ... {
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
use multiple arguments
|
||||
|
||||
```rust
|
||||
fn welcome(body: Bytes, req: HttpRequest) -> ... {
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
- `.f()`, `.a()` and `.h()` handler registration methods have been removed.
|
||||
Use `.to()` for handlers and `.to_async()` for async handlers. Handler function
|
||||
must use extractors.
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
App.new().resource("/welcome", |r| r.f(welcome))
|
||||
```
|
||||
|
||||
use App's `to()` or `to_async()` methods
|
||||
|
||||
```rust
|
||||
App.new().service(web::resource("/welcome").to(welcome))
|
||||
```
|
||||
|
||||
- `HttpRequest` does not provide access to request's payload stream.
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
fn index(req: &HttpRequest) -> Box<Future<Item=HttpResponse, Error=Error>> {
|
||||
req
|
||||
.payload()
|
||||
.from_err()
|
||||
.fold((), |_, chunk| {
|
||||
...
|
||||
})
|
||||
.map(|_| HttpResponse::Ok().finish())
|
||||
.responder()
|
||||
}
|
||||
```
|
||||
|
||||
use `Payload` extractor
|
||||
|
||||
```rust
|
||||
fn index(stream: web::Payload) -> impl Future<Item=HttpResponse, Error=Error> {
|
||||
stream
|
||||
.from_err()
|
||||
.fold((), |_, chunk| {
|
||||
...
|
||||
})
|
||||
.map(|_| HttpResponse::Ok().finish())
|
||||
}
|
||||
```
|
||||
|
||||
- `State` is now `Data`. You register Data during the App initialization process
|
||||
and then access it from handlers either using a Data extractor or using
|
||||
HttpRequest's api.
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
App.with_state(T)
|
||||
```
|
||||
|
||||
use App's `data` method
|
||||
|
||||
```rust
|
||||
App.new()
|
||||
.data(T)
|
||||
```
|
||||
|
||||
and either use the Data extractor within your handler
|
||||
|
||||
```rust
|
||||
use actix_web::web::Data;
|
||||
|
||||
fn endpoint_handler(data: Data<T>) {
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
.. or access your Data element from the HttpRequest
|
||||
|
||||
```rust
|
||||
fn endpoint_handler(req: HttpRequest) {
|
||||
let data: Option<Data<T>> = req.app_data::<T>();
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
- AsyncResponder is removed, use `.to_async()` registration method and `impl Future<>` as result type.
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
use actix_web::AsyncResponder;
|
||||
|
||||
fn endpoint_handler(...) -> impl Future<Item=HttpResponse, Error=Error>{
|
||||
...
|
||||
.responder()
|
||||
}
|
||||
```
|
||||
|
||||
.. simply omit AsyncResponder and the corresponding responder() finish method
|
||||
|
||||
|
||||
- Middleware
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
let app = App::new()
|
||||
.middleware(middleware::Logger::default())
|
||||
```
|
||||
|
||||
use `.wrap()` method
|
||||
|
||||
```rust
|
||||
let app = App::new()
|
||||
.wrap(middleware::Logger::default())
|
||||
.route("/index.html", web::get().to(index));
|
||||
```
|
||||
|
||||
- `HttpRequest::body()`, `HttpRequest::urlencoded()`, `HttpRequest::json()`, `HttpRequest::multipart()`
|
||||
method have been removed. Use `Bytes`, `String`, `Form`, `Json`, `Multipart` extractors instead.
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
fn index(req: &HttpRequest) -> Responder {
|
||||
req.body()
|
||||
.and_then(|body| {
|
||||
...
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
use
|
||||
|
||||
```rust
|
||||
fn index(body: Bytes) -> Responder {
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
- `actix_web::server` module has been removed. To start http server use `actix_web::HttpServer` type
|
||||
|
||||
- StaticFiles and NamedFile have been moved to a separate crate.
|
||||
|
||||
instead of `use actix_web::fs::StaticFile`
|
||||
|
||||
use `use actix_files::Files`
|
||||
|
||||
instead of `use actix_web::fs::Namedfile`
|
||||
|
||||
use `use actix_files::NamedFile`
|
||||
|
||||
- Multipart has been moved to a separate crate.
|
||||
|
||||
instead of `use actix_web::multipart::Multipart`
|
||||
|
||||
use `use actix_multipart::Multipart`
|
||||
|
||||
- Response compression is not enabled by default.
|
||||
To enable, use `Compress` middleware, `App::new().wrap(Compress::default())`.
|
||||
|
||||
- Session middleware moved to actix-session crate
|
||||
|
||||
- Actors support have been moved to `actix-web-actors` crate
|
||||
|
||||
- Custom Error
|
||||
|
||||
Instead of the `error_response` method alone, `ResponseError` now provides two methods: `error_response` and `render_response`. `error_response` creates the error response and `render_response` returns the error response to the caller.
|
||||
|
||||
The simplest migration from 0.7 to 1.0 is to add the method below to your custom implementation of `ResponseError`:
|
||||
|
||||
```rust
|
||||
fn render_response(&self) -> HttpResponse {
|
||||
self.error_response()
|
||||
}
|
||||
```
|
||||
|
||||
## 0.7.15
|
||||
|
||||
- The `' '` character is not percent decoded anymore before matching routes. If you need to use it in
|
||||
your routes, you should use `%20`.
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
fn main() {
|
||||
let app = App::new().resource("/my index", |r| {
|
||||
r.method(http::Method::GET)
|
||||
.with(index);
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
use
|
||||
|
||||
```rust
|
||||
fn main() {
|
||||
let app = App::new().resource("/my%20index", |r| {
|
||||
r.method(http::Method::GET)
|
||||
.with(index);
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
- If you used `AsyncResult::async` you need to replace it with `AsyncResult::future`
|
||||
|
||||
|
||||
## 0.7.4
|
||||
|
||||
- `Route::with_config()`/`Route::with_async_config()` always passes configuration objects as tuple
|
||||
even for handler with one parameter.
|
||||
|
||||
|
||||
## 0.7
|
||||
|
||||
- `HttpRequest` does not implement `Stream` anymore. If you need to read request payload
|
||||
use `HttpMessage::payload()` method.
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
fn index(req: HttpRequest) -> impl Responder {
|
||||
req
|
||||
.from_err()
|
||||
.fold(...)
|
||||
....
|
||||
}
|
||||
```
|
||||
|
||||
use `.payload()`
|
||||
|
||||
```rust
|
||||
fn index(req: HttpRequest) -> impl Responder {
|
||||
req
|
||||
.payload() // <- get request payload stream
|
||||
.from_err()
|
||||
.fold(...)
|
||||
....
|
||||
}
|
||||
```
|
||||
|
||||
- [Middleware](https://actix.rs/actix-web/actix_web/middleware/trait.Middleware.html)
|
||||
trait uses `&HttpRequest` instead of `&mut HttpRequest`.
|
||||
|
||||
- Removed `Route::with2()` and `Route::with3()` use tuple of extractors instead.
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
fn index(query: Query<..>, info: Json<MyStruct>) -> impl Responder {}
|
||||
```
|
||||
|
||||
use tuple of extractors and use `.with()` for registration:
|
||||
|
||||
```rust
|
||||
fn index((query, json): (Query<..>, Json<MyStruct>)) -> impl Responder {}
|
||||
```
|
||||
|
||||
- `Handler::handle()` uses `&self` instead of `&mut self`
|
||||
|
||||
- `Handler::handle()` accepts reference to `HttpRequest<_>` instead of value
|
||||
|
||||
- Removed deprecated `HttpServer::threads()`, use
|
||||
[HttpServer::workers()](https://actix.rs/actix-web/actix_web/server/struct.HttpServer.html#method.workers) instead.
|
||||
|
||||
- Renamed `client::ClientConnectorError::Connector` to
|
||||
`client::ClientConnectorError::Resolver`
|
||||
|
||||
- `Route::with()` does not return `ExtractorConfig`, to configure
|
||||
extractor use `Route::with_config()`
|
||||
|
||||
instead of
|
||||
|
||||
```rust
|
||||
fn main() {
|
||||
let app = App::new().resource("/index.html", |r| {
|
||||
r.method(http::Method::GET)
|
||||
.with(index)
|
||||
.limit(4096); // <- limit size of the payload
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
use
|
||||
|
||||
```rust
|
||||
|
||||
fn main() {
|
||||
let app = App::new().resource("/index.html", |r| {
|
||||
r.method(http::Method::GET)
|
||||
.with_config(index, |cfg| { // <- register handler
|
||||
cfg.limit(4096); // <- limit size of the payload
|
||||
})
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
- `Route::with_async()` does not return `ExtractorConfig`, to configure
|
||||
extractor use `Route::with_async_config()`
|
||||
|
||||
|
||||
## 0.6
|
||||
|
||||
- `Path<T>` extractor returns `ErrorNotFound` on failure instead of `ErrorBadRequest`
|
||||
|
||||
- `ws::Message::Close` now includes optional close reason.
|
||||
`ws::CloseCode::Status` and `ws::CloseCode::Empty` have been removed.
|
||||
|
||||
- `HttpServer::threads()` renamed to `HttpServer::workers()`.
|
||||
|
||||
- `HttpServer::start_ssl()` and `HttpServer::start_tls()` deprecated.
|
||||
Use `HttpServer::bind_ssl()` and `HttpServer::bind_tls()` instead.
|
||||
|
||||
- `HttpRequest::extensions()` returns a read-only reference to the request's extensions;
  `HttpRequest::extensions_mut()` returns a mutable reference.
|
||||
|
||||
- Instead of
|
||||
|
||||
`use actix_web::middleware::{
|
||||
CookieSessionBackend, CookieSessionError, RequestSession,
|
||||
Session, SessionBackend, SessionImpl, SessionStorage};`
|
||||
|
||||
use `actix_web::middleware::session`
|
||||
|
||||
`use actix_web::middleware::session{CookieSessionBackend, CookieSessionError,
|
||||
RequestSession, Session, SessionBackend, SessionImpl, SessionStorage};`
|
||||
|
||||
- `FromRequest::from_request()` accepts mutable reference to a request
|
||||
|
||||
- `FromRequest::Result` has to implement `Into<Reply<Self>>`
|
||||
|
||||
- [`Responder::respond_to()`](
|
||||
https://actix.rs/actix-web/actix_web/trait.Responder.html#tymethod.respond_to)
|
||||
is generic over `S`
|
||||
|
||||
- Use the `Query` extractor instead of `HttpRequest::query()`.
|
||||
|
||||
```rust
|
||||
fn index(q: Query<HashMap<String, String>>) -> Result<..> {
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```rust
|
||||
let q = Query::<HashMap<String, String>>::extract(req);
|
||||
```
|
||||
|
||||
- WebSocket operations are implemented as the `WsWriter` trait; bring it into scope with
  `use actix_web::ws::WsWriter`.
|
||||
|
||||
|
||||
## 0.5
|
||||
|
||||
- `HttpResponseBuilder::body()`, `.finish()`, `.json()`
|
||||
methods return `HttpResponse` instead of `Result<HttpResponse>`
|
||||
|
||||
- `actix_web::Method`, `actix_web::StatusCode`, `actix_web::Version`
|
||||
moved to `actix_web::http` module
|
||||
|
||||
- `actix_web::header` moved to `actix_web::http::header`
|
||||
|
||||
- `NormalizePath` moved to `actix_web::http` module
|
||||
|
||||
- `HttpServer` moved to `actix_web::server`, added new `actix_web::server::new()` function,
|
||||
shortcut for `actix_web::server::HttpServer::new()`
|
||||
|
||||
- `DefaultHeaders` middleware does not use separate builder, all builder methods moved to type itself
|
||||
|
||||
- `StaticFiles::new()`'s show_index parameter removed, use `show_files_listing()` method instead.
|
||||
|
||||
- `CookieSessionBackendBuilder` removed, all methods moved to `CookieSessionBackend` type
|
||||
|
||||
- `actix_web::httpcodes` module is deprecated, `HttpResponse::Ok()`, `HttpResponse::Found()` and other `HttpResponse::XXX()`
|
||||
functions should be used instead
|
||||
|
||||
- `ClientRequestBuilder::body()` returns `Result<_, actix_web::Error>`
|
||||
instead of `Result<_, http::Error>`
|
||||
|
||||
- `Application` renamed to a `App`
|
||||
|
||||
- `actix_web::Reply`, `actix_web::Resource` moved to `actix_web::dev`
|
105 README.md
@ -1,105 +0,0 @@
|
||||
<div align="center">
|
||||
<h1>Actix Web</h1>
|
||||
<p>
|
||||
<strong>Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust</strong>
|
||||
</p>
|
||||
<p>
|
||||
|
||||
[](https://crates.io/crates/actix-web)
|
||||
[](https://docs.rs/actix-web/4.0.0-rc.1)
|
||||

|
||||

|
||||
[](https://deps.rs/crate/actix-web/4.0.0-rc.1)
|
||||
<br />
|
||||
[](https://github.com/actix/actix-web/actions/workflows/ci.yml)
|
||||
[](https://codecov.io/gh/actix/actix-web)
|
||||

|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
</p>
|
||||
</div>
|
||||
|
||||
## Features
|
||||
|
||||
- Supports _HTTP/1.x_ and _HTTP/2_
|
||||
- Streaming and pipelining
|
||||
- Powerful [request routing](https://actix.rs/docs/url-dispatch/) with optional macros
|
||||
- Full [Tokio](https://tokio.rs) compatibility
|
||||
- Keep-alive and slow requests handling
|
||||
- Client/server [WebSockets](https://actix.rs/docs/websockets/) support
|
||||
- Transparent content compression/decompression (br, gzip, deflate, zstd)
|
||||
- Multipart streams
|
||||
- Static assets
|
||||
- SSL support using OpenSSL or Rustls
|
||||
- Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/))
|
||||
- Includes an async [HTTP client](https://docs.rs/awc/)
|
||||
- Runs on stable Rust 1.54+
|
||||
|
||||
## Documentation
|
||||
|
||||
- [Website & User Guide](https://actix.rs)
|
||||
- [Examples Repository](https://github.com/actix/examples)
|
||||
- [API Documentation](https://docs.rs/actix-web)
|
||||
- [API Documentation (master branch)](https://actix.rs/actix-web/actix_web)
|
||||
|
||||
## Example
|
||||
|
||||
Dependencies:
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
actix-web = "4.0.0-rc.1"
|
||||
```
|
||||
|
||||
Code:
|
||||
|
||||
```rust
|
||||
use actix_web::{get, web, App, HttpServer, Responder};
|
||||
|
||||
#[get("/{id}/{name}/index.html")]
|
||||
async fn index(params: web::Path<(u32, String)>) -> impl Responder {
|
||||
let (id, name) = params.into_inner();
|
||||
format!("Hello {}! id:{}", name, id)
|
||||
}
|
||||
|
||||
#[actix_web::main] // or #[tokio::main]
|
||||
async fn main() -> std::io::Result<()> {
|
||||
HttpServer::new(|| App::new().service(index))
|
||||
.bind(("127.0.0.1", 8080))?
|
||||
.run()
|
||||
.await
|
||||
}
|
||||
```
|
||||
|
||||
### More examples
|
||||
|
||||
- [Basic Setup](https://github.com/actix/examples/tree/master/basics/basics/)
|
||||
- [Application State](https://github.com/actix/examples/tree/master/basics/state/)
|
||||
- [JSON Handling](https://github.com/actix/examples/tree/master/json/json/)
|
||||
- [Multipart Streams](https://github.com/actix/examples/tree/master/forms/multipart/)
|
||||
- [Diesel Integration](https://github.com/actix/examples/tree/master/database_interactions/diesel/)
|
||||
- [r2d2 Integration](https://github.com/actix/examples/tree/master/database_interactions/r2d2/)
|
||||
- [Simple WebSocket](https://github.com/actix/examples/tree/master/websockets/websocket/)
|
||||
- [Tera Templates](https://github.com/actix/examples/tree/master/template_engines/tera/)
|
||||
- [Askama Templates](https://github.com/actix/examples/tree/master/template_engines/askama/)
|
||||
- [HTTPS using Rustls](https://github.com/actix/examples/tree/master/security/rustls/)
|
||||
- [HTTPS using OpenSSL](https://github.com/actix/examples/tree/master/security/openssl/)
|
||||
- [WebSocket Chat](https://github.com/actix/examples/tree/master/websockets/chat/)
|
||||
|
||||
You may consider checking out [this directory](https://github.com/actix/examples/tree/master/) for more examples.
|
||||
|
||||
## Benchmarks
|
||||
|
||||
One of the fastest web frameworks available according to the [TechEmpower Framework Benchmark](https://www.techempower.com/benchmarks/#section=data-r20&test=composite).
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under either of the following licenses, at your option:
|
||||
|
||||
- Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or <http://www.apache.org/licenses/LICENSE-2.0>)
- MIT license ([LICENSE-MIT](LICENSE-MIT) or <http://opensource.org/licenses/MIT>)
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
Contribution to the actix-web repo is organized under the terms of the Contributor Covenant.
|
||||
The Actix team promises to intervene to uphold that code of conduct.
|
@ -1,6 +1,27 @@
|
||||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
## Unreleased - 2022-xx-xx
|
||||
|
||||
|
||||
## 0.6.2 - 2022-07-23
|
||||
- Allow partial range responses for video content to start streaming sooner. [#2817]
|
||||
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
|
||||
|
||||
[#2817]: https://github.com/actix/actix-web/pull/2817
|
||||
|
||||
|
||||
## 0.6.1 - 2022-06-11
|
||||
- Add `NamedFile::{modified, metadata, content_type, content_disposition, encoding}()` getters. [#2021]
|
||||
- Update `tokio-uring` dependency to `0.3`.
|
||||
- Audio files now use `Content-Disposition: inline` instead of `attachment`. [#2645]
|
||||
- Minimum supported Rust version (MSRV) is now 1.56 due to transitive `hashbrown` dependency.
|
||||
|
||||
[#2021]: https://github.com/actix/actix-web/pull/2021
|
||||
[#2645]: https://github.com/actix/actix-web/pull/2645
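The getters added in 0.6.1 can be exercised roughly like this (a sketch assuming a readable `Cargo.toml` in the working directory):

```rust
use actix_files::NamedFile;

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let file = NamedFile::open_async("Cargo.toml").await?;

    // inspect what would be sent before turning the file into a response
    println!("content-type:        {}", file.content_type());
    println!("content-disposition: {:?}", file.content_disposition());
    println!("modified:            {:?}", file.modified());
    println!("size:                {} bytes", file.metadata().len());

    Ok(())
}
```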
|
||||
|
||||
|
||||
## 0.6.0 - 2022-02-25
|
||||
- No significant changes since `0.6.0-beta.16`.
|
||||
|
||||
|
||||
## 0.6.0-beta.16 - 2022-01-31
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-files"
|
||||
version = "0.6.0-beta.16"
|
||||
version = "0.6.2"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"fakeshadow <24548779@qq.com>",
|
||||
@ -22,10 +22,10 @@ path = "src/lib.rs"
|
||||
experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]
|
||||
|
||||
[dependencies]
|
||||
actix-http = "3.0.0-rc.1"
|
||||
actix-http = "3"
|
||||
actix-service = "2"
|
||||
actix-utils = "3"
|
||||
actix-web = { version = "4.0.0-rc.1", default-features = false }
|
||||
actix-web = { version = "4", default-features = false }
|
||||
|
||||
askama_escape = "0.10"
|
||||
bitflags = "1"
|
||||
@ -39,10 +39,13 @@ mime_guess = "2.0.1"
|
||||
percent-encoding = "2.1"
|
||||
pin-project-lite = "0.2.7"
|
||||
|
||||
tokio-uring = { version = "0.2", optional = true, features = ["bytes"] }
|
||||
# experimental-io-uring
|
||||
[target.'cfg(target_os = "linux")'.dependencies]
|
||||
tokio-uring = { version = "0.3", optional = true, features = ["bytes"] }
|
||||
actix-server = { version = "2.1", optional = true } # ensure matching tokio-uring versions
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-test = "0.1.0-beta.12"
|
||||
actix-web = "4.0.0-rc.1"
|
||||
actix-rt = "2.7"
|
||||
actix-test = "0.1.0-beta.13"
|
||||
actix-web = "4"
|
||||
tempfile = "3.2"
|
||||
|
@ -3,16 +3,16 @@
|
||||
> Static file serving for Actix Web
|
||||
|
||||
[](https://crates.io/crates/actix-files)
|
||||
[](https://docs.rs/actix-files/0.6.0-beta.16)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
|
||||
[](https://docs.rs/actix-files/0.6.2)
|
||||

|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-files/0.6.0-beta.16)
|
||||
[](https://deps.rs/crate/actix-files/0.6.2)
|
||||
[](https://crates.io/crates/actix-files)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
## Documentation & Resources
|
||||
|
||||
- [API Documentation](https://docs.rs/actix-files/)
|
||||
- [Example Project](https://github.com/actix/examples/tree/master/basics/static_index)
|
||||
- [API Documentation](https://docs.rs/actix-files)
|
||||
- [Example Project](https://github.com/actix/examples/tree/master/basics/static-files)
|
||||
- Minimum Supported Rust Version (MSRV): 1.54
|
||||
|
@ -81,7 +81,7 @@ async fn chunked_read_file_callback(
|
||||
) -> Result<(File, Bytes), Error> {
|
||||
use io::{Read as _, Seek as _};
|
||||
|
||||
let res = actix_web::rt::task::spawn_blocking(move || {
|
||||
let res = actix_web::web::block(move || {
|
||||
let mut buf = Vec::with_capacity(max_bytes);
|
||||
|
||||
file.seek(io::SeekFrom::Start(offset))?;
|
||||
@ -94,8 +94,7 @@ async fn chunked_read_file_callback(
|
||||
Ok((file, Bytes::from(buf)))
|
||||
}
|
||||
})
|
||||
.await
|
||||
.map_err(|_| actix_web::error::BlockingError)??;
|
||||
.await??;
|
||||
|
||||
Ok(res)
|
||||
}
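For context, `web::block` offloads blocking work to a thread pool, and its error type folds into `actix_web::Error`, which is what allows the double `?` above; a minimal sketch:

```rust
use actix_web::{web, Error};

async fn read_file_bytes(path: String) -> Result<Vec<u8>, Error> {
    // outer `?` handles a cancelled blocking task (BlockingError),
    // inner `?` handles the std::io::Error returned by the closure
    let bytes = web::block(move || std::fs::read(path)).await??;
    Ok(bytes)
}
```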
|
||||
|
@ -75,7 +75,7 @@ pub(crate) fn directory_listing(
|
||||
if dir.is_visible(&entry) {
|
||||
let entry = entry.unwrap();
|
||||
let p = match entry.path().strip_prefix(&dir.path) {
|
||||
Ok(p) if cfg!(windows) => base.join(p).to_string_lossy().replace("\\", "/"),
|
||||
Ok(p) if cfg!(windows) => base.join(p).to_string_lossy().replace('\\', "/"),
|
||||
Ok(p) => base.join(p).to_string_lossy().into_owned(),
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
@ -2,7 +2,7 @@ use actix_web::{http::StatusCode, ResponseError};
|
||||
use derive_more::Display;
|
||||
|
||||
/// Errors which can occur when serving static files.
|
||||
#[derive(Display, Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Eq, Display)]
|
||||
pub enum FilesError {
|
||||
/// Path is not a directory
|
||||
#[allow(dead_code)]
|
||||
@ -22,7 +22,7 @@ impl ResponseError for FilesError {
|
||||
}
|
||||
|
||||
#[allow(clippy::enum_variant_names)]
|
||||
#[derive(Display, Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Eq, Display)]
|
||||
#[non_exhaustive]
|
||||
pub enum UriSegmentError {
|
||||
/// The segment started with the wrapped invalid character.
|
||||
|
@ -364,20 +364,43 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[allow(deprecated)]
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_status_code_text() {
|
||||
let mut file = NamedFile::open_async("Cargo.toml")
|
||||
async fn status_code_customize_same_output() {
|
||||
let file1 = NamedFile::open_async("Cargo.toml")
|
||||
.await
|
||||
.unwrap()
|
||||
.set_status_code(StatusCode::NOT_FOUND);
|
||||
|
||||
let file2 = NamedFile::open_async("Cargo.toml")
|
||||
.await
|
||||
.unwrap()
|
||||
.customize()
|
||||
.with_status(StatusCode::NOT_FOUND);
|
||||
|
||||
let req = TestRequest::default().to_http_request();
|
||||
let res1 = file1.respond_to(&req);
|
||||
let res2 = file2.respond_to(&req);
|
||||
|
||||
assert_eq!(res1.status(), StatusCode::NOT_FOUND);
|
||||
assert_eq!(res2.status(), StatusCode::NOT_FOUND);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_status_code_text() {
|
||||
let mut file = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||
|
||||
{
|
||||
file.file();
|
||||
let _f: &File = &file;
|
||||
}
|
||||
|
||||
{
|
||||
let _f: &mut File = &mut file;
|
||||
}
|
||||
|
||||
let file = file.customize().with_status(StatusCode::NOT_FOUND);
|
||||
|
||||
let req = TestRequest::default().to_http_request();
|
||||
let resp = file.respond_to(&req);
|
||||
assert_eq!(
|
||||
|
@ -23,6 +23,7 @@ use actix_web::{
|
||||
use bitflags::bitflags;
|
||||
use derive_more::{Deref, DerefMut};
|
||||
use futures_core::future::LocalBoxFuture;
|
||||
use mime::Mime;
|
||||
use mime_guess::from_path;
|
||||
|
||||
use crate::{encoding::equiv_utf8_text, range::HttpRange};
|
||||
@ -76,8 +77,8 @@ pub struct NamedFile {
|
||||
pub(crate) md: Metadata,
|
||||
pub(crate) flags: Flags,
|
||||
pub(crate) status_code: StatusCode,
|
||||
pub(crate) content_type: mime::Mime,
|
||||
pub(crate) content_disposition: header::ContentDisposition,
|
||||
pub(crate) content_type: Mime,
|
||||
pub(crate) content_disposition: ContentDisposition,
|
||||
pub(crate) encoding: Option<ContentEncoding>,
|
||||
}
|
||||
|
||||
@ -96,18 +97,18 @@ impl NamedFile {
|
||||
///
|
||||
/// # Examples
|
||||
/// ```ignore
|
||||
/// use std::{
|
||||
/// io::{self, Write as _},
|
||||
/// env,
|
||||
/// fs::File
|
||||
/// };
|
||||
/// use actix_files::NamedFile;
|
||||
/// use std::io::{self, Write};
|
||||
/// use std::env;
|
||||
/// use std::fs::File;
|
||||
///
|
||||
/// fn main() -> io::Result<()> {
|
||||
/// let mut file = File::create("foo.txt")?;
|
||||
/// file.write_all(b"Hello, world!")?;
|
||||
/// let named_file = NamedFile::from_file(file, "bar.txt")?;
|
||||
/// # std::fs::remove_file("foo.txt");
|
||||
/// Ok(())
|
||||
/// }
|
||||
/// let mut file = File::create("foo.txt")?;
|
||||
/// file.write_all(b"Hello, world!")?;
|
||||
/// let named_file = NamedFile::from_file(file, "bar.txt")?;
|
||||
/// # std::fs::remove_file("foo.txt");
|
||||
/// Ok(())
|
||||
/// ```
|
||||
pub fn from_file<P: AsRef<Path>>(file: File, path: P) -> io::Result<NamedFile> {
|
||||
let path = path.as_ref().to_path_buf();
|
||||
@ -128,7 +129,7 @@ impl NamedFile {
|
||||
let ct = from_path(&path).first_or_octet_stream();
|
||||
|
||||
let disposition = match ct.type_() {
|
||||
mime::IMAGE | mime::TEXT | mime::VIDEO => DispositionType::Inline,
|
||||
mime::IMAGE | mime::TEXT | mime::AUDIO | mime::VIDEO => DispositionType::Inline,
|
||||
mime::APPLICATION => match ct.subtype() {
|
||||
mime::JAVASCRIPT | mime::JSON => DispositionType::Inline,
|
||||
name if name == "wasm" => DispositionType::Inline,
|
||||
@ -209,11 +210,10 @@ impl NamedFile {
|
||||
Self::from_file(file, path)
|
||||
}
|
||||
|
||||
#[allow(rustdoc::broken_intra_doc_links)]
|
||||
/// Attempts to open a file asynchronously in read-only mode.
|
||||
///
|
||||
/// When the `experimental-io-uring` crate feature is enabled, this will be async.
|
||||
/// Otherwise, it will be just like [`open`][Self::open].
|
||||
/// When the `experimental-io-uring` crate feature is enabled, this will be async. Otherwise, it
|
||||
/// will behave just like `open`.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
@ -238,13 +238,13 @@ impl NamedFile {
|
||||
Self::from_file(file, path)
|
||||
}
|
||||
|
||||
/// Returns reference to the underlying `File` object.
|
||||
/// Returns reference to the underlying file object.
|
||||
#[inline]
|
||||
pub fn file(&self) -> &File {
|
||||
&self.file
|
||||
}
|
||||
|
||||
/// Retrieve the path of this file.
|
||||
/// Returns the filesystem path to this file.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
@ -262,16 +262,53 @@ impl NamedFile {
|
||||
self.path.as_path()
|
||||
}
|
||||
|
||||
/// Set response **Status Code**
|
||||
/// Returns the time the file was last modified.
|
||||
///
|
||||
/// Returns `None` only on unsupported platforms; see [`std::fs::Metadata::modified()`].
|
||||
/// Therefore, it is usually safe to unwrap this.
|
||||
#[inline]
|
||||
pub fn modified(&self) -> Option<SystemTime> {
|
||||
self.modified
|
||||
}
|
||||
|
||||
/// Returns the filesystem metadata associated with this file.
|
||||
#[inline]
|
||||
pub fn metadata(&self) -> &Metadata {
|
||||
&self.md
|
||||
}
|
||||
|
||||
/// Returns the `Content-Type` header that will be used when serving this file.
|
||||
#[inline]
|
||||
pub fn content_type(&self) -> &Mime {
|
||||
&self.content_type
|
||||
}
|
||||
|
||||
/// Returns the `Content-Disposition` that will be used when serving this file.
|
||||
#[inline]
|
||||
pub fn content_disposition(&self) -> &ContentDisposition {
|
||||
&self.content_disposition
|
||||
}
|
||||
|
||||
/// Returns the `Content-Encoding` that will be used when serving this file.
|
||||
///
|
||||
/// A return value of `None` indicates that the content is not already using a compressed
|
||||
/// representation and may be subject to compression downstream.
|
||||
#[inline]
|
||||
pub fn content_encoding(&self) -> Option<ContentEncoding> {
|
||||
self.encoding
|
||||
}
|
||||
|
||||
/// Set response status code.
|
||||
#[deprecated(since = "0.7.0", note = "Prefer `Responder::customize()`.")]
|
||||
pub fn set_status_code(mut self, status: StatusCode) -> Self {
|
||||
self.status_code = status;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the MIME Content-Type for serving this file. By default the Content-Type is inferred
|
||||
/// from the filename extension.
|
||||
/// Sets the `Content-Type` header that will be used when serving this file. By default the
|
||||
/// `Content-Type` is inferred from the filename extension.
|
||||
#[inline]
|
||||
pub fn set_content_type(mut self, mime_type: mime::Mime) -> Self {
|
||||
pub fn set_content_type(mut self, mime_type: Mime) -> Self {
|
||||
self.content_type = mime_type;
|
||||
self
|
||||
}
|
||||
@ -284,15 +321,15 @@ impl NamedFile {
|
||||
/// filename is taken from the path provided in the `open` method after converting it to UTF-8
|
||||
/// (using `to_string_lossy`).
|
||||
#[inline]
|
||||
pub fn set_content_disposition(mut self, cd: header::ContentDisposition) -> Self {
|
||||
pub fn set_content_disposition(mut self, cd: ContentDisposition) -> Self {
|
||||
self.content_disposition = cd;
|
||||
self.flags.insert(Flags::CONTENT_DISPOSITION);
|
||||
self
|
||||
}
|
||||
|
||||
/// Disable `Content-Disposition` header.
|
||||
/// Disables `Content-Disposition` header.
|
||||
///
|
||||
/// By default Content-Disposition` header is enabled.
|
||||
/// By default, the `Content-Disposition` header is sent.
|
||||
#[inline]
|
||||
pub fn disable_content_disposition(mut self) -> Self {
|
||||
self.flags.remove(Flags::CONTENT_DISPOSITION);
|
||||
@ -491,11 +528,26 @@ impl NamedFile {
length = ranges[0].length;
offset = ranges[0].start;

// don't allow compression middleware to modify partial content
res.insert_header((
header::CONTENT_ENCODING,
HeaderValue::from_static("identity"),
));
// When a Content-Encoding header is present in a 206 partial content response
// for video content, it prevents browser video players from starting playback
// before loading the whole video and also prevents seeking.
//
// See: https://github.com/actix/actix-web/issues/2815
//
// The assumption of this fix is that the video player knows to not send an
// Accept-Encoding header for this request and that downstream middleware will
// not attempt compression for requests without it.
//
// TODO: Solve question around what to do if self.encoding is set and partial
// range is requested. Reject request? Ignoring self.encoding seems wrong, too.
// In practice, it should not come up.
if req.headers().contains_key(&header::ACCEPT_ENCODING) {
// don't allow compression middleware to modify partial content
res.insert_header((
header::CONTENT_ENCODING,
HeaderValue::from_static("identity"),
));
}

res.insert_header((
header::CONTENT_RANGE,
@ -1,11 +1,11 @@
|
||||
use actix_files::Files;
|
||||
use actix_files::{Files, NamedFile};
|
||||
use actix_web::{
|
||||
http::{
|
||||
header::{self, HeaderValue},
|
||||
StatusCode,
|
||||
},
|
||||
test::{self, TestRequest},
|
||||
App,
|
||||
web, App,
|
||||
};
|
||||
|
||||
#[actix_web::test]
|
||||
@ -36,3 +36,31 @@ async fn test_utf8_file_contents() {
|
||||
Some(&HeaderValue::from_static("text/plain")),
|
||||
);
|
||||
}

#[actix_web::test]
async fn partial_range_response_encoding() {
let srv = test::init_service(App::new().default_service(web::to(|| async {
NamedFile::open_async("./tests/test.binary").await.unwrap()
})))
.await;

// range request without accept-encoding returns no content-encoding header
let req = TestRequest::with_uri("/")
.append_header((header::RANGE, "bytes=10-20"))
.to_request();
let res = test::call_service(&srv, req).await;
assert_eq!(res.status(), StatusCode::PARTIAL_CONTENT);
assert!(!res.headers().contains_key(header::CONTENT_ENCODING));

// range request with accept-encoding returns a content-encoding header
let req = TestRequest::with_uri("/")
.append_header((header::RANGE, "bytes=10-20"))
.append_header((header::ACCEPT_ENCODING, "identity"))
.to_request();
let res = test::call_service(&srv, req).await;
assert_eq!(res.status(), StatusCode::PARTIAL_CONTENT);
assert_eq!(
res.headers().get(header::CONTENT_ENCODING).unwrap(),
"identity"
);
}
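For context, the behaviour this test exercises can also be observed from an HTTP client. The snippet below is a rough sketch, not part of the patch: it assumes an `awc` 3.x client and a server like the one in the test above listening on `127.0.0.1:8080`.

```rust
use actix_web::http::header;

// Hypothetical client-side check mirroring the test above (assumes `awc` 3.x).
async fn check_partial_content() -> Result<(), awc::error::SendRequestError> {
    let client = awc::Client::default();

    // Range request *without* Accept-Encoding: the server should omit
    // Content-Encoding entirely so video players and proxies are not confused.
    let res = client
        .get("http://127.0.0.1:8080/")
        .insert_header((header::RANGE, "bytes=10-20"))
        .send()
        .await?;
    assert!(!res.headers().contains_key(header::CONTENT_ENCODING));

    // Range request *with* Accept-Encoding: an explicit `identity` marker is
    // emitted to stop compression middleware from re-encoding the partial body.
    let res = client
        .get("http://127.0.0.1:8080/")
        .insert_header((header::RANGE, "bytes=10-20"))
        .insert_header((header::ACCEPT_ENCODING, "identity"))
        .send()
        .await?;
    assert_eq!(res.headers().get(header::CONTENT_ENCODING).unwrap(), "identity");

    Ok(())
}
```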
@ -1,6 +1,26 @@
|
||||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
## Unreleased - 2022-xx-xx
|
||||
|
||||
|
||||
## 3.0.0 - 2022-07-24
|
||||
- `TestServer::stop` is now async and will wait for the server and system to shutdown. [#2442]
|
||||
- Added `TestServer::client_headers` method. [#2097]
|
||||
- Update `actix-server` dependency to `2`.
|
||||
- Update `actix-tls` dependency to `3`.
|
||||
- Update `bytes` to `1.0`. [#1813]
|
||||
- Minimum supported Rust version (MSRV) is now 1.57.
|
||||
|
||||
[#2442]: https://github.com/actix/actix-web/pull/2442
|
||||
[#2097]: https://github.com/actix/actix-web/pull/2097
|
||||
[#1813]: https://github.com/actix/actix-web/pull/1813
|
||||
|
||||
|
||||
<details>
|
||||
<summary>3.0.0 Pre-Releases</summary>
|
||||
|
||||
## 3.0.0-beta.13 - 2022-02-16
|
||||
- No significant changes since `3.0.0-beta.12`.
|
||||
|
||||
|
||||
## 3.0.0-beta.12 - 2022-01-31
|
||||
@ -64,6 +84,7 @@
|
||||
|
||||
[#1813]: https://github.com/actix/actix-web/pull/1813
|
||||
|
||||
</details>
|
||||
|
||||
## 2.1.0 - 2020-11-25
|
||||
- Add ability to set address for `TestServer`. [#1645]
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-http-test"
|
||||
version = "3.0.0-beta.12"
|
||||
version = "3.0.0"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Various helpers for Actix applications to use during testing"
|
||||
keywords = ["http", "web", "framework", "async", "futures"]
|
||||
@ -29,13 +29,13 @@ default = []
|
||||
openssl = ["tls-openssl", "awc/openssl"]
|
||||
|
||||
[dependencies]
|
||||
actix-service = "2.0.0"
|
||||
actix-codec = "0.4.1"
|
||||
actix-tls = "3.0.0"
|
||||
actix-utils = "3.0.0"
|
||||
actix-service = "2"
|
||||
actix-codec = "0.5"
|
||||
actix-tls = "3"
|
||||
actix-utils = "3"
|
||||
actix-rt = "2.2"
|
||||
actix-server = "2"
|
||||
awc = { version = "3.0.0-beta.20", default-features = false }
|
||||
awc = { version = "3", default-features = false }
|
||||
|
||||
base64 = "0.13"
|
||||
bytes = "1"
|
||||
@ -51,5 +51,5 @@ tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
|
||||
tokio = { version = "1.8.4", features = ["sync"] }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-web = { version = "4.0.0-rc.1", default-features = false, features = ["cookies"] }
|
||||
actix-http = "3.0.0-rc.1"
|
||||
actix-web = { version = "4", default-features = false, features = ["cookies"] }
|
||||
actix-http = "3"
|
||||
|
@ -3,11 +3,11 @@
|
||||
> Various helpers for Actix applications to use during testing.
|
||||
|
||||
[](https://crates.io/crates/actix-http-test)
|
||||
[](https://docs.rs/actix-http-test/3.0.0-beta.12)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
|
||||
[](https://docs.rs/actix-http-test/3.0.0)
|
||||

|
||||

|
||||
<br>
|
||||
[](https://deps.rs/crate/actix-http-test/3.0.0-beta.12)
|
||||
[](https://deps.rs/crate/actix-http-test/3.0.0)
|
||||
[](https://crates.io/crates/actix-http-test)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
@ -1,6 +1,370 @@
|
||||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
## Unreleased - 2022-xx-xx
|
||||
|
||||
|
||||
## 3.2.1 - 2022-07-02
|
||||
### Fixed
|
||||
- Fix parsing ambiguity in Transfer-Encoding and Content-Length headers for HTTP/1.0 requests. [#2794]
|
||||
|
||||
[#2794]: https://github.com/actix/actix-web/pull/2794
|
||||
|
||||
|
||||
## 3.2.0 - 2022-06-30
|
||||
### Changed
|
||||
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
|
||||
|
||||
### Fixed
|
||||
- Websocket parser no longer throws endless overflow errors after receiving an oversized frame. [#2790]
|
||||
- Retain previously set Vary headers when using compression encoder. [#2798]
|
||||
|
||||
[#2790]: https://github.com/actix/actix-web/pull/2790
|
||||
[#2798]: https://github.com/actix/actix-web/pull/2798
|
||||
|
||||
|
||||
## 3.1.0 - 2022-06-11
|
||||
### Changed
|
||||
- Minimum supported Rust version (MSRV) is now 1.56 due to transitive `hashbrown` dependency.
|
||||
|
||||
### Fixed
|
||||
- Revert broken fix in [#2624] that caused erroneous 500 error responses. Temporarily re-introduces [#2357] bug. [#2779]
|
||||
|
||||
[#2357]: https://github.com/actix/actix-web/issues/2357
|
||||
[#2624]: https://github.com/actix/actix-web/issues/2624
|
||||
[#2779]: https://github.com/actix/actix-web/issues/2779
|
||||
|
||||
|
||||
## 3.0.4 - 2022-03-09
|
||||
### Fixed
|
||||
- Document on docs.rs with `ws` feature enabled.
|
||||
|
||||
|
||||
## 3.0.3 - 2022-03-08
|
||||
### Fixed
|
||||
- Allow spaces between header name and colon when parsing responses. [#2684]
|
||||
|
||||
[#2684]: https://github.com/actix/actix-web/issues/2684
|
||||
|
||||
|
||||
## 3.0.2 - 2022-03-05
|
||||
### Fixed
|
||||
- Fix encoding camel-case header names with more than one hyphen. [#2683]
|
||||
|
||||
[#2683]: https://github.com/actix/actix-web/issues/2683
|
||||
|
||||
|
||||
## 3.0.1 - 2022-03-04
|
||||
- Fix panic in H1 dispatcher when pipelining is used with keep-alive. [#2678]
|
||||
|
||||
[#2678]: https://github.com/actix/actix-web/issues/2678
|
||||
|
||||
## 3.0.0 - 2022-02-25
|
||||
### Dependencies
|
||||
- Updated `actix-*` to Tokio v1-based versions. [#1813]
|
||||
- Updated `bytes` to `1.0`. [#1813]
|
||||
- Updated `h2` to `0.3`. [#1813]
|
||||
- Updated `rustls` to `0.20.0`. [#2414]
|
||||
- Updated `language-tags` to `0.3`.
|
||||
- Updated `tokio` to `1`.
|
||||
|
||||
### Added
|
||||
- Crate Features:
|
||||
- `ws`; disabled by default. [#2618]
|
||||
- `http2`; disabled by default. [#2618]
|
||||
- `compress-brotli`; disabled by default. [#2618]
|
||||
- `compress-gzip`; disabled by default. [#2618]
|
||||
- `compress-zstd`; disabled by default. [#2618]
|
||||
- Functions:
|
||||
- `body::to_bytes` for async collecting message body into Bytes. [#2158]
|
||||
- Traits:
|
||||
- `TryIntoHeaderPair`; allows using typed and untyped headers in the same methods. [#1869]
|
||||
- Types:
|
||||
- `body::BoxBody`; a boxed message body with boxed errors. [#2183]
|
||||
- `body::EitherBody` enum. [#2468]
|
||||
- `body::None` struct. [#2468]
|
||||
- Re-export `http` crate's `Error` type as `error::HttpError`. [#2171]
|
||||
- Variants:
|
||||
- `ContentEncoding::Zstd` along with . [#2244]
|
||||
- `Protocol::Http3` for future compatibility and also mark `#[non_exhaustive]`. [00ba8d55]
|
||||
- Methods:
|
||||
- `ContentEncoding::to_header_value()`. [#2501]
|
||||
- `header::QualityItem::{max, min}()`. [#2486]
|
||||
- `header::QualityItem::zero()` that uses `Quality::ZERO`. [#2501]
|
||||
- `HeaderMap::drain()` as an efficient draining iterator. [#1964]
|
||||
- `HeaderMap::len_keys()` has the behavior of the old `len` method. [#1964]
|
||||
- `MessageBody::boxed` trait method for wrapping boxing types efficiently. [#2520]
|
||||
- `MessageBody::try_into_bytes` trait method, with default implementation, for optimizations on body types that complete in exactly one poll. [#2522]
|
||||
- `Request::conn_data()`. [#2491]
|
||||
- `Request::take_conn_data()`. [#2491]
|
||||
- `Request::take_req_data()`. [#2487]
|
||||
- `Response::{ok, bad_request, not_found, internal_server_error}()`. [#2159]
|
||||
- `Response::into_body()` that consumes response and returns body type. [#2201]
|
||||
- `Response::map_into_boxed_body()`. [#2468]
|
||||
- `ResponseBuilder::append_header()` method which allows using typed and untyped headers. [#1869]
|
||||
- `ResponseBuilder::insert_header()` method which allows using typed and untyped headers. [#1869]
|
||||
- `ResponseHead::set_camel_case_headers()`. [#2587]
|
||||
- `TestRequest::insert_header()` method which allows using typed and untyped headers. [#1869]
|
||||
- Implementations:
|
||||
- Implement `Clone for ws::HandshakeError`. [#2468]
|
||||
- Implement `Clone` for `body::AnyBody<S> where S: Clone`. [#2448]
|
||||
- Implement `Clone` for `RequestHead`. [#2487]
|
||||
- Implement `Clone` for `ResponseHead`. [#2585]
|
||||
- Implement `Copy` for `QualityItem<T> where T: Copy`. [#2501]
|
||||
- Implement `Default` for `ContentEncoding`. [#1912]
|
||||
- Implement `Default` for `HttpServiceBuilder`. [#2611]
|
||||
- Implement `Default` for `KeepAlive`. [#2611]
|
||||
- Implement `Default` for `Response`. [#2201]
|
||||
- Implement `Default` for `ws::Codec`. [#1920]
|
||||
- Implement `Display` for `header::Quality`. [#2486]
|
||||
- Implement `Eq` for `header::ContentEncoding`. [#2501]
|
||||
- Implement `ExactSizeIterator` and `FusedIterator` for all `HeaderMap` iterators. [#2470]
|
||||
- Implement `From<Duration>` for `KeepAlive`. [#2611]
|
||||
- Implement `From<Option<Duration>>` for `KeepAlive`. [#2611]
|
||||
- Implement `From<Vec<u8>>` for `Response<Vec<u8>>`. [#2625]
|
||||
- Implement `FromStr` for `ContentEncoding`. [#1912]
|
||||
- Implement `Header` for `ContentEncoding`. [#1912]
|
||||
- Implement `IntoHeaderValue` for `ContentEncoding`. [#1912]
|
||||
- Implement `IntoIterator` for `HeaderMap`. [#1964]
|
||||
- Implement `MessageBody` for `bytestring::ByteString`. [#2468]
|
||||
- Implement `MessageBody` for `Pin<Box<T>> where T: MessageBody`. [#2152]
|
||||
- Misc:
|
||||
- Re-export `StatusCode`, `Method`, `Version` and `Uri` at the crate root. [#2171]
|
||||
- Re-export `ContentEncoding` and `ConnectionType` at the crate root. [#2171]
|
||||
- `Quality::ZERO` associated constant equivalent to `q=0`. [#2501]
|
||||
- `header::Quality::{MAX, MIN}` associated constants equivalent to `q=1` and `q=0.001`, respectively. [#2486]
|
||||
- Timeout for canceling HTTP/2 server side connection handshake. Configurable with `ServiceConfig::client_timeout`; defaults to 5 seconds. [#2483]
|
||||
- `#[must_use]` for `ws::Codec` to prevent subtle bugs. [#1920]
|
||||
|
||||
### Changed
|
||||
- Traits:
|
||||
- Rename `IntoHeaderValue => TryIntoHeaderValue`. [#2510]
|
||||
- `MessageBody` now has an associated `Error` type. [#2183]
|
||||
- Types:
|
||||
- `Protocol` enum is now marked `#[non_exhaustive]`.
|
||||
- `error::DispatcherError` enum is now marked `#[non_exhaustive]`. [#2624]
|
||||
- `ContentEncoding` is now marked `#[non_exhaustive]`. [#2377]
|
||||
- Error enums are marked `#[non_exhaustive]`. [#2161]
|
||||
- Rename `PayloadStream` to `BoxedPayloadStream`. [#2545]
|
||||
- The body type parameter of `Response` no longer has a default. [#2152]
|
||||
- Enum Variants:
|
||||
- Rename `ContentEncoding::{Br => Brotli}`. [#2501]
|
||||
- `Payload` inner fields are now named. [#2545]
|
||||
- `ws::Message::Text` now contains a `bytestring::ByteString`. [#1864]
|
||||
- Methods:
|
||||
- Rename `ServiceConfig::{client_timer_expire => client_request_deadline}`. [#2611]
|
||||
- Rename `ServiceConfig::{client_disconnect_timer => client_disconnect_deadline}`. [#2611]
|
||||
- Rename `h1::Codec::{keepalive => keep_alive}`. [#2611]
|
||||
- Rename `h1::Codec::{keepalive_enabled => keep_alive_enabled}`. [#2611]
|
||||
- Rename `h1::ClientCodec::{keepalive => keep_alive}`. [#2611]
|
||||
- Rename `h1::ClientPayloadCodec::{keepalive => keep_alive}`. [#2611]
|
||||
- Rename `header::EntityTag::{weak => new_weak, strong => new_strong}`. [#2565]
|
||||
- Rename `TryIntoHeaderValue::{try_into => try_into_value}` to avoid ambiguity with std `TryInto` trait. [#1894]
|
||||
- Deadline methods in `ServiceConfig` now return `std::time::Instant`s instead of Tokio's wrapper type. [#2611]
|
||||
- Places in `Response` where `ResponseBody<B>` was received or returned now simply use `B`. [#2201]
|
||||
- `encoding::Encoder::response` now returns `AnyBody<Encoder<B>>`. [#2448]
|
||||
- `Extensions::insert` returns replaced item. [#1904]
|
||||
- `HeaderMap::get_all` now returns a `std::slice::Iter`. [#2527]
|
||||
- `HeaderMap::insert` now returns iterator of removed values. [#1964]
|
||||
- `HeaderMap::len` now returns number of values instead of number of keys. [#1964]
|
||||
- `HeaderMap::remove` now returns iterator of removed values. [#1964]
|
||||
- `ResponseBuilder::body(B)` now returns `Response<EitherBody<B>>`. [#2468]
|
||||
- `ResponseBuilder::content_type` now takes an `impl TryIntoHeaderValue` to support using typed `mime` types. [#1894]
|
||||
- `ResponseBuilder::finish()` now returns `Response<EitherBody<()>>`. [#2468]
|
||||
- `ResponseBuilder::json` now takes `impl Serialize`. [#2052]
|
||||
- `ResponseBuilder::message_body` now returns a `Result`. [#2201]
|
||||
- `ServiceConfig::keep_alive` now returns a `KeepAlive`. [#2611]
|
||||
- `ws::hash_key` now returns array. [#2035]
|
||||
- Trait Implementations:
|
||||
- Implementation of `Stream` for `Payload` no longer requires the `Stream` variant be `Unpin`. [#2545]
|
||||
- Implementation of `Future` for `h1::SendResponse` no longer requires the body type be `Unpin`. [#2545]
|
||||
- Implementation of `Stream` for `encoding::Decoder` no longer requires the stream type be `Unpin`. [#2545]
|
||||
- Implementation of `From` for error types now return a `Response<BoxBody>`. [#2468]
|
||||
- Misc:
|
||||
- `header` module is now public. [#2171]
|
||||
- `uri` module is now public. [#2171]
|
||||
- Request-local data container is no longer part of a `RequestHead`. Instead it is a distinct part of a `Request`. [#2487]
|
||||
- All error trait bounds in server service builders have changed from `Into<Error>` to `Into<Response<BoxBody>>`. [#2253]
|
||||
- All error trait bounds in message body and stream impls changed from `Into<Error>` to `Into<Box<dyn std::error::Error>>`. [#2253]
|
||||
- Guarantee ordering of `header::GetAll` iterator to be same as insertion order. [#2467]
|
||||
- Connection data set through the `on_connect_ext` callbacks is now accessible only from the new `Request::conn_data()` method. [#2491]
|
||||
- Brotli (de)compression support is now provided by the `brotli` crate. [#2538]
|
||||
- Minimum supported Rust version (MSRV) is now 1.54.
|
||||
|
||||
### Fixed
|
||||
- A `Vary` header is now correctly sent along with compressed content. [#2501]
|
||||
- HTTP/1.1 dispatcher correctly uses client request timeout. [#2611]
|
||||
- Fixed issue where handlers that took payload but then dropped without reading it to EOF it would cause keep-alive connections to become stuck. [#2624]
|
||||
- `ContentEncoding`'s `Identity` variant can now be parsed from a string. [#2501]
|
||||
- `HttpServer::{listen_rustls(), bind_rustls()}` now honor the ALPN protocols in the configuration parameter. [#2226]
|
||||
- Remove unnecessary `Into<Error>` bound on `Encoder` body types. [#2375]
|
||||
- Remove unnecessary `Unpin` bound on `ResponseBuilder::streaming`. [#2253]
|
||||
- `BodyStream` and `SizedStream` are no longer restricted to `Unpin` types. [#2152]
|
||||
- Fixed slice creation pointing to potential uninitialized data on h1 encoder. [#2364]
|
||||
- Fixed quality parse error in Accept-Encoding header. [#2344]
|
||||
|
||||
### Removed
|
||||
- Crate Features:
|
||||
- `compress` feature. [#2065]
|
||||
- `cookies` feature. [#2065]
|
||||
- `trust-dns` feature. [#2425]
|
||||
- `actors` optional feature and trait implementation for `actix` types. [#1969]
|
||||
- Functions:
|
||||
- `header::qitem` helper. Replaced with `header::QualityItem::max`. [#2486]
|
||||
- Types:
|
||||
- `body::Body`; replaced with `EitherBody` and `BoxBody`. [#2468]
|
||||
- `body::ResponseBody`. [#2446]
|
||||
- `ConnectError::SslHandshakeError` and re-export of `HandshakeError`. Due to the removal of this type from `tokio-openssl` crate. OpenSSL handshake error now returns `ConnectError::SslError`. [#1813]
|
||||
- `error::Canceled` re-export. [#1994]
|
||||
- `error::Result` type alias. [#2201]
|
||||
- `error::BlockingError` [#2660]
|
||||
- `InternalError` and all the error types it constructed were moved up to `actix-web`. [#2215]
|
||||
- Typed HTTP headers; they have moved up to `actix-web`. [#2094]
|
||||
- Re-export of `http` crate's `HeaderMap` types in addition to ours. [#2171]
|
||||
- Enum Variants:
|
||||
- `body::BodySize::Empty`; an empty body can now only be represented as a `Sized(0)` variant. [#2446]
|
||||
- `ContentEncoding::Auto`. [#2501]
|
||||
- `EncoderError::Boxed`. [#2446]
|
||||
- Methods:
|
||||
- `ContentEncoding::is_compression()`. [#2501]
|
||||
- `h1::Payload::readany()`. [#2545]
|
||||
- `HttpMessage::cookie[s]()` trait methods. [#2065]
|
||||
- `HttpServiceBuilder::new()`; use `default` instead. [#2611]
|
||||
- `on_connect` (previously deprecated) methods have been removed; use `on_connect_ext`. [#1857]
|
||||
- `Response::build_from()`. [#2159]
|
||||
- `Response::error()` [#2205]
|
||||
- `Response::take_body()` and old `Response::into_body()` method that casted body type. [#2201]
|
||||
- `Response`'s status code builders. [#2159]
|
||||
- `ResponseBuilder::{if_true, if_some}()` (previously deprecated). [#2148]
|
||||
- `ResponseBuilder::{set, set_header}()`; use `ResponseBuilder::insert_header()`. [#1869]
|
||||
- `ResponseBuilder::extensions[_mut]()`. [#2585]
|
||||
- `ResponseBuilder::header()`; use `ResponseBuilder::append_header()`. [#1869]
|
||||
- `ResponseBuilder::json()`. [#2148]
|
||||
- `ResponseBuilder::json2()`. [#1903]
|
||||
- `ResponseBuilder::streaming()`. [#2468]
|
||||
- `ResponseHead::extensions[_mut]()`. [#2585]
|
||||
- `ServiceConfig::{client_timer, keep_alive_timer}()`. [#2611]
|
||||
- `TestRequest::with_hdr()`; use `TestRequest::default().insert_header()`. [#1869]
|
||||
- `TestRequest::with_header()`; use `TestRequest::default().insert_header()`. [#1869]
|
||||
- Trait implementations:
|
||||
- Implementation of `Copy` for `ws::Codec`. [#1920]
|
||||
- Implementation of `From<Option<usize>> for KeepAlive`; use `Duration`s instead. [#2611]
|
||||
- Implementation of `From<serde_json::Value>` for `Body`. [#2148]
|
||||
- Implementation of `From<usize> for KeepAlive`; use `Duration`s instead. [#2611]
|
||||
- Implementation of `Future` for `Response`. [#2201]
|
||||
- Implementation of `Future` for `ResponseBuilder`. [#2468]
|
||||
- Implementation of `Into<Error>` for `Response<Body>`. [#2215]
|
||||
- Implementation of `Into<Error>` for `ResponseBuilder`. [#2215]
|
||||
- Implementation of `ResponseError` for `actix_utils::timeout::TimeoutError`. [#2127]
|
||||
- Implementation of `ResponseError` for `CookieParseError`. [#2065]
|
||||
- Implementation of `TryFrom<u16>` for `header::Quality`. [#2486]
|
||||
- Misc:
|
||||
- `http` module; most everything it contained is exported at the crate root. [#2488]
|
||||
- `cookies` module (re-export). [#2065]
|
||||
- `client` module. Connector types now live in `awc`. [#2425]
|
||||
- `error` field from `Response`. [#2205]
|
||||
- `downcast` and `downcast_get_type_id` macros. [#2291]
|
||||
- Down-casting for `MessageBody` types; use standard `Any` trait. [#2183]
|
||||
|
||||
|
||||
[#1813]: https://github.com/actix/actix-web/pull/1813
|
||||
[#1845]: https://github.com/actix/actix-web/pull/1845
|
||||
[#1857]: https://github.com/actix/actix-web/pull/1857
|
||||
[#1864]: https://github.com/actix/actix-web/pull/1864
|
||||
[#1869]: https://github.com/actix/actix-web/pull/1869
|
||||
[#1878]: https://github.com/actix/actix-web/pull/1878
|
||||
[#1894]: https://github.com/actix/actix-web/pull/1894
|
||||
[#1903]: https://github.com/actix/actix-web/pull/1903
|
||||
[#1904]: https://github.com/actix/actix-web/pull/1904
|
||||
[#1912]: https://github.com/actix/actix-web/pull/1912
|
||||
[#1920]: https://github.com/actix/actix-web/pull/1920
|
||||
[#1964]: https://github.com/actix/actix-web/pull/1964
|
||||
[#1969]: https://github.com/actix/actix-web/pull/1969
|
||||
[#1981]: https://github.com/actix/actix-web/pull/1981
|
||||
[#1994]: https://github.com/actix/actix-web/pull/1994
|
||||
[#2035]: https://github.com/actix/actix-web/pull/2035
|
||||
[#2052]: https://github.com/actix/actix-web/pull/2052
|
||||
[#2065]: https://github.com/actix/actix-web/pull/2065
|
||||
[#2094]: https://github.com/actix/actix-web/pull/2094
|
||||
[#2127]: https://github.com/actix/actix-web/pull/2127
|
||||
[#2148]: https://github.com/actix/actix-web/pull/2148
|
||||
[#2152]: https://github.com/actix/actix-web/pull/2152
|
||||
[#2158]: https://github.com/actix/actix-web/pull/2158
|
||||
[#2159]: https://github.com/actix/actix-web/pull/2159
|
||||
[#2161]: https://github.com/actix/actix-web/pull/2161
|
||||
[#2171]: https://github.com/actix/actix-web/pull/2171
|
||||
[#2183]: https://github.com/actix/actix-web/pull/2183
|
||||
[#2196]: https://github.com/actix/actix-web/pull/2196
|
||||
[#2201]: https://github.com/actix/actix-web/pull/2201
|
||||
[#2205]: https://github.com/actix/actix-web/pull/2205
|
||||
[#2215]: https://github.com/actix/actix-web/pull/2215
|
||||
[#2244]: https://github.com/actix/actix-web/pull/2244
|
||||
[#2250]: https://github.com/actix/actix-web/pull/2250
|
||||
[#2253]: https://github.com/actix/actix-web/pull/2253
|
||||
[#2291]: https://github.com/actix/actix-web/pull/2291
|
||||
[#2344]: https://github.com/actix/actix-web/pull/2344
|
||||
[#2364]: https://github.com/actix/actix-web/pull/2364
|
||||
[#2375]: https://github.com/actix/actix-web/pull/2375
|
||||
[#2377]: https://github.com/actix/actix-web/pull/2377
|
||||
[#2414]: https://github.com/actix/actix-web/pull/2414
|
||||
[#2425]: https://github.com/actix/actix-web/pull/2425
|
||||
[#2442]: https://github.com/actix/actix-web/pull/2442
|
||||
[#2446]: https://github.com/actix/actix-web/pull/2446
|
||||
[#2448]: https://github.com/actix/actix-web/pull/2448
|
||||
[#2456]: https://github.com/actix/actix-web/pull/2456
|
||||
[#2467]: https://github.com/actix/actix-web/pull/2467
|
||||
[#2468]: https://github.com/actix/actix-web/pull/2468
|
||||
[#2470]: https://github.com/actix/actix-web/pull/2470
|
||||
[#2474]: https://github.com/actix/actix-web/pull/2474
|
||||
[#2483]: https://github.com/actix/actix-web/pull/2483
|
||||
[#2486]: https://github.com/actix/actix-web/pull/2486
|
||||
[#2487]: https://github.com/actix/actix-web/pull/2487
|
||||
[#2488]: https://github.com/actix/actix-web/pull/2488
|
||||
[#2491]: https://github.com/actix/actix-web/pull/2491
|
||||
[#2497]: https://github.com/actix/actix-web/pull/2497
|
||||
[#2501]: https://github.com/actix/actix-web/pull/2501
|
||||
[#2510]: https://github.com/actix/actix-web/pull/2510
|
||||
[#2520]: https://github.com/actix/actix-web/pull/2520
|
||||
[#2522]: https://github.com/actix/actix-web/pull/2522
|
||||
[#2527]: https://github.com/actix/actix-web/pull/2527
|
||||
[#2538]: https://github.com/actix/actix-web/pull/2538
|
||||
[#2545]: https://github.com/actix/actix-web/pull/2545
|
||||
[#2565]: https://github.com/actix/actix-web/pull/2565
|
||||
[#2585]: https://github.com/actix/actix-web/pull/2585
|
||||
[#2587]: https://github.com/actix/actix-web/pull/2587
|
||||
[#2611]: https://github.com/actix/actix-web/pull/2611
|
||||
[#2618]: https://github.com/actix/actix-web/pull/2618
|
||||
[#2624]: https://github.com/actix/actix-web/pull/2624
|
||||
[#2625]: https://github.com/actix/actix-web/pull/2625
|
||||
[#2660]: https://github.com/actix/actix-web/pull/2660
|
||||
[00ba8d55]: https://github.com/actix/actix-web/commit/00ba8d55492284581695d824648590715a8bd386
|
||||
|
||||
|
||||
<details>
|
||||
<summary>3.0.0 Pre-Releases</summary>
|
||||
|
||||
## 3.0.0-rc.4 - 2022-02-22
|
||||
### Fixed
|
||||
- Fix h1 dispatcher panic. [1ce58ecb]
|
||||
|
||||
[1ce58ecb]: https://github.com/actix/actix-web/commit/1ce58ecb305c60e51db06e6c913b7a1344e229ca
|
||||
|
||||
|
||||
## 3.0.0-rc.3 - 2022-02-16
|
||||
- No significant changes since `3.0.0-rc.2`.
|
||||
|
||||
|
||||
## 3.0.0-rc.2 - 2022-02-08
|
||||
### Added
|
||||
- Implement `From<Vec<u8>>` for `Response<Vec<u8>>`. [#2625]
|
||||
|
||||
### Changed
|
||||
- `error::DispatcherError` enum is now marked `#[non_exhaustive]`. [#2624]
|
||||
|
||||
### Fixed
|
||||
- Issue where handlers that took payload but then dropped without reading it to EOF it would cause keep-alive connections to become stuck. [#2624]
|
||||
|
||||
[#2624]: https://github.com/actix/actix-web/pull/2624
|
||||
[#2625]: https://github.com/actix/actix-web/pull/2625
|
||||
|
||||
|
||||
## 3.0.0-rc.1 - 2022-01-31
|
||||
@ -80,7 +444,7 @@
|
||||
|
||||
|
||||
## 3.0.0-beta.17 - 2021-12-27
|
||||
### Changes
|
||||
### Changed
|
||||
- `HeaderMap::get_all` now returns a `std::slice::Iter`. [#2527]
|
||||
- `Payload` inner fields are now named. [#2545]
|
||||
- `impl Stream` for `Payload` no longer requires the `Stream` variant be `Unpin`. [#2545]
|
||||
@ -303,7 +667,7 @@
|
||||
- `Response::{ok, bad_request, not_found, internal_server_error}`. [#2159]
|
||||
- Helper `body::to_bytes` for async collecting message body into Bytes. [#2158]
|
||||
|
||||
### Changes
|
||||
### Changed
|
||||
- The type parameter of `Response` no longer has a default. [#2152]
|
||||
- The `Message` variant of `body::Body` is now `Pin<Box<dyn MessageBody>>`. [#2152]
|
||||
- `BodyStream` and `SizedStream` are no longer restricted to Unpin types. [#2152]
|
||||
@ -436,10 +800,10 @@
|
||||
- Remove `ResponseError` impl for `actix::actors::resolver::ResolverError`
|
||||
due to deprecate of resolver actor. [#1813]
|
||||
- Remove `ConnectError::SslHandshakeError` and re-export of `HandshakeError`.
|
||||
due to the removal of this type from `tokio-openssl` crate. openssl handshake
error would return as `ConnectError::SslError`. [#1813]
|
||||
- Remove `actix-threadpool` dependency. Use `actix_rt::task::spawn_blocking`.
|
||||
Due to this change `actix_threadpool::BlockingError` type is moved into
`actix_http::error` module. [#1878]
|
||||
|
||||
[#1813]: https://github.com/actix/actix-web/pull/1813
|
||||
@ -447,6 +811,8 @@
|
||||
[#1864]: https://github.com/actix/actix-web/pull/1864
|
||||
[#1878]: https://github.com/actix/actix-web/pull/1878
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
## 2.2.2 - 2022-01-21
|
||||
### Changed
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-http"
|
||||
version = "3.0.0-rc.1"
|
||||
version = "3.2.1"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Rob Ede <robjtede@icloud.com>",
|
||||
@ -20,7 +20,7 @@ edition = "2018"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
# features that docs.rs will build with
|
||||
features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"]
|
||||
features = ["http2", "ws", "openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"]
|
||||
|
||||
[lib]
|
||||
name = "actix_http"
|
||||
@ -37,7 +37,7 @@ ws = [
|
||||
"local-channel",
|
||||
"base64",
|
||||
"rand",
|
||||
"sha-1",
|
||||
"sha1",
|
||||
]
|
||||
|
||||
# TLS via OpenSSL
|
||||
@ -57,7 +57,7 @@ __compress = []
|
||||
|
||||
[dependencies]
|
||||
actix-service = "2"
|
||||
actix-codec = "0.4.1"
|
||||
actix-codec = "0.5"
|
||||
actix-utils = "3"
|
||||
actix-rt = { version = "2.2", default-features = false }
|
||||
|
||||
@ -73,11 +73,11 @@ httparse = "1.5.1"
|
||||
httpdate = "1.0.1"
|
||||
itoa = "1"
|
||||
language-tags = "0.3"
|
||||
log = "0.4"
|
||||
mime = "0.3"
|
||||
percent-encoding = "2.1"
|
||||
pin-project-lite = "0.2"
|
||||
smallvec = "1.6.1"
|
||||
tracing = { version = "0.1.30", default-features = false, features = ["log"] }
|
||||
|
||||
# http2
|
||||
h2 = { version = "0.3.9", optional = true }
|
||||
@ -86,21 +86,21 @@ h2 = { version = "0.3.9", optional = true }
|
||||
local-channel = { version = "0.1", optional = true }
|
||||
base64 = { version = "0.13", optional = true }
|
||||
rand = { version = "0.8", optional = true }
|
||||
sha-1 = { version = "0.10", optional = true }
|
||||
sha1 = { version = "0.10", optional = true }
|
||||
|
||||
# openssl/rustls
|
||||
actix-tls = { version = "3.0.0", default-features = false, optional = true }
|
||||
actix-tls = { version = "3", default-features = false, optional = true }
|
||||
|
||||
# compress-*
|
||||
brotli = { version = "3.3.3", optional = true }
|
||||
flate2 = { version = "1.0.13", optional = true }
|
||||
zstd = { version = "0.9", optional = true }
|
||||
zstd = { version = "0.11", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-http-test = { version = "3.0.0-beta.12", features = ["openssl"] }
|
||||
actix-http-test = { version = "3", features = ["openssl"] }
|
||||
actix-server = "2"
|
||||
actix-tls = { version = "3.0.0", features = ["openssl"] }
|
||||
actix-web = "4.0.0-rc.1"
|
||||
actix-tls = { version = "3", features = ["openssl"] }
|
||||
actix-web = "4"
|
||||
|
||||
async-stream = "0.3"
|
||||
criterion = { version = "0.3", features = ["html_reports"] }
|
||||
@ -108,9 +108,10 @@ env_logger = "0.9"
|
||||
futures-util = { version = "0.3.7", default-features = false, features = ["alloc"] }
|
||||
memchr = "2.4"
|
||||
once_cell = "1.9"
|
||||
rcgen = "0.8"
|
||||
rcgen = "0.9"
|
||||
regex = "1.3"
|
||||
rustls-pemfile = "0.2"
|
||||
rustversion = "1"
|
||||
rustls-pemfile = "1"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
static_assertions = "1"
|
||||
@ -120,7 +121,7 @@ tokio = { version = "1.8.4", features = ["net", "rt", "macros"] }
|
||||
|
||||
[[example]]
|
||||
name = "ws"
|
||||
required-features = ["rustls"]
|
||||
required-features = ["ws", "rustls"]
|
||||
|
||||
[[bench]]
|
||||
name = "write-camel-case"
|
||||
|
@ -3,11 +3,11 @@
|
||||
> HTTP primitives for the Actix ecosystem.
|
||||
|
||||
[](https://crates.io/crates/actix-http)
|
||||
[](https://docs.rs/actix-http/3.0.0-rc.1)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
|
||||
[](https://docs.rs/actix-http/3.2.1)
|
||||

|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-http/3.0.0-rc.1)
|
||||
[](https://deps.rs/crate/actix-http/3.2.1)
|
||||
[](https://crates.io/crates/actix-http)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
@ -25,7 +25,7 @@ use actix_http::{HttpService, Response};
|
||||
use actix_server::Server;
|
||||
use futures_util::future;
|
||||
use http::header::HeaderValue;
|
||||
use log::info;
|
||||
use tracing::info;
|
||||
|
||||
#[actix_rt::main]
|
||||
async fn main() -> io::Result<()> {
|
||||
|
@ -114,11 +114,12 @@ mod _original {
|
||||
use std::mem::MaybeUninit;
|
||||
|
||||
pub fn parse_headers(src: &mut BytesMut) -> usize {
|
||||
#![allow(clippy::uninit_assumed_init)]
|
||||
#![allow(invalid_value, clippy::uninit_assumed_init)]
|
||||
|
||||
let mut headers: [HeaderIndex; MAX_HEADERS] =
|
||||
unsafe { MaybeUninit::uninit().assume_init() };
|
||||
|
||||
#[allow(invalid_value)]
|
||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] =
|
||||
unsafe { MaybeUninit::uninit().assume_init() };
|
||||
|
||||
|
@ -18,7 +18,8 @@ async fn main() -> std::io::Result<()> {
|
||||
HttpService::build()
|
||||
// pass the app to service builder
|
||||
// map_config is used to map App's configuration to ServiceBuilder
|
||||
.finish(map_config(app, |_| AppConfig::default()))
|
||||
// h1 will configure server to only use HTTP/1.1
|
||||
.h1(map_config(app, |_| AppConfig::default()))
|
||||
.tcp()
|
||||
})?
|
||||
.run()
|
||||
|
@ -5,6 +5,7 @@ use actix_server::Server;
|
||||
use bytes::BytesMut;
|
||||
use futures_util::StreamExt as _;
|
||||
use http::header::HeaderValue;
|
||||
use tracing::info;
|
||||
|
||||
#[actix_rt::main]
|
||||
async fn main() -> io::Result<()> {
|
||||
@ -22,7 +23,7 @@ async fn main() -> io::Result<()> {
|
||||
body.extend_from_slice(&item?);
|
||||
}
|
||||
|
||||
log::info!("request body: {:?}", body);
|
||||
info!("request body: {:?}", body);
|
||||
|
||||
let res = Response::build(StatusCode::OK)
|
||||
.insert_header(("x-head", HeaderValue::from_static("dummy value!")))
|
||||
|
@ -1,9 +1,8 @@
|
||||
use std::{convert::Infallible, io, time::Duration};
|
||||
|
||||
use actix_http::{
|
||||
header::HeaderValue, HttpMessage, HttpService, Request, Response, StatusCode,
|
||||
};
|
||||
use actix_http::{header::HeaderValue, HttpService, Request, Response, StatusCode};
|
||||
use actix_server::Server;
|
||||
use tracing::info;
|
||||
|
||||
#[actix_rt::main]
|
||||
async fn main() -> io::Result<()> {
|
||||
@ -18,12 +17,12 @@ async fn main() -> io::Result<()> {
|
||||
ext.insert(42u32);
|
||||
})
|
||||
.finish(|req: Request| async move {
|
||||
log::info!("{:?}", req);
|
||||
info!("{:?}", req);
|
||||
|
||||
let mut res = Response::build(StatusCode::OK);
|
||||
res.insert_header(("x-head", HeaderValue::from_static("dummy value!")));
|
||||
|
||||
let forty_two = req.extensions().get::<u32>().unwrap().to_string();
|
||||
let forty_two = req.conn_data::<u32>().unwrap().to_string();
|
||||
res.insert_header((
|
||||
"x-forty-two",
|
||||
HeaderValue::from_str(&forty_two).unwrap(),
|
||||
|
@ -12,6 +12,7 @@ use actix_http::{body::BodyStream, HttpService, Response};
|
||||
use actix_server::Server;
|
||||
use async_stream::stream;
|
||||
use bytes::Bytes;
|
||||
use tracing::info;
|
||||
|
||||
#[actix_rt::main]
|
||||
async fn main() -> io::Result<()> {
|
||||
@ -21,7 +22,7 @@ async fn main() -> io::Result<()> {
|
||||
.bind("streaming-error", ("127.0.0.1", 8080), || {
|
||||
HttpService::build()
|
||||
.finish(|req| async move {
|
||||
log::info!("{:?}", req);
|
||||
info!("{:?}", req);
|
||||
let res = Response::ok();
|
||||
|
||||
Ok::<_, Infallible>(res.set_body(BodyStream::new(stream! {
|
||||
|
@ -17,6 +17,7 @@ use actix_server::Server;
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use bytestring::ByteString;
|
||||
use futures_core::{ready, Stream};
|
||||
use tracing::{info, trace};
|
||||
|
||||
#[actix_rt::main]
|
||||
async fn main() -> io::Result<()> {
|
||||
@ -34,13 +35,13 @@ async fn main() -> io::Result<()> {
|
||||
}
|
||||
|
||||
async fn handler(req: Request) -> Result<Response<BodyStream<Heartbeat>>, Error> {
|
||||
log::info!("handshaking");
|
||||
info!("handshaking");
|
||||
let mut res = ws::handshake(req.head())?;
|
||||
|
||||
// handshake will always fail under HTTP/2
|
||||
|
||||
log::info!("responding");
|
||||
Ok(res.message_body(BodyStream::new(Heartbeat::new(ws::Codec::new())))?)
|
||||
info!("responding");
|
||||
res.message_body(BodyStream::new(Heartbeat::new(ws::Codec::new())))
|
||||
}
|
||||
|
||||
struct Heartbeat {
|
||||
@ -61,7 +62,7 @@ impl Stream for Heartbeat {
|
||||
type Item = Result<Bytes, Error>;
|
||||
|
||||
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||
log::trace!("poll");
|
||||
trace!("poll");
|
||||
|
||||
ready!(self.as_mut().interval.poll_tick(cx));
|
||||
|
||||
|
@ -80,7 +80,7 @@ mod tests {
|
||||
use futures_core::ready;
|
||||
use futures_util::{stream, FutureExt as _};
|
||||
use pin_project_lite::pin_project;
|
||||
use static_assertions::{assert_impl_all, assert_not_impl_all};
|
||||
use static_assertions::{assert_impl_all, assert_not_impl_any};
|
||||
|
||||
use super::*;
|
||||
use crate::body::to_bytes;
|
||||
@ -91,10 +91,10 @@ mod tests {
|
||||
assert_impl_all!(BodyStream<stream::Empty<Result<Bytes, Infallible>>>: MessageBody);
|
||||
assert_impl_all!(BodyStream<stream::Repeat<Result<Bytes, Infallible>>>: MessageBody);
|
||||
|
||||
assert_not_impl_all!(BodyStream<stream::Empty<Bytes>>: MessageBody);
|
||||
assert_not_impl_all!(BodyStream<stream::Repeat<Bytes>>: MessageBody);
|
||||
assert_not_impl_any!(BodyStream<stream::Empty<Bytes>>: MessageBody);
|
||||
assert_not_impl_any!(BodyStream<stream::Repeat<Bytes>>: MessageBody);
|
||||
// crate::Error is not Clone
|
||||
assert_not_impl_all!(BodyStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
|
||||
assert_not_impl_any!(BodyStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn skips_empty_chunks() {
|
||||
|
@ -105,14 +105,13 @@ impl MessageBody for BoxBody {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use static_assertions::{assert_impl_all, assert_not_impl_all};
|
||||
use static_assertions::{assert_impl_all, assert_not_impl_any};
|
||||
|
||||
use super::*;
|
||||
use crate::body::to_bytes;
|
||||
|
||||
assert_impl_all!(BoxBody: MessageBody, fmt::Debug, Unpin);
|
||||
|
||||
assert_not_impl_all!(BoxBody: Send, Sync, Unpin);
|
||||
assert_impl_all!(BoxBody: fmt::Debug, MessageBody, Unpin);
|
||||
assert_not_impl_any!(BoxBody: Send, Sync);
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn nested_boxed_body() {
|
||||
|
@ -10,6 +10,17 @@ use super::{BodySize, BoxBody, MessageBody};
|
||||
use crate::Error;
|
||||
|
||||
pin_project! {
|
||||
/// An "either" type specialized for body types.
|
||||
///
|
||||
/// It is common, in middleware especially, to conditionally return an inner service's unknown/
|
||||
/// generic body `B` type or return early with a new response. This type's "right" variant
|
||||
/// defaults to `BoxBody` since error responses are the common case.
|
||||
///
|
||||
/// For example, middleware will often have `type Response = ServiceResponse<EitherBody<B>>`.
|
||||
/// This means that the inner service's response body type maps to the `Left` variant and the
|
||||
/// middleware's own error responses use the default `Right` variant of `BoxBody`. Of course,
|
||||
/// there's no reason it couldn't use `EitherBody<B, String>` instead if its alternative
|
||||
/// responses have a known type.
|
||||
#[project = EitherBodyProj]
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum EitherBody<L, R = BoxBody> {
|
||||
@ -22,7 +33,10 @@ pin_project! {
|
||||
}
|
||||
|
||||
impl<L> EitherBody<L, BoxBody> {
|
||||
/// Creates new `EitherBody` using left variant and boxed right variant.
|
||||
/// Creates new `EitherBody` left variant with a boxed right variant.
|
||||
///
|
||||
/// If the expected `R` type will be inferred and is not `BoxBody` then use the
|
||||
/// [`left`](Self::left) constructor instead.
|
||||
#[inline]
|
||||
pub fn new(body: L) -> Self {
|
||||
Self::Left { body }
|
||||
|
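The doc comment added for `EitherBody` above describes the common middleware pattern. A rough sketch of what that looks like in practice is shown below; it is not part of this diff, the names are illustrative, and it assumes the `map_into_left_body`/`map_into_right_body` helpers available on actix-web 4's `ServiceResponse` and `HttpResponse`.

```rust
use actix_web::{
    body::{EitherBody, MessageBody},
    dev::ServiceResponse,
    HttpResponse,
};

// Hypothetical helper: either forward the inner service's body `B` as the
// `Left` variant, or answer early with an error response whose `BoxBody`
// becomes the default `Right` variant.
fn forward_or_reject<B: MessageBody + 'static>(
    res: ServiceResponse<B>,
    allowed: bool,
) -> ServiceResponse<EitherBody<B>> {
    if allowed {
        // Inner body type maps to the `Left` variant.
        res.map_into_left_body()
    } else {
        // Build our own response; its boxed body maps to the `Right` variant.
        let (req, _) = res.into_parts();
        ServiceResponse::new(
            req,
            HttpResponse::Forbidden().finish().map_into_right_body(),
        )
    }
}
```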
@ -19,7 +19,7 @@ use super::{BodySize, BoxBody};
|
||||
/// It is not usually necessary to create custom body types, this trait is already [implemented for
|
||||
/// a large number of sensible body types](#foreign-impls) including:
|
||||
/// - Empty body: `()`
|
||||
/// - Text-based: `String`, `&'static str`, `ByteString`.
|
||||
/// - Text-based: `String`, `&'static str`, [`ByteString`](https://docs.rs/bytestring/1).
|
||||
/// - Byte-based: `Bytes`, `BytesMut`, `Vec<u8>`, `&'static [u8]`;
|
||||
/// - Streams: [`BodyStream`](super::BodyStream), [`SizedStream`](super::SizedStream)
|
||||
///
|
||||
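As a quick illustration of the body types listed above (a sketch, not part of this diff), any of them can be handed straight to a response builder because they already implement `MessageBody`:

```rust
use actix_web::HttpResponse;
use bytes::Bytes;

// Each body below is one of the types listed as already implementing `MessageBody`.
fn empty_body() -> HttpResponse {
    HttpResponse::NoContent().body(())
}

fn str_body() -> HttpResponse {
    HttpResponse::Ok().body("static text")
}

fn string_body() -> HttpResponse {
    HttpResponse::Ok().body(String::from("owned text"))
}

fn bytes_body() -> HttpResponse {
    HttpResponse::Ok().body(Bytes::from_static(b"raw bytes"))
}
```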
@ -481,6 +481,7 @@ mod tests {
|
||||
assert_poll_next_none!(pl);
|
||||
}
|
||||
|
||||
#[allow(clippy::let_unit_value)]
|
||||
#[actix_rt::test]
|
||||
async fn test_unit() {
|
||||
let pl = ();
|
||||
|
@ -76,7 +76,7 @@ mod tests {
|
||||
use actix_rt::pin;
|
||||
use actix_utils::future::poll_fn;
|
||||
use futures_util::stream;
|
||||
use static_assertions::{assert_impl_all, assert_not_impl_all};
|
||||
use static_assertions::{assert_impl_all, assert_not_impl_any};
|
||||
|
||||
use super::*;
|
||||
use crate::body::to_bytes;
|
||||
@ -87,10 +87,10 @@ mod tests {
|
||||
assert_impl_all!(SizedStream<stream::Empty<Result<Bytes, Infallible>>>: MessageBody);
|
||||
assert_impl_all!(SizedStream<stream::Repeat<Result<Bytes, Infallible>>>: MessageBody);
|
||||
|
||||
assert_not_impl_all!(SizedStream<stream::Empty<Bytes>>: MessageBody);
|
||||
assert_not_impl_all!(SizedStream<stream::Repeat<Bytes>>: MessageBody);
|
||||
assert_not_impl_any!(SizedStream<stream::Empty<Bytes>>: MessageBody);
|
||||
assert_not_impl_any!(SizedStream<stream::Repeat<Bytes>>: MessageBody);
|
||||
// crate::Error is not Clone
|
||||
assert_not_impl_all!(SizedStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
|
||||
assert_not_impl_any!(SizedStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn skips_empty_chunks() {
|
||||
|
@ -104,8 +104,13 @@ impl ServiceConfig {
|
||||
self.0.date_service.now()
|
||||
}
|
||||
|
||||
pub(crate) fn write_date_header(&self, dst: &mut BytesMut, camel_case: bool) {
|
||||
let mut buf: [u8; 39] = [0; 39];
|
||||
/// Writes date header to `dst` buffer.
|
||||
///
|
||||
/// Low-level method that utilizes the built-in efficient date service, requiring fewer syscalls
|
||||
/// than normal. Note that a CRLF (`\r\n`) is included in what is written.
|
||||
#[doc(hidden)]
|
||||
pub fn write_date_header(&self, dst: &mut BytesMut, camel_case: bool) {
|
||||
let mut buf: [u8; 37] = [0; 37];
|
||||
|
||||
buf[..6].copy_from_slice(if camel_case { b"Date: " } else { b"date: " });
|
||||
|
||||
@ -113,7 +118,7 @@ impl ServiceConfig {
|
||||
.date_service
|
||||
.with_date(|date| buf[6..35].copy_from_slice(&date.bytes));
|
||||
|
||||
buf[35..].copy_from_slice(b"\r\n\r\n");
|
||||
buf[35..].copy_from_slice(b"\r\n");
|
||||
dst.extend_from_slice(&buf);
|
||||
}
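The buffer arithmetic behind the change from 39 to 37 bytes: the `"date: "` (or `"Date: "`) prefix is 6 bytes, an HTTP-date is always 29 bytes, and the terminating CRLF is 2 bytes, so 6 + 29 + 2 = 37; the old code wrote a double CRLF and therefore needed 39. The sketch below reproduces the same layout for illustration only, using the `httpdate` crate (already a dependency of `actix-http`) instead of the internal cached date service.

```rust
use std::time::SystemTime;

use bytes::BytesMut;

// Illustrative only: builds the same 37-byte `date: <HTTP-date>\r\n` line that
// `write_date_header` produces, but via `httpdate` rather than the date service.
fn write_date_line(dst: &mut BytesMut) {
    let mut buf = [0u8; 37];
    buf[..6].copy_from_slice(b"date: ");
    // `fmt_http_date` always renders a fixed-width 29-byte IMF-fixdate.
    buf[6..35].copy_from_slice(httpdate::fmt_http_date(SystemTime::now()).as_bytes());
    buf[35..].copy_from_slice(b"\r\n");
    dst.extend_from_slice(&buf);
}
```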
|
||||
|
||||
|
@ -19,7 +19,7 @@ use zstd::stream::write::Decoder as ZstdDecoder;
|
||||
|
||||
use crate::{
|
||||
encoding::Writer,
|
||||
error::{BlockingError, PayloadError},
|
||||
error::PayloadError,
|
||||
header::{ContentEncoding, HeaderMap, CONTENT_ENCODING},
|
||||
};
|
||||
|
||||
@ -47,14 +47,17 @@ where
|
||||
ContentEncoding::Brotli => Some(ContentDecoder::Brotli(Box::new(
|
||||
brotli::DecompressorWriter::new(Writer::new(), 8_096),
|
||||
))),
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoding::Deflate => Some(ContentDecoder::Deflate(Box::new(
|
||||
ZlibDecoder::new(Writer::new()),
|
||||
))),
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
ContentEncoding::Gzip => Some(ContentDecoder::Gzip(Box::new(GzDecoder::new(
|
||||
Writer::new(),
|
||||
)))),
|
||||
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
ContentEncoding::Zstd => Some(ContentDecoder::Zstd(Box::new(
|
||||
ZstdDecoder::new(Writer::new()).expect(
|
||||
@ -98,8 +101,12 @@ where
|
||||
|
||||
loop {
|
||||
if let Some(ref mut fut) = this.fut {
|
||||
let (chunk, decoder) =
|
||||
ready!(Pin::new(fut).poll(cx)).map_err(|_| BlockingError)??;
|
||||
let (chunk, decoder) = ready!(Pin::new(fut).poll(cx)).map_err(|_| {
|
||||
PayloadError::Io(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
"Blocking task was cancelled unexpectedly",
|
||||
))
|
||||
})??;
|
||||
|
||||
*this.decoder = Some(decoder);
|
||||
this.fut.take();
|
||||
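The hunk above replaces the removed `BlockingError` with an `io::Error` when the offloaded decompression task is cancelled. A minimal standalone sketch of that mapping follows; it assumes `actix-rt`'s re-exported `spawn_blocking` and is not the exact decoder code.

```rust
use std::io;

use actix_rt::task::spawn_blocking;

// Run CPU-heavy work off the async executor; a cancelled or panicked task is
// surfaced as an io::Error, mirroring the decoder/encoder changes above.
async fn run_blocking<T, F>(f: F) -> io::Result<T>
where
    F: FnOnce() -> T + Send + 'static,
    T: Send + 'static,
{
    spawn_blocking(f).await.map_err(|_| {
        io::Error::new(
            io::ErrorKind::Other,
            "Blocking task was cancelled unexpectedly",
        )
    })
}
```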
@ -159,10 +166,13 @@ where
|
||||
enum ContentDecoder {
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
Deflate(Box<ZlibDecoder<Writer>>),
|
||||
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
Gzip(Box<GzDecoder<Writer>>),
|
||||
|
||||
#[cfg(feature = "compress-brotli")]
|
||||
Brotli(Box<brotli::DecompressorWriter<Writer>>),
|
||||
|
||||
// We need explicit 'static lifetime here because ZstdDecoder need lifetime
|
||||
// argument, and we use `spawn_blocking` in `Decoder::poll_next` that require `FnOnce() -> R + Send + 'static`
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
|
@ -17,13 +17,13 @@ use pin_project_lite::pin_project;
|
||||
#[cfg(feature = "compress-gzip")]
|
||||
use flate2::write::{GzEncoder, ZlibEncoder};
|
||||
|
||||
use tracing::trace;
|
||||
#[cfg(feature = "compress-zstd")]
|
||||
use zstd::stream::write::Encoder as ZstdEncoder;
|
||||
|
||||
use super::Writer;
|
||||
use crate::{
|
||||
body::{self, BodySize, MessageBody},
|
||||
error::BlockingError,
|
||||
header::{self, ContentEncoding, HeaderValue, CONTENT_ENCODING},
|
||||
ResponseHead, StatusCode,
|
||||
};
|
||||
@ -173,7 +173,12 @@ where
|
||||
|
||||
if let Some(ref mut fut) = this.fut {
|
||||
let mut encoder = ready!(Pin::new(fut).poll(cx))
|
||||
.map_err(|_| EncoderError::Blocking(BlockingError))?
|
||||
.map_err(|_| {
|
||||
EncoderError::Io(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
"Blocking task was cancelled unexpectedly",
|
||||
))
|
||||
})?
|
||||
.map_err(EncoderError::Io)?;
|
||||
|
||||
let chunk = encoder.take();
|
||||
@ -252,7 +257,7 @@ fn update_head(encoding: ContentEncoding, head: &mut ResponseHead) {
|
||||
head.headers_mut()
|
||||
.insert(header::CONTENT_ENCODING, encoding.to_header_value());
|
||||
head.headers_mut()
|
||||
.insert(header::VARY, HeaderValue::from_static("accept-encoding"));
|
||||
.append(header::VARY, HeaderValue::from_static("accept-encoding"));
|
||||
|
||||
head.no_chunking(false);
|
||||
}
|
||||
@ -352,7 +357,7 @@ impl ContentEncoder {
|
||||
ContentEncoder::Brotli(ref mut encoder) => match encoder.write_all(data) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
log::trace!("Error decoding br encoding: {}", err);
|
||||
trace!("Error decoding br encoding: {}", err);
|
||||
Err(err)
|
||||
}
|
||||
},
|
||||
@ -361,7 +366,7 @@ impl ContentEncoder {
|
||||
ContentEncoder::Gzip(ref mut encoder) => match encoder.write_all(data) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
log::trace!("Error decoding gzip encoding: {}", err);
|
||||
trace!("Error decoding gzip encoding: {}", err);
|
||||
Err(err)
|
||||
}
|
||||
},
|
||||
@ -370,7 +375,7 @@ impl ContentEncoder {
|
||||
ContentEncoder::Deflate(ref mut encoder) => match encoder.write_all(data) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
log::trace!("Error decoding deflate encoding: {}", err);
|
||||
trace!("Error decoding deflate encoding: {}", err);
|
||||
Err(err)
|
||||
}
|
||||
},
|
||||
@ -379,7 +384,7 @@ impl ContentEncoder {
|
||||
ContentEncoder::Zstd(ref mut encoder) => match encoder.write_all(data) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(err) => {
|
||||
log::trace!("Error decoding ztsd encoding: {}", err);
|
||||
trace!("Error decoding ztsd encoding: {}", err);
|
||||
Err(err)
|
||||
}
|
||||
},
|
||||
@ -400,12 +405,11 @@ fn new_brotli_compressor() -> Box<brotli::CompressorWriter<Writer>> {
|
||||
#[derive(Debug, Display)]
|
||||
#[non_exhaustive]
|
||||
pub enum EncoderError {
|
||||
/// Wrapped body stream error.
|
||||
#[display(fmt = "body")]
|
||||
Body(Box<dyn StdError>),
|
||||
|
||||
#[display(fmt = "blocking")]
|
||||
Blocking(BlockingError),
|
||||
|
||||
/// Generic I/O error.
|
||||
#[display(fmt = "io")]
|
||||
Io(io::Error),
|
||||
}
|
||||
@ -414,7 +418,6 @@ impl StdError for EncoderError {
|
||||
fn source(&self) -> Option<&(dyn StdError + 'static)> {
|
||||
match self {
|
||||
EncoderError::Body(err) => Some(&**err),
|
||||
EncoderError::Blocking(err) => Some(err),
|
||||
EncoderError::Io(err) => Some(err),
|
||||
}
|
||||
}
|
||||
|
@ -51,7 +51,7 @@ impl Error {
|
||||
Self::new(Kind::SendResponse)
|
||||
}
|
||||
|
||||
#[allow(unused)] // reserved for future use (TODO: remove allow when being used)
|
||||
#[allow(unused)] // available for future use
|
||||
pub(crate) fn new_io() -> Self {
|
||||
Self::new(Kind::Io)
|
||||
}
|
||||
@ -108,8 +108,10 @@ pub(crate) enum Kind {
|
||||
|
||||
impl fmt::Debug for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
// TODO: more detail
|
||||
f.write_str("actix_http::Error")
|
||||
f.debug_struct("actix_http::Error")
|
||||
.field("kind", &self.inner.kind)
|
||||
.field("cause", &self.inner.cause)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
@ -250,12 +252,6 @@ impl From<ParseError> for Response<BoxBody> {
|
||||
}
|
||||
}
|
||||
|
||||
/// A set of errors that can occur running blocking tasks in thread pool.
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Blocking thread pool is gone")]
|
||||
// TODO: non-exhaustive
|
||||
pub struct BlockingError;
|
||||
|
||||
/// A set of errors that can occur during payload parsing.
|
||||
#[derive(Debug, Display)]
|
||||
#[non_exhaustive]
|
||||
@ -293,13 +289,13 @@ impl std::error::Error for PayloadError {
|
||||
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
|
||||
match self {
|
||||
PayloadError::Incomplete(None) => None,
|
||||
PayloadError::Incomplete(Some(err)) => Some(err as &dyn std::error::Error),
|
||||
PayloadError::Incomplete(Some(err)) => Some(err),
|
||||
PayloadError::EncodingCorrupted => None,
|
||||
PayloadError::Overflow => None,
|
||||
PayloadError::UnknownLength => None,
|
||||
#[cfg(feature = "http2")]
|
||||
PayloadError::Http2Payload(err) => Some(err as &dyn std::error::Error),
|
||||
PayloadError::Io(err) => Some(err as &dyn std::error::Error),
|
||||
PayloadError::Http2Payload(err) => Some(err),
|
||||
PayloadError::Io(err) => Some(err),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -323,15 +319,6 @@ impl From<io::Error> for PayloadError {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BlockingError> for PayloadError {
|
||||
fn from(_: BlockingError) -> Self {
|
||||
PayloadError::Io(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
"Operation is canceled",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PayloadError> for Error {
|
||||
fn from(err: PayloadError) -> Self {
|
||||
Self::new_payload().with_cause(err)
|
||||
@ -340,6 +327,7 @@ impl From<PayloadError> for Error {
|
||||
|
||||
/// A set of errors that can occur during dispatching HTTP requests.
|
||||
#[derive(Debug, Display, From)]
|
||||
#[non_exhaustive]
|
||||
pub enum DispatchError {
|
||||
/// Service error.
|
||||
#[display(fmt = "Service Error")]
|
||||
@ -373,6 +361,10 @@ pub enum DispatchError {
|
||||
#[display(fmt = "Connection shutdown timeout")]
|
||||
DisconnectTimeout,
|
||||
|
||||
/// Handler dropped payload before reading EOF.
|
||||
#[display(fmt = "Handler dropped payload before reading EOF")]
|
||||
HandlerDroppedPayload,
|
||||
|
||||
/// Internal error.
|
||||
#[display(fmt = "Internal error")]
|
||||
InternalError,
|
||||
@ -381,7 +373,6 @@ pub enum DispatchError {
|
||||
impl StdError for DispatchError {
|
||||
fn source(&self) -> Option<&(dyn StdError + 'static)> {
|
||||
match self {
|
||||
// TODO: error source extraction?
|
||||
DispatchError::Service(_res) => None,
|
||||
DispatchError::Body(err) => Some(&**err),
|
||||
DispatchError::Io(err) => Some(err),
|
||||
@ -397,7 +388,7 @@ impl StdError for DispatchError {
|
||||
|
||||
/// A set of error that can occur during parsing content type.
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[cfg_attr(test, derive(PartialEq))]
|
||||
#[cfg_attr(test, derive(PartialEq, Eq))]
|
||||
#[non_exhaustive]
|
||||
pub enum ContentTypeError {
|
||||
/// Can not parse content type
|
||||
|
@ -1,6 +1,7 @@
|
||||
use std::{io, task::Poll};
|
||||
|
||||
use bytes::{Buf as _, Bytes, BytesMut};
|
||||
use tracing::{debug, trace};
|
||||
|
||||
macro_rules! byte (
|
||||
($rdr:ident) => ({
|
||||
@ -14,7 +15,7 @@ macro_rules! byte (
|
||||
})
|
||||
);
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(super) enum ChunkedState {
|
||||
Size,
|
||||
SizeLws,
|
||||
@ -76,7 +77,7 @@ impl ChunkedState {
|
||||
Poll::Ready(Ok(ChunkedState::Size))
|
||||
}
|
||||
None => {
|
||||
log::debug!("chunk size would overflow u64");
|
||||
debug!("chunk size would overflow u64");
|
||||
Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size line: Size is too big",
|
||||
@ -124,7 +125,7 @@ impl ChunkedState {
|
||||
rem: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
log::trace!("Chunked read, remaining={:?}", rem);
|
||||
trace!("Chunked read, remaining={:?}", rem);
|
||||
|
||||
let len = rdr.len() as u64;
|
||||
if len == 0 {
|
||||
|
@ -128,7 +128,10 @@ impl Decoder for ClientCodec {
|
||||
type Error = ParseError;
|
||||
|
||||
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
|
||||
debug_assert!(!self.inner.payload.is_some(), "Payload decoder is set");
|
||||
debug_assert!(
|
||||
self.inner.payload.is_none(),
|
||||
"Payload decoder should not be set"
|
||||
);
|
||||
|
||||
if let Some((req, payload)) = self.inner.decoder.decode(src)? {
|
||||
if let Some(conn_type) = req.conn_type() {
|
||||
|
@ -125,11 +125,13 @@ impl Decoder for Codec {
|
||||
self.flags.set(Flags::HEAD, head.method == Method::HEAD);
|
||||
self.version = head.version;
|
||||
self.conn_type = head.connection_type();
|
||||
|
||||
if self.conn_type == ConnectionType::KeepAlive
|
||||
&& !self.flags.contains(Flags::KEEP_ALIVE_ENABLED)
|
||||
{
|
||||
self.conn_type = ConnectionType::Close
|
||||
}
|
||||
|
||||
match payload {
|
||||
PayloadType::None => self.payload = None,
|
||||
PayloadType::Payload(pl) => self.payload = Some(pl),
|
||||
|
@ -6,7 +6,7 @@ use http::{
|
||||
header::{self, HeaderName, HeaderValue},
|
||||
Method, StatusCode, Uri, Version,
|
||||
};
|
||||
use log::{debug, error, trace};
|
||||
use tracing::{debug, error, trace};
|
||||
|
||||
use super::chunked::ChunkedState;
|
||||
use crate::{error::ParseError, header::HeaderMap, ConnectionType, Request, ResponseHead};
|
||||
@ -46,6 +46,23 @@ pub(crate) enum PayloadLength {
|
||||
None,
|
||||
}
|
||||
|
||||
impl PayloadLength {
|
||||
/// Returns true if variant is `None`.
|
||||
fn is_none(&self) -> bool {
|
||||
matches!(self, Self::None)
|
||||
}
|
||||
|
||||
/// Returns true if the variant represents a zero-length (not none) payload.
|
||||
fn is_zero(&self) -> bool {
|
||||
matches!(
|
||||
self,
|
||||
PayloadLength::Payload(PayloadType::Payload(PayloadDecoder {
|
||||
kind: Kind::Length(0)
|
||||
}))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) trait MessageType: Sized {
|
||||
fn set_connection_type(&mut self, conn_type: Option<ConnectionType>);
|
||||
|
||||
@ -59,6 +76,7 @@ pub(crate) trait MessageType: Sized {
|
||||
&mut self,
|
||||
slice: &Bytes,
|
||||
raw_headers: &[HeaderIndex],
|
||||
version: Version,
|
||||
) -> Result<PayloadLength, ParseError> {
|
||||
let mut ka = None;
|
||||
let mut has_upgrade_websocket = false;
|
||||
@ -87,21 +105,23 @@ pub(crate) trait MessageType: Sized {
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
header::CONTENT_LENGTH => match value.to_str() {
|
||||
Ok(s) if s.trim().starts_with('+') => {
|
||||
debug!("illegal Content-Length: {:?}", s);
|
||||
header::CONTENT_LENGTH => match value.to_str().map(str::trim) {
|
||||
Ok(val) if val.starts_with('+') => {
|
||||
debug!("illegal Content-Length: {:?}", val);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
Ok(s) => {
|
||||
if let Ok(len) = s.parse::<u64>() {
|
||||
if len != 0 {
|
||||
content_length = Some(len);
|
||||
}
|
||||
|
||||
Ok(val) => {
|
||||
if let Ok(len) = val.parse::<u64>() {
|
||||
// accept 0 lengths here and remove them in `decode` after all
|
||||
// headers have been processed to prevent request smuggling issues
|
||||
content_length = Some(len);
|
||||
} else {
|
||||
debug!("illegal Content-Length: {:?}", s);
|
||||
debug!("illegal Content-Length: {:?}", val);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
}
|
||||
|
||||
Err(_) => {
|
||||
debug!("illegal Content-Length: {:?}", value);
|
||||
return Err(ParseError::Header);
|
||||
@ -114,22 +134,23 @@ pub(crate) trait MessageType: Sized {
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
header::TRANSFER_ENCODING => {
|
||||
header::TRANSFER_ENCODING if version == Version::HTTP_11 => {
|
||||
seen_te = true;
|
||||
|
||||
if let Ok(s) = value.to_str().map(str::trim) {
|
||||
if s.eq_ignore_ascii_case("chunked") {
|
||||
if let Ok(val) = value.to_str().map(str::trim) {
|
||||
if val.eq_ignore_ascii_case("chunked") {
|
||||
chunked = true;
|
||||
} else if s.eq_ignore_ascii_case("identity") {
|
||||
} else if val.eq_ignore_ascii_case("identity") {
|
||||
// allow silently since multiple TE headers are already checked
|
||||
} else {
|
||||
debug!("illegal Transfer-Encoding: {:?}", s);
|
||||
debug!("illegal Transfer-Encoding: {:?}", val);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
} else {
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
}
|
||||
|
||||
// connection keep-alive state
|
||||
header::CONNECTION => {
|
||||
ka = if let Ok(conn) = value.to_str().map(str::trim) {
|
||||
@ -146,6 +167,7 @@ pub(crate) trait MessageType: Sized {
|
||||
None
|
||||
};
|
||||
}
|
||||
|
||||
header::UPGRADE => {
|
||||
if let Ok(val) = value.to_str().map(str::trim) {
|
||||
if val.eq_ignore_ascii_case("websocket") {
|
||||
@ -153,19 +175,23 @@ pub(crate) trait MessageType: Sized {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
header::EXPECT => {
|
||||
let bytes = value.as_bytes();
|
||||
if bytes.len() >= 4 && &bytes[0..4] == b"100-" {
|
||||
expect = true;
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
|
||||
headers.append(name, value);
|
||||
}
|
||||
}
|
||||
|
||||
self.set_connection_type(ka);
|
||||
|
||||
if expect {
|
||||
self.set_expect()
|
||||
}
|
||||
@ -209,15 +235,16 @@ impl MessageType for Request {
|
||||
|
||||
let (len, method, uri, ver, h_len) = {
|
||||
// SAFETY:
|
||||
// Create an uninitialized array of `MaybeUninit`. The `assume_init` is
|
||||
// safe because the type we are claiming to have initialized here is a
|
||||
// bunch of `MaybeUninit`s, which do not require initialization.
|
||||
// Create an uninitialized array of `MaybeUninit`. The `assume_init` is safe because the
|
||||
// type we are claiming to have initialized here is a bunch of `MaybeUninit`s, which
|
||||
// do not require initialization.
|
||||
let mut parsed = unsafe {
|
||||
MaybeUninit::<[MaybeUninit<httparse::Header<'_>>; MAX_HEADERS]>::uninit()
|
||||
.assume_init()
|
||||
};
|
||||
|
||||
let mut req = httparse::Request::new(&mut []);
|
||||
|
||||
match req.parse_with_uninit_headers(src, &mut parsed)? {
|
||||
httparse::Status::Complete(len) => {
|
||||
let method = Method::from_bytes(req.method.unwrap().as_bytes())
|
||||
@ -232,6 +259,7 @@ impl MessageType for Request {
|
||||
|
||||
(len, method, uri, version, req.headers.len())
|
||||
}
|
||||
|
||||
httparse::Status::Partial => {
|
||||
return if src.len() >= MAX_BUFFER_SIZE {
|
||||
trace!("MAX_BUFFER_SIZE unprocessed data reached, closing");
|
||||
@ -247,7 +275,22 @@ impl MessageType for Request {
|
||||
let mut msg = Request::new();
|
||||
|
||||
// convert headers
|
||||
let length = msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len])?;
|
||||
let mut length =
|
||||
msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len], ver)?;
|
||||
|
||||
// disallow HTTP/1.0 POST requests that do not contain a Content-Length headers
|
||||
// see https://datatracker.ietf.org/doc/html/rfc1945#section-7.2.2
|
||||
if ver == Version::HTTP_10 && method == Method::POST && length.is_none() {
|
||||
debug!("no Content-Length specified for HTTP/1.0 POST request");
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
// Remove CL value if 0 now that all headers and HTTP/1.0 special cases are processed.
|
||||
// Protects against some request smuggling attacks.
|
||||
// See https://github.com/actix/actix-web/issues/2767.
|
||||
if length.is_zero() {
|
||||
length = PayloadLength::None;
|
||||
}
|
||||
|
||||
// payload decoder
|
||||
let decoder = match length {
|
||||
@ -291,22 +334,35 @@ impl MessageType for ResponseHead {
|
||||
let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;
|
||||
|
||||
let (len, ver, status, h_len) = {
|
||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;
|
||||
// SAFETY:
|
||||
// Create an uninitialized array of `MaybeUninit`. The `assume_init` is safe because the
|
||||
// type we are claiming to have initialized here is a bunch of `MaybeUninit`s, which
|
||||
// do not require initialization.
|
||||
let mut parsed = unsafe {
|
||||
MaybeUninit::<[MaybeUninit<httparse::Header<'_>>; MAX_HEADERS]>::uninit()
|
||||
.assume_init()
|
||||
};
|
||||
|
||||
let mut res = httparse::Response::new(&mut parsed);
|
||||
match res.parse(src)? {
|
||||
let mut res = httparse::Response::new(&mut []);
|
||||
|
||||
let mut config = httparse::ParserConfig::default();
|
||||
config.allow_spaces_after_header_name_in_responses(true);
|
||||
|
||||
match config.parse_response_with_uninit_headers(&mut res, src, &mut parsed)? {
|
||||
httparse::Status::Complete(len) => {
|
||||
let version = if res.version.unwrap() == 1 {
|
||||
Version::HTTP_11
|
||||
} else {
|
||||
Version::HTTP_10
|
||||
};
|
||||
|
||||
let status = StatusCode::from_u16(res.code.unwrap())
|
||||
.map_err(|_| ParseError::Status)?;
|
||||
HeaderIndex::record(src, res.headers, &mut headers);
|
||||
|
||||
(len, version, status, res.headers.len())
|
||||
}
|
||||
|
||||
httparse::Status::Partial => {
|
||||
return if src.len() >= MAX_BUFFER_SIZE {
|
||||
error!("MAX_BUFFER_SIZE unprocessed data reached, closing");
|
||||
@ -322,7 +378,15 @@ impl MessageType for ResponseHead {
|
||||
msg.version = ver;
|
||||
|
||||
// convert headers
|
||||
let length = msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len])?;
|
||||
let mut length =
|
||||
msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len], ver)?;
|
||||
|
||||
// Remove CL value if 0 now that all headers and HTTP/1.0 special cases are processed.
|
||||
// Protects against some request smuggling attacks.
|
||||
// See https://github.com/actix/actix-web/issues/2767.
|
||||
if length.is_zero() {
|
||||
length = PayloadLength::None;
|
||||
}
|
||||
|
||||
// message payload
|
||||
let decoder = if let PayloadLength::Payload(pl) = length {
|
||||
@ -358,9 +422,6 @@ pub(crate) const EMPTY_HEADER_INDEX: HeaderIndex = HeaderIndex {
|
||||
pub(crate) const EMPTY_HEADER_INDEX_ARRAY: [HeaderIndex; MAX_HEADERS] =
|
||||
[EMPTY_HEADER_INDEX; MAX_HEADERS];
|
||||
|
||||
pub(crate) const EMPTY_HEADER_ARRAY: [httparse::Header<'static>; MAX_HEADERS] =
|
||||
[httparse::EMPTY_HEADER; MAX_HEADERS];
|
||||
|
||||
impl HeaderIndex {
|
||||
pub(crate) fn record(
|
||||
bytes: &[u8],
|
||||
@ -379,7 +440,7 @@ impl HeaderIndex {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
/// Chunk type yielded while decoding a payload.
|
||||
pub enum PayloadItem {
|
||||
Chunk(Bytes),
|
||||
@ -389,7 +450,7 @@ pub enum PayloadItem {
|
||||
/// Decoder that can handle different payload types.
|
||||
///
|
||||
/// If a message body does not use `Transfer-Encoding`, it should include a `Content-Length`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct PayloadDecoder {
|
||||
kind: Kind,
|
||||
}
|
||||
@ -415,7 +476,7 @@ impl PayloadDecoder {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
enum Kind {
|
||||
/// A reader used when a `Content-Length` header is passed with a positive integer.
|
||||
Length(u64),
|
||||
@ -594,14 +655,100 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_post() {
|
||||
let mut buf = BytesMut::from("POST /test2 HTTP/1.0\r\n\r\n");
|
||||
fn parse_h09_reject() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test1 HTTP/0.9\r\n\
|
||||
\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
reader.decode(&mut buf).unwrap_err();
|
||||
|
||||
let mut buf = BytesMut::from(
|
||||
"POST /test2 HTTP/0.9\r\n\
|
||||
Content-Length: 3\r\n\
|
||||
\r\n
|
||||
abc",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
reader.decode(&mut buf).unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_h10_get() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test1 HTTP/1.0\r\n\
|
||||
\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(req.version(), Version::HTTP_10);
|
||||
assert_eq!(*req.method(), Method::GET);
|
||||
assert_eq!(req.path(), "/test1");
|
||||
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test2 HTTP/1.0\r\n\
|
||||
Content-Length: 0\r\n\
|
||||
\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(req.version(), Version::HTTP_10);
|
||||
assert_eq!(*req.method(), Method::GET);
|
||||
assert_eq!(req.path(), "/test2");
|
||||
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test3 HTTP/1.0\r\n\
|
||||
Content-Length: 3\r\n\
|
||||
\r\n
|
||||
abc",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(req.version(), Version::HTTP_10);
|
||||
assert_eq!(*req.method(), Method::GET);
|
||||
assert_eq!(req.path(), "/test3");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_h10_post() {
|
||||
let mut buf = BytesMut::from(
|
||||
"POST /test1 HTTP/1.0\r\n\
|
||||
Content-Length: 3\r\n\
|
||||
\r\n\
|
||||
abc",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(req.version(), Version::HTTP_10);
|
||||
assert_eq!(*req.method(), Method::POST);
|
||||
assert_eq!(req.path(), "/test1");
|
||||
|
||||
let mut buf = BytesMut::from(
|
||||
"POST /test2 HTTP/1.0\r\n\
|
||||
Content-Length: 0\r\n\
|
||||
\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(req.version(), Version::HTTP_10);
|
||||
assert_eq!(*req.method(), Method::POST);
|
||||
assert_eq!(req.path(), "/test2");
|
||||
|
||||
let mut buf = BytesMut::from(
|
||||
"POST /test3 HTTP/1.0\r\n\
|
||||
\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let err = reader.decode(&mut buf).unwrap_err();
|
||||
assert!(err.to_string().contains("Header"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -697,121 +844,98 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_conn_default_1_0() {
|
||||
let mut buf = BytesMut::from("GET /test HTTP/1.0\r\n\r\n");
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
let req = parse_ready!(&mut BytesMut::from("GET /test HTTP/1.0\r\n\r\n"));
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conn_default_1_1() {
|
||||
let mut buf = BytesMut::from("GET /test HTTP/1.1\r\n\r\n");
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
let req = parse_ready!(&mut BytesMut::from("GET /test HTTP/1.1\r\n\r\n"));
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conn_close() {
|
||||
let mut buf = BytesMut::from(
|
||||
let req = parse_ready!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
connection: close\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
));
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
||||
|
||||
let mut buf = BytesMut::from(
|
||||
let req = parse_ready!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
connection: Close\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
));
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conn_close_1_0() {
|
||||
let mut buf = BytesMut::from(
|
||||
let req = parse_ready!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.0\r\n\
|
||||
connection: close\r\n\r\n",
|
||||
);
|
||||
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
));
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conn_keep_alive_1_0() {
|
||||
let mut buf = BytesMut::from(
|
||||
let req = parse_ready!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.0\r\n\
|
||||
connection: keep-alive\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
));
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
||||
|
||||
let mut buf = BytesMut::from(
|
||||
let req = parse_ready!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.0\r\n\
|
||||
connection: Keep-Alive\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
));
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conn_keep_alive_1_1() {
|
||||
let mut buf = BytesMut::from(
|
||||
let req = parse_ready!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
connection: keep-alive\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
));
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conn_other_1_0() {
|
||||
let mut buf = BytesMut::from(
|
||||
let req = parse_ready!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.0\r\n\
|
||||
connection: other\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
));
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conn_other_1_1() {
|
||||
let mut buf = BytesMut::from(
|
||||
let req = parse_ready!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
connection: other\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
));
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_conn_upgrade() {
|
||||
let mut buf = BytesMut::from(
|
||||
let req = parse_ready!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
upgrade: websockets\r\n\
|
||||
connection: upgrade\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
));
|
||||
|
||||
assert!(req.upgrade());
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::Upgrade);
|
||||
|
||||
let mut buf = BytesMut::from(
|
||||
let req = parse_ready!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
upgrade: Websockets\r\n\
|
||||
connection: Upgrade\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
));
|
||||
|
||||
assert!(req.upgrade());
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::Upgrade);
|
||||
@ -819,59 +943,62 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_conn_upgrade_connect_method() {
|
||||
let mut buf = BytesMut::from(
|
||||
let req = parse_ready!(&mut BytesMut::from(
|
||||
"CONNECT /test HTTP/1.1\r\n\
|
||||
content-type: text/plain\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
));
|
||||
|
||||
assert!(req.upgrade());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_headers_content_length_err_1() {
|
||||
let mut buf = BytesMut::from(
|
||||
fn test_headers_bad_content_length() {
|
||||
// string CL
|
||||
expect_parse_err!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
content-length: line\r\n\r\n",
|
||||
);
|
||||
));
|
||||
|
||||
expect_parse_err!(&mut buf)
|
||||
// negative CL
|
||||
expect_parse_err!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
content-length: -1\r\n\r\n",
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_headers_content_length_err_2() {
|
||||
fn octal_ish_cl_parsed_as_decimal() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
content-length: -1\r\n\r\n",
|
||||
"POST /test HTTP/1.1\r\n\
|
||||
content-length: 011\r\n\r\n",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(matches!(
|
||||
pl,
|
||||
PayloadType::Payload(pl) if pl == PayloadDecoder::length(11)
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_header() {
|
||||
let mut buf = BytesMut::from(
|
||||
expect_parse_err!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
test line\r\n\r\n",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_name() {
|
||||
let mut buf = BytesMut::from(
|
||||
expect_parse_err!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
test[]: line\r\n\r\n",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_bad_status_line() {
|
||||
let mut buf = BytesMut::from("getpath \r\n\r\n");
|
||||
expect_parse_err!(&mut buf);
|
||||
expect_parse_err!(&mut BytesMut::from("getpath \r\n\r\n"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -911,11 +1038,10 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_http_request_parser_utf8() {
|
||||
let mut buf = BytesMut::from(
|
||||
let req = parse_ready!(&mut BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
x-test: тест\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
));
|
||||
|
||||
assert_eq!(
|
||||
req.headers().get("x-test").unwrap().as_bytes(),
|
||||
@ -925,24 +1051,18 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_http_request_parser_two_slashes() {
|
||||
let mut buf = BytesMut::from("GET //path HTTP/1.1\r\n\r\n");
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
let req = parse_ready!(&mut BytesMut::from("GET //path HTTP/1.1\r\n\r\n"));
|
||||
assert_eq!(req.path(), "//path");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_parser_bad_method() {
|
||||
let mut buf = BytesMut::from("!12%()+=~$ /get HTTP/1.1\r\n\r\n");
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
expect_parse_err!(&mut BytesMut::from("!12%()+=~$ /get HTTP/1.1\r\n\r\n"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_parser_bad_version() {
|
||||
let mut buf = BytesMut::from("GET //get HT/11\r\n\r\n");
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
expect_parse_err!(&mut BytesMut::from("GET //get HT/11\r\n\r\n"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -959,29 +1079,66 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn hrs_multiple_content_length() {
|
||||
let mut buf = BytesMut::from(
|
||||
expect_parse_err!(&mut BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: 4\r\n\
|
||||
Content-Length: 2\r\n\
|
||||
\r\n\
|
||||
abcd",
|
||||
);
|
||||
));
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
expect_parse_err!(&mut BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: 0\r\n\
|
||||
Content-Length: 2\r\n\
|
||||
\r\n\
|
||||
ab",
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_content_length_plus() {
|
||||
let mut buf = BytesMut::from(
|
||||
expect_parse_err!(&mut BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: +3\r\n\
|
||||
\r\n\
|
||||
000",
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_te_http10() {
|
||||
// in HTTP/1.0 transfer encoding is ignored and must therefore contain a CL header
|
||||
|
||||
expect_parse_err!(&mut BytesMut::from(
|
||||
"POST / HTTP/1.0\r\n\
|
||||
Host: example.com\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
3\r\n\
|
||||
aaa\r\n\
|
||||
0\r\n\
|
||||
",
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_cl_and_te_http10() {
|
||||
// in HTTP/1.0 transfer encoding is simply ignored so it's fine to have both
|
||||
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.0\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: 3\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
000",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
parse_ready!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -15,6 +15,7 @@ use bitflags::bitflags;
|
||||
use bytes::{Buf, BytesMut};
|
||||
use futures_core::ready;
|
||||
use pin_project_lite::pin_project;
|
||||
use tracing::{error, trace};
|
||||
|
||||
use crate::{
|
||||
body::{BodySize, BoxBody, MessageBody},
|
||||
@ -151,7 +152,8 @@ pin_project! {
|
||||
error: Option<DispatchError>,
|
||||
|
||||
#[pin]
|
||||
state: State<S, B, X>,
|
||||
pub(super) state: State<S, B, X>,
|
||||
// when Some(_) dispatcher is in state of receiving request payload
|
||||
payload: Option<PayloadSender>,
|
||||
messages: VecDeque<DispatcherMessage>,
|
||||
|
||||
@ -174,7 +176,7 @@ enum DispatcherMessage {
|
||||
|
||||
pin_project! {
|
||||
#[project = StateProj]
|
||||
enum State<S, B, X>
|
||||
pub(super) enum State<S, B, X>
|
||||
where
|
||||
S: Service<Request>,
|
||||
X: Service<Request, Response = Request>,
|
||||
@ -194,7 +196,7 @@ where
|
||||
X: Service<Request, Response = Request>,
|
||||
B: MessageBody,
|
||||
{
|
||||
fn is_none(&self) -> bool {
|
||||
pub(super) fn is_none(&self) -> bool {
|
||||
matches!(self, State::None)
|
||||
}
|
||||
}
|
||||
@ -335,7 +337,7 @@ where
|
||||
while written < len {
|
||||
match io.as_mut().poll_write(cx, &write_buf[written..])? {
|
||||
Poll::Ready(0) => {
|
||||
log::error!("write zero; closing");
|
||||
error!("write zero; closing");
|
||||
return Poll::Ready(Err(io::Error::new(io::ErrorKind::WriteZero, "")));
|
||||
}
|
||||
|
||||
@ -374,8 +376,6 @@ where
|
||||
DispatchError::Io(err)
|
||||
})?;
|
||||
|
||||
this.flags.set(Flags::KEEP_ALIVE, this.codec.keep_alive());
|
||||
|
||||
Ok(size)
|
||||
}
|
||||
|
||||
@ -458,7 +458,12 @@ where
|
||||
}
|
||||
|
||||
// all messages are dealt with
|
||||
None => return Ok(PollResponse::DoNothing),
|
||||
None => {
|
||||
// start keep-alive if last request allowed it
|
||||
this.flags.set(Flags::KEEP_ALIVE, this.codec.keep_alive());
|
||||
|
||||
return Ok(PollResponse::DoNothing);
|
||||
}
|
||||
},
|
||||
|
||||
StateProj::ServiceCall { fut } => {
|
||||
@ -564,7 +569,7 @@ where
|
||||
}
|
||||
|
||||
StateProj::ExpectCall { fut } => {
|
||||
log::trace!(" calling expect service");
|
||||
trace!(" calling expect service");
|
||||
|
||||
match fut.poll(cx) {
|
||||
// expect resolved. write continue to buffer and set InnerDispatcher state
|
||||
@ -694,6 +699,7 @@ where
|
||||
|
||||
let mut updated = false;
|
||||
|
||||
// decode from read buf as many full requests as possible
|
||||
loop {
|
||||
match this.codec.decode(this.read_buf) {
|
||||
Ok(Some(msg)) => {
|
||||
@ -746,7 +752,7 @@ where
|
||||
if let Some(ref mut payload) = this.payload {
|
||||
payload.feed_data(chunk);
|
||||
} else {
|
||||
log::error!("Internal server error: unexpected payload chunk");
|
||||
error!("Internal server error: unexpected payload chunk");
|
||||
this.flags.insert(Flags::READ_DISCONNECT);
|
||||
this.messages.push_back(DispatcherMessage::Error(
|
||||
Response::internal_server_error().drop_body(),
|
||||
@ -760,7 +766,7 @@ where
|
||||
if let Some(mut payload) = this.payload.take() {
|
||||
payload.feed_eof();
|
||||
} else {
|
||||
log::error!("Internal server error: unexpected eof");
|
||||
error!("Internal server error: unexpected eof");
|
||||
this.flags.insert(Flags::READ_DISCONNECT);
|
||||
this.messages.push_back(DispatcherMessage::Error(
|
||||
Response::internal_server_error().drop_body(),
|
||||
@ -777,7 +783,7 @@ where
|
||||
Ok(None) => break,
|
||||
|
||||
Err(ParseError::Io(err)) => {
|
||||
log::trace!("I/O error: {}", &err);
|
||||
trace!("I/O error: {}", &err);
|
||||
self.as_mut().client_disconnected();
|
||||
this = self.as_mut().project();
|
||||
*this.error = Some(DispatchError::Io(err));
|
||||
@ -785,7 +791,7 @@ where
|
||||
}
|
||||
|
||||
Err(ParseError::TooLarge) => {
|
||||
log::trace!("request head was too big; returning 431 response");
|
||||
trace!("request head was too big; returning 431 response");
|
||||
|
||||
if let Some(mut payload) = this.payload.take() {
|
||||
payload.set_error(PayloadError::Overflow);
|
||||
@ -805,7 +811,7 @@ where
|
||||
}
|
||||
|
||||
Err(err) => {
|
||||
log::trace!("parse error {}", &err);
|
||||
trace!("parse error {}", &err);
|
||||
|
||||
if let Some(mut payload) = this.payload.take() {
|
||||
payload.set_error(PayloadError::EncodingCorrupted);
|
||||
@ -836,10 +842,7 @@ where
|
||||
if timer.as_mut().poll(cx).is_ready() {
|
||||
// timeout on first request (slow request) return 408
|
||||
|
||||
log::trace!(
|
||||
"timed out on slow request; \
|
||||
replying with 408 and closing connection"
|
||||
);
|
||||
trace!("timed out on slow request; replying with 408 and closing connection");
|
||||
|
||||
let _ = self.as_mut().send_error_response(
|
||||
Response::with_body(StatusCode::REQUEST_TIMEOUT, ()),
|
||||
@ -868,15 +871,21 @@ where
|
||||
"dispatcher should not be in keep-alive phase if state is not none: {:?}",
|
||||
this.state,
|
||||
);
|
||||
debug_assert!(
|
||||
this.write_buf.is_empty(),
|
||||
"dispatcher should not be in keep-alive phase if write_buf is not empty",
|
||||
);
|
||||
|
||||
// Assert removed by @robjtede on account of issue #2655. There are cases where an I/O
|
||||
// flush can be pending after entering the keep-alive state causing the subsequent flush
|
||||
// wake up to panic here. This appears to be a Linux-only problem. Leaving original code
|
||||
// below for posterity because a simple and reliable test could not be found to trigger
|
||||
// the behavior.
|
||||
// debug_assert!(
|
||||
// this.write_buf.is_empty(),
|
||||
// "dispatcher should not be in keep-alive phase if write_buf is not empty",
|
||||
// );
|
||||
|
||||
// keep-alive timer has timed out
|
||||
if timer.as_mut().poll(cx).is_ready() {
|
||||
// no tasks at hand
|
||||
log::trace!("timer timed out; closing connection");
|
||||
trace!("timer timed out; closing connection");
|
||||
this.flags.insert(Flags::SHUTDOWN);
|
||||
|
||||
if let Some(deadline) = this.config.client_disconnect_deadline() {
|
||||
@ -906,7 +915,7 @@ where
|
||||
|
||||
// timed-out during shutdown; drop connection
|
||||
if timer.as_mut().poll(cx).is_ready() {
|
||||
log::trace!("timed-out during shutdown");
|
||||
trace!("timed-out during shutdown");
|
||||
return Err(DispatchError::DisconnectTimeout);
|
||||
}
|
||||
}
|
||||
@ -1065,12 +1074,12 @@ where
|
||||
|
||||
match this.inner.project() {
|
||||
DispatcherStateProj::Upgrade { fut: upgrade } => upgrade.poll(cx).map_err(|err| {
|
||||
log::error!("Upgrade handler error: {}", err);
|
||||
error!("Upgrade handler error: {}", err);
|
||||
DispatchError::Upgrade
|
||||
}),
|
||||
|
||||
DispatcherStateProj::Normal { mut inner } => {
|
||||
log::trace!("start flags: {:?}", &inner.flags);
|
||||
trace!("start flags: {:?}", &inner.flags);
|
||||
|
||||
trace_timer_states(
|
||||
"start",
|
||||
@ -1177,7 +1186,7 @@ where
|
||||
|
||||
// client is gone
|
||||
if inner.flags.contains(Flags::WRITE_DISCONNECT) {
|
||||
log::trace!("client is gone; disconnecting");
|
||||
trace!("client is gone; disconnecting");
|
||||
return Poll::Ready(Ok(()));
|
||||
}
|
||||
|
||||
@ -1186,14 +1195,14 @@ where
|
||||
|
||||
// read half is closed; we do not process any responses
|
||||
if inner_p.flags.contains(Flags::READ_DISCONNECT) && state_is_none {
|
||||
log::trace!("read half closed; start shutdown");
|
||||
trace!("read half closed; start shutdown");
|
||||
inner_p.flags.insert(Flags::SHUTDOWN);
|
||||
}
|
||||
|
||||
// keep-alive and stream errors
|
||||
if state_is_none && inner_p.write_buf.is_empty() {
|
||||
if let Some(err) = inner_p.error.take() {
|
||||
log::error!("stream error: {}", &err);
|
||||
error!("stream error: {}", &err);
|
||||
return Poll::Ready(Err(err));
|
||||
}
|
||||
|
||||
@ -1222,7 +1231,7 @@ where
|
||||
Poll::Pending
|
||||
};
|
||||
|
||||
log::trace!("end flags: {:?}", &inner.flags);
|
||||
trace!("end flags: {:?}", &inner.flags);
|
||||
|
||||
poll
|
||||
}
|
||||
@ -1237,17 +1246,17 @@ fn trace_timer_states(
|
||||
ka_timer: &TimerState,
|
||||
shutdown_timer: &TimerState,
|
||||
) {
|
||||
log::trace!("{} timers:", label);
|
||||
trace!("{} timers:", label);
|
||||
|
||||
if head_timer.is_enabled() {
|
||||
log::trace!(" head {}", &head_timer);
|
||||
trace!(" head {}", &head_timer);
|
||||
}
|
||||
|
||||
if ka_timer.is_enabled() {
|
||||
log::trace!(" keep-alive {}", &ka_timer);
|
||||
trace!(" keep-alive {}", &ka_timer);
|
||||
}
|
||||
|
||||
if shutdown_timer.is_enabled() {
|
||||
log::trace!(" shutdown {}", &shutdown_timer);
|
||||
trace!(" shutdown {}", &shutdown_timer);
|
||||
}
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
use std::{future::Future, str, task::Poll, time::Duration};
|
||||
|
||||
use actix_rt::time::sleep;
|
||||
use actix_rt::{pin, time::sleep};
|
||||
use actix_service::fn_service;
|
||||
use actix_utils::future::{ready, Ready};
|
||||
use bytes::Bytes;
|
||||
@ -53,6 +53,14 @@ fn echo_path_service(
|
||||
})
|
||||
}
|
||||
|
||||
fn drop_payload_service(
|
||||
) -> impl Service<Request, Response = Response<&'static str>, Error = Error> {
|
||||
fn_service(|mut req: Request| async move {
|
||||
let _ = req.take_payload();
|
||||
Ok::<_, Error>(Response::with_body(StatusCode::OK, "payload dropped"))
|
||||
})
|
||||
}
|
||||
|
||||
fn echo_payload_service() -> impl Service<Request, Response = Response<Bytes>, Error = Error> {
|
||||
fn_service(|mut req: Request| {
|
||||
Box::pin(async move {
|
||||
@ -89,7 +97,7 @@ async fn late_request() {
|
||||
None,
|
||||
OnConnectData::default(),
|
||||
);
|
||||
actix_rt::pin!(h1);
|
||||
pin!(h1);
|
||||
|
||||
lazy(|cx| {
|
||||
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||
@ -156,7 +164,7 @@ async fn oneshot_connection() {
|
||||
None,
|
||||
OnConnectData::default(),
|
||||
);
|
||||
actix_rt::pin!(h1);
|
||||
pin!(h1);
|
||||
|
||||
lazy(|cx| {
|
||||
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||
@ -173,13 +181,16 @@ async fn oneshot_connection() {
|
||||
stabilize_date_header(&mut res);
|
||||
let res = &res[..];
|
||||
|
||||
let exp = b"\
|
||||
HTTP/1.1 200 OK\r\n\
|
||||
content-length: 5\r\n\
|
||||
connection: close\r\n\
|
||||
date: Thu, 01 Jan 1970 12:34:56 UTC\r\n\r\n\
|
||||
/abcd\
|
||||
";
|
||||
let exp = http_msg(
|
||||
r"
|
||||
HTTP/1.1 200 OK
|
||||
content-length: 5
|
||||
connection: close
|
||||
date: Thu, 01 Jan 1970 12:34:56 UTC
|
||||
|
||||
/abcd
|
||||
",
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
res,
|
||||
@ -188,7 +199,7 @@ async fn oneshot_connection() {
|
||||
response: {:?}\n\
|
||||
expected: {:?}",
|
||||
String::from_utf8_lossy(res),
|
||||
String::from_utf8_lossy(exp)
|
||||
String::from_utf8_lossy(&exp)
|
||||
);
|
||||
})
|
||||
.await;
|
||||
@ -214,7 +225,7 @@ async fn keep_alive_timeout() {
|
||||
None,
|
||||
OnConnectData::default(),
|
||||
);
|
||||
actix_rt::pin!(h1);
|
||||
pin!(h1);
|
||||
|
||||
lazy(|cx| {
|
||||
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||
@ -293,7 +304,7 @@ async fn keep_alive_follow_up_req() {
|
||||
None,
|
||||
OnConnectData::default(),
|
||||
);
|
||||
actix_rt::pin!(h1);
|
||||
pin!(h1);
|
||||
|
||||
lazy(|cx| {
|
||||
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||
@ -413,7 +424,7 @@ async fn req_parse_err() {
|
||||
OnConnectData::default(),
|
||||
);
|
||||
|
||||
actix_rt::pin!(h1);
|
||||
pin!(h1);
|
||||
|
||||
match h1.as_mut().poll(cx) {
|
||||
Poll::Pending => panic!(),
|
||||
@ -459,7 +470,7 @@ async fn pipelining_ok_then_ok() {
|
||||
OnConnectData::default(),
|
||||
);
|
||||
|
||||
actix_rt::pin!(h1);
|
||||
pin!(h1);
|
||||
|
||||
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||
|
||||
@ -529,7 +540,7 @@ async fn pipelining_ok_then_bad() {
|
||||
OnConnectData::default(),
|
||||
);
|
||||
|
||||
actix_rt::pin!(h1);
|
||||
pin!(h1);
|
||||
|
||||
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||
|
||||
@ -601,7 +612,7 @@ async fn expect_handling() {
|
||||
",
|
||||
);
|
||||
|
||||
actix_rt::pin!(h1);
|
||||
pin!(h1);
|
||||
|
||||
assert!(h1.as_mut().poll(cx).is_pending());
|
||||
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||
@ -678,7 +689,7 @@ async fn expect_eager() {
|
||||
",
|
||||
);
|
||||
|
||||
actix_rt::pin!(h1);
|
||||
pin!(h1);
|
||||
|
||||
assert!(h1.as_mut().poll(cx).is_ready());
|
||||
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||
@ -761,7 +772,7 @@ async fn upgrade_handling() {
|
||||
",
|
||||
);
|
||||
|
||||
actix_rt::pin!(h1);
|
||||
pin!(h1);
|
||||
|
||||
assert!(h1.as_mut().poll(cx).is_ready());
|
||||
assert!(matches!(&h1.inner, DispatcherState::Upgrade { .. }));
|
||||
@ -771,3 +782,195 @@ async fn upgrade_handling() {
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
// fix in #2624 reverted temporarily
|
||||
// complete fix tracked in #2745
|
||||
#[ignore]
|
||||
#[actix_rt::test]
|
||||
async fn handler_drop_payload() {
|
||||
let _ = env_logger::try_init();
|
||||
|
||||
let mut buf = TestBuffer::new(http_msg(
|
||||
r"
|
||||
POST /drop-payload HTTP/1.1
|
||||
Content-Length: 3
|
||||
|
||||
abc
|
||||
",
|
||||
));
|
||||
|
||||
let services = HttpFlow::new(
|
||||
drop_payload_service(),
|
||||
ExpectHandler,
|
||||
None::<UpgradeHandler>,
|
||||
);
|
||||
|
||||
let h1 = Dispatcher::new(
|
||||
buf.clone(),
|
||||
services,
|
||||
ServiceConfig::default(),
|
||||
None,
|
||||
OnConnectData::default(),
|
||||
);
|
||||
pin!(h1);
|
||||
|
||||
lazy(|cx| {
|
||||
assert!(h1.as_mut().poll(cx).is_pending());
|
||||
|
||||
// polls: manual
|
||||
assert_eq!(h1.poll_count, 1);
|
||||
|
||||
let mut res = BytesMut::from(buf.take_write_buf().as_ref());
|
||||
stabilize_date_header(&mut res);
|
||||
let res = &res[..];
|
||||
|
||||
let exp = http_msg(
|
||||
r"
|
||||
HTTP/1.1 200 OK
|
||||
content-length: 15
|
||||
date: Thu, 01 Jan 1970 12:34:56 UTC
|
||||
|
||||
payload dropped
|
||||
",
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
res,
|
||||
exp,
|
||||
"\nexpected response not in write buffer:\n\
|
||||
response: {:?}\n\
|
||||
expected: {:?}",
|
||||
String::from_utf8_lossy(res),
|
||||
String::from_utf8_lossy(&exp)
|
||||
);
|
||||
|
||||
if let DispatcherStateProj::Normal { inner } = h1.as_mut().project().inner.project() {
|
||||
assert!(inner.state.is_none());
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
lazy(|cx| {
|
||||
// add message that claims to have payload longer than provided
|
||||
buf.extend_read_buf(http_msg(
|
||||
r"
|
||||
POST /drop-payload HTTP/1.1
|
||||
Content-Length: 200
|
||||
|
||||
abc
|
||||
",
|
||||
));
|
||||
|
||||
assert!(h1.as_mut().poll(cx).is_pending());
|
||||
|
||||
// polls: manual => manual
|
||||
assert_eq!(h1.poll_count, 2);
|
||||
|
||||
let mut res = BytesMut::from(buf.take_write_buf().as_ref());
|
||||
stabilize_date_header(&mut res);
|
||||
let res = &res[..];
|
||||
|
||||
// expect response immediately even though request side has not finished reading payload
|
||||
let exp = http_msg(
|
||||
r"
|
||||
HTTP/1.1 200 OK
|
||||
content-length: 15
|
||||
date: Thu, 01 Jan 1970 12:34:56 UTC
|
||||
|
||||
payload dropped
|
||||
",
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
res,
|
||||
exp,
|
||||
"\nexpected response not in write buffer:\n\
|
||||
response: {:?}\n\
|
||||
expected: {:?}",
|
||||
String::from_utf8_lossy(res),
|
||||
String::from_utf8_lossy(&exp)
|
||||
);
|
||||
})
|
||||
.await;
|
||||
|
||||
lazy(|cx| {
|
||||
assert!(h1.as_mut().poll(cx).is_ready());
|
||||
|
||||
// polls: manual => manual => manual
|
||||
assert_eq!(h1.poll_count, 3);
|
||||
|
||||
let mut res = BytesMut::from(buf.take_write_buf().as_ref());
|
||||
stabilize_date_header(&mut res);
|
||||
let res = &res[..];
|
||||
|
||||
// expect that unrequested error response is sent back since connection could not be cleaned
|
||||
let exp = http_msg(
|
||||
r"
|
||||
HTTP/1.1 500 Internal Server Error
|
||||
content-length: 0
|
||||
connection: close
|
||||
date: Thu, 01 Jan 1970 12:34:56 UTC
|
||||
|
||||
",
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
res,
|
||||
exp,
|
||||
"\nexpected response not in write buffer:\n\
|
||||
response: {:?}\n\
|
||||
expected: {:?}",
|
||||
String::from_utf8_lossy(res),
|
||||
String::from_utf8_lossy(&exp)
|
||||
);
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
fn http_msg(msg: impl AsRef<str>) -> BytesMut {
|
||||
let mut msg = msg
|
||||
.as_ref()
|
||||
.trim()
|
||||
.split('\n')
|
||||
.into_iter()
|
||||
.map(|line| [line.trim_start(), "\r"].concat())
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
|
||||
// remove trailing \r
|
||||
msg.pop();
|
||||
|
||||
if !msg.is_empty() && !msg.contains("\r\n\r\n") {
|
||||
msg.push_str("\r\n\r\n");
|
||||
}
|
||||
|
||||
BytesMut::from(msg.as_bytes())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn http_msg_creates_msg() {
|
||||
assert_eq!(http_msg(r""), "");
|
||||
|
||||
assert_eq!(
|
||||
http_msg(
|
||||
r"
|
||||
POST / HTTP/1.1
|
||||
Content-Length: 3
|
||||
|
||||
abc
|
||||
"
|
||||
),
|
||||
"POST / HTTP/1.1\r\nContent-Length: 3\r\n\r\nabc"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
http_msg(
|
||||
r"
|
||||
GET / HTTP/1.1
|
||||
Content-Length: 3
|
||||
|
||||
"
|
||||
),
|
||||
"GET / HTTP/1.1\r\nContent-Length: 3\r\n\r\n"
|
||||
);
|
||||
}
|
||||
|
@ -210,14 +210,14 @@ pub(crate) trait MessageType: Sized {
|
||||
dst.advance_mut(pos);
|
||||
}
|
||||
|
||||
// optimized date header, set_date writes \r\n
|
||||
if !has_date {
|
||||
// optimized date header, write_date_header writes its own \r\n
|
||||
config.write_date_header(dst, camel_case);
|
||||
} else {
|
||||
// msg eof
|
||||
dst.extend_from_slice(b"\r\n");
|
||||
}
|
||||
|
||||
// end-of-headers marker
|
||||
dst.extend_from_slice(b"\r\n");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -517,6 +517,7 @@ unsafe fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
|
||||
if let Some(c @ b'a'..=b'z') = iter.next() {
|
||||
buffer[index] = c & 0b1101_1111;
|
||||
}
|
||||
index += 1;
|
||||
}
|
||||
|
||||
index += 1;
|
||||
@ -528,7 +529,7 @@ mod tests {
|
||||
use std::rc::Rc;
|
||||
|
||||
use bytes::Bytes;
|
||||
use http::header::AUTHORIZATION;
|
||||
use http::header::{AUTHORIZATION, UPGRADE_INSECURE_REQUESTS};
|
||||
|
||||
use super::*;
|
||||
use crate::{
|
||||
@ -559,6 +560,9 @@ mod tests {
|
||||
head.headers
|
||||
.insert(CONTENT_TYPE, HeaderValue::from_static("plain/text"));
|
||||
|
||||
head.headers
|
||||
.insert(UPGRADE_INSECURE_REQUESTS, HeaderValue::from_static("1"));
|
||||
|
||||
let mut head = RequestHeadType::Owned(head);
|
||||
|
||||
let _ = head.encode_headers(
|
||||
@ -574,6 +578,7 @@ mod tests {
|
||||
assert!(data.contains("Connection: close\r\n"));
|
||||
assert!(data.contains("Content-Type: plain/text\r\n"));
|
||||
assert!(data.contains("Date: date\r\n"));
|
||||
assert!(data.contains("Upgrade-Insecure-Requests: 1\r\n"));
|
||||
|
||||
let _ = head.encode_headers(
|
||||
&mut bytes,
|
||||
|
@ -16,7 +16,7 @@ use crate::error::PayloadError;
|
||||
/// max buffer size 32k
|
||||
pub(crate) const MAX_BUFFER_SIZE: usize = 32_768;
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum PayloadStatus {
|
||||
Read,
|
||||
Pause,
|
||||
@ -263,7 +263,8 @@ mod tests {
|
||||
assert_not_impl_any!(Payload: Send, Sync, UnwindSafe, RefUnwindSafe);
|
||||
|
||||
assert_impl_all!(Inner: Unpin, Send, Sync);
|
||||
assert_not_impl_any!(Inner: UnwindSafe, RefUnwindSafe);
|
||||
// assertion not stable wrt rustc versions yet
|
||||
// assert_impl_all!(Inner: UnwindSafe, RefUnwindSafe);
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_unread_data() {
|
||||
|
@ -13,6 +13,7 @@ use actix_service::{
|
||||
};
|
||||
use actix_utils::future::ready;
|
||||
use futures_core::future::LocalBoxFuture;
|
||||
use tracing::error;
|
||||
|
||||
use crate::{
|
||||
body::{BoxBody, MessageBody},
|
||||
@ -305,13 +306,13 @@ where
|
||||
Box::pin(async move {
|
||||
let expect = expect
|
||||
.await
|
||||
.map_err(|e| log::error!("Init http expect service error: {:?}", e))?;
|
||||
.map_err(|e| error!("Init http expect service error: {:?}", e))?;
|
||||
|
||||
let upgrade = match upgrade {
|
||||
Some(upgrade) => {
|
||||
let upgrade = upgrade
|
||||
.await
|
||||
.map_err(|e| log::error!("Init http upgrade service error: {:?}", e))?;
|
||||
.map_err(|e| error!("Init http upgrade service error: {:?}", e))?;
|
||||
Some(upgrade)
|
||||
}
|
||||
None => None,
|
||||
@ -319,7 +320,7 @@ where
|
||||
|
||||
let service = service
|
||||
.await
|
||||
.map_err(|e| log::error!("Init http service error: {:?}", e))?;
|
||||
.map_err(|e| error!("Init http service error: {:?}", e))?;
|
||||
|
||||
Ok(H1ServiceHandler::new(
|
||||
cfg,
|
||||
@ -357,7 +358,7 @@ where
|
||||
|
||||
fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
|
||||
self._poll_ready(cx).map_err(|err| {
|
||||
log::error!("HTTP/1 service readiness error: {:?}", err);
|
||||
error!("HTTP/1 service readiness error: {:?}", err);
|
||||
DispatchError::Service(err)
|
||||
})
|
||||
}
|
||||
|
@ -1,6 +1,7 @@
|
||||
use std::{fmt, future::Future, pin::Pin, task::Context};
|
||||
|
||||
use actix_rt::time::{Instant, Sleep};
|
||||
use tracing::trace;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) enum TimerState {
|
||||
@ -24,7 +25,7 @@ impl TimerState {
|
||||
|
||||
pub(super) fn set(&mut self, timer: Sleep, line: u32) {
|
||||
if matches!(self, Self::Disabled) {
|
||||
log::trace!("setting disabled timer from line {}", line);
|
||||
trace!("setting disabled timer from line {}", line);
|
||||
}
|
||||
|
||||
*self = Self::Active {
|
||||
@ -39,11 +40,11 @@ impl TimerState {
|
||||
|
||||
pub(super) fn clear(&mut self, line: u32) {
|
||||
if matches!(self, Self::Disabled) {
|
||||
log::trace!("trying to clear a disabled timer from line {}", line);
|
||||
trace!("trying to clear a disabled timer from line {}", line);
|
||||
}
|
||||
|
||||
if matches!(self, Self::Inactive) {
|
||||
log::trace!("trying to clear an inactive timer from line {}", line);
|
||||
trace!("trying to clear an inactive timer from line {}", line);
|
||||
}
|
||||
|
||||
*self = Self::Inactive;
|
||||
|
@ -19,13 +19,15 @@ use h2::{
|
||||
server::{Connection, SendResponse},
|
||||
Ping, PingPong,
|
||||
};
|
||||
use log::{error, trace};
|
||||
use pin_project_lite::pin_project;
|
||||
use tracing::{error, trace, warn};
|
||||
|
||||
use crate::{
|
||||
body::{BodySize, BoxBody, MessageBody},
|
||||
config::ServiceConfig,
|
||||
header::{HeaderValue, CONNECTION, CONTENT_LENGTH, DATE, TRANSFER_ENCODING},
|
||||
header::{
|
||||
HeaderName, HeaderValue, CONNECTION, CONTENT_LENGTH, DATE, TRANSFER_ENCODING, UPGRADE,
|
||||
},
|
||||
service::HttpFlow,
|
||||
Extensions, OnConnectData, Payload, Request, Response, ResponseHead,
|
||||
};
|
||||
@ -141,7 +143,7 @@ where
|
||||
DispatchError::SendResponse(err) => {
|
||||
trace!("Error sending HTTP/2 response: {:?}", err)
|
||||
}
|
||||
DispatchError::SendData(err) => log::warn!("{:?}", err),
|
||||
DispatchError::SendData(err) => warn!("{:?}", err),
|
||||
DispatchError::ResponseBody(err) => {
|
||||
error!("Response payload stream error: {:?}", err)
|
||||
}
|
||||
@ -306,13 +308,22 @@ fn prepare_response(
|
||||
|
||||
// copy headers
|
||||
for (key, value) in head.headers.iter() {
|
||||
match *key {
|
||||
// TODO: consider skipping other headers according to:
|
||||
// https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.2
|
||||
// omit HTTP/1.x only headers
|
||||
CONNECTION | TRANSFER_ENCODING => continue,
|
||||
CONTENT_LENGTH if skip_len => continue,
|
||||
DATE => has_date = true,
|
||||
match key {
|
||||
// omit HTTP/1.x only headers according to:
|
||||
// https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.2
|
||||
&CONNECTION | &TRANSFER_ENCODING | &UPGRADE => continue,
|
||||
|
||||
&CONTENT_LENGTH if skip_len => continue,
|
||||
&DATE => has_date = true,
|
||||
|
||||
// omit HTTP/1.x only headers according to:
|
||||
// https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.2
|
||||
hdr if hdr == HeaderName::from_static("keep-alive")
|
||||
|| hdr == HeaderName::from_static("proxy-connection") =>
|
||||
{
|
||||
continue
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
|
||||
|
@ -14,7 +14,7 @@ use actix_service::{
|
||||
};
|
||||
use actix_utils::future::ready;
|
||||
use futures_core::{future::LocalBoxFuture, ready};
|
||||
use log::error;
|
||||
use tracing::{error, trace};
|
||||
|
||||
use crate::{
|
||||
body::{BoxBody, MessageBody},
|
||||
@ -355,7 +355,7 @@ where
|
||||
}
|
||||
|
||||
Err(err) => {
|
||||
log::trace!("H2 handshake error: {}", err);
|
||||
trace!("H2 handshake error: {}", err);
|
||||
Poll::Ready(Err(err))
|
||||
}
|
||||
},
|
||||
|
@ -12,7 +12,7 @@ use crate::header::{Charset, HTTP_VALUE};
|
||||
/// - A character sequence representing the actual value (`value`), separated by single quotes.
|
||||
///
|
||||
/// It is defined in [RFC 5987 §3.2](https://datatracker.ietf.org/doc/html/rfc5987#section-3.2).
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct ExtendedValue {
|
||||
/// The character set that is used to encode the `value` to a string.
|
||||
pub charset: Charset,
|
||||
|
@ -147,7 +147,7 @@ mod tests {
|
||||
|
||||
// copy of encoding from actix-web headers
|
||||
#[allow(clippy::enum_variant_names)] // allow Encoding prefix on EncodingExt
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum Encoding {
|
||||
Chunked,
|
||||
Brotli,
|
||||
|
@ -3,6 +3,7 @@
|
||||
//! ## Crate Features
|
||||
//! | Feature | Functionality |
|
||||
//! | ------------------- | ------------------------------------------- |
|
||||
//! | `http2` | HTTP/2 support via [h2]. |
|
||||
//! | `openssl` | TLS support via [OpenSSL]. |
|
||||
//! | `rustls` | TLS support via [rustls]. |
|
||||
//! | `compress-brotli` | Payload compression support: Brotli. |
|
||||
@ -10,6 +11,7 @@
|
||||
//! | `compress-zstd` | Payload compression support: Zstd. |
|
||||
//! | `trust-dns` | Use [trust-dns] as the client DNS resolver. |
|
||||
//!
|
||||
//! [h2]: https://crates.io/crates/h2
|
||||
//! [OpenSSL]: https://crates.io/crates/openssl
|
||||
//! [rustls]: https://crates.io/crates/rustls
|
||||
//! [trust-dns]: https://crates.io/crates/trust-dns
|
||||
@ -23,6 +25,7 @@
|
||||
)]
|
||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||
#![cfg_attr(docsrs, feature(doc_cfg))]
|
||||
|
||||
pub use ::http::{uri, uri::Uri};
|
||||
pub use ::http::{Method, StatusCode, Version};
|
||||
@ -67,6 +70,8 @@ pub use self::payload::{BoxedPayloadStream, Payload, PayloadStream};
|
||||
pub use self::requests::{Request, RequestHead, RequestHeadType};
|
||||
pub use self::responses::{Response, ResponseBuilder, ResponseHead};
|
||||
pub use self::service::HttpService;
|
||||
#[cfg(any(feature = "openssl", feature = "rustls"))]
|
||||
pub use self::service::TlsAcceptorConfig;
|
||||
|
||||
/// A major HTTP protocol version.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
|
@ -3,7 +3,7 @@ use std::{cell::RefCell, ops, rc::Rc};
|
||||
use bitflags::bitflags;
|
||||
|
||||
/// Represents various types of connection
|
||||
#[derive(Copy, Clone, PartialEq, Debug)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum ConnectionType {
|
||||
/// Close connection after response.
|
||||
Close,
|
||||
|
@ -13,7 +13,8 @@ use crate::error::PayloadError;
|
||||
/// A boxed payload stream.
|
||||
pub type BoxedPayloadStream = Pin<Box<dyn Stream<Item = Result<Bytes, PayloadError>>>>;
|
||||
|
||||
#[deprecated(since = "4.0.0", note = "Renamed to `BoxedPayloadStream`.")]
|
||||
#[doc(hidden)]
|
||||
#[deprecated(since = "3.0.0", note = "Renamed to `BoxedPayloadStream`.")]
|
||||
pub type PayloadStream = BoxedPayloadStream;
|
||||
|
||||
#[cfg(not(feature = "http2"))]
|
||||
|
@ -144,7 +144,7 @@ impl ResponseBuilder {
|
||||
self
|
||||
}
|
||||
|
||||
/// Set connection type to Upgrade
|
||||
/// Set connection type to `Upgrade`.
|
||||
#[inline]
|
||||
pub fn upgrade<V>(&mut self, value: V) -> &mut Self
|
||||
where
|
||||
@ -161,7 +161,7 @@ impl ResponseBuilder {
|
||||
self
|
||||
}
|
||||
|
||||
/// Force close connection, even if it is marked as keep-alive
|
||||
/// Force-close connection, even if it is marked as keep-alive.
|
||||
#[inline]
|
||||
pub fn force_close(&mut self) -> &mut Self {
|
||||
if let Some(parts) = self.inner() {
|
||||
|
@ -237,7 +237,7 @@ mod tests {
|
||||
.await;
|
||||
|
||||
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
|
||||
let _ = stream
|
||||
stream
|
||||
.write_all(b"GET /camel HTTP/1.1\r\nConnection: Close\r\n\r\n")
|
||||
.unwrap();
|
||||
let mut data = vec![];
|
||||
@ -251,7 +251,7 @@ mod tests {
|
||||
assert!(memmem::find(&data, b"content-length").is_none());
|
||||
|
||||
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
|
||||
let _ = stream
|
||||
stream
|
||||
.write_all(b"GET /lower HTTP/1.1\r\nConnection: Close\r\n\r\n")
|
||||
.unwrap();
|
||||
let mut data = vec![];
|
||||
|
@ -285,6 +285,24 @@ impl From<&'static [u8]> for Response<&'static [u8]> {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Vec<u8>> for Response<Vec<u8>> {
|
||||
fn from(val: Vec<u8>) -> Self {
|
||||
let mut res = Response::with_body(StatusCode::OK, val);
|
||||
let mime = mime::APPLICATION_OCTET_STREAM.try_into_value().unwrap();
|
||||
res.headers_mut().insert(header::CONTENT_TYPE, mime);
|
||||
res
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Vec<u8>> for Response<Vec<u8>> {
|
||||
fn from(val: &Vec<u8>) -> Self {
|
||||
let mut res = Response::with_body(StatusCode::OK, val.clone());
|
||||
let mime = mime::APPLICATION_OCTET_STREAM.try_into_value().unwrap();
|
||||
res.headers_mut().insert(header::CONTENT_TYPE, mime);
|
||||
res
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for Response<String> {
|
||||
fn from(val: String) -> Self {
|
||||
let mut res = Response::with_body(StatusCode::OK, val);
|
||||
|
@ -15,6 +15,7 @@ use actix_service::{
|
||||
};
|
||||
use futures_core::{future::LocalBoxFuture, ready};
|
||||
use pin_project_lite::pin_project;
|
||||
use tracing::error;
|
||||
|
||||
use crate::{
|
||||
body::{BoxBody, MessageBody},
|
||||
@ -180,6 +181,25 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
/// Configuration options used when accepting TLS connection.
|
||||
#[cfg(any(feature = "openssl", feature = "rustls"))]
|
||||
#[cfg_attr(docsrs, doc(cfg(any(feature = "openssl", feature = "rustls"))))]
|
||||
#[derive(Debug, Default)]
|
||||
pub struct TlsAcceptorConfig {
|
||||
pub(crate) handshake_timeout: Option<std::time::Duration>,
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "openssl", feature = "rustls"))]
|
||||
impl TlsAcceptorConfig {
|
||||
/// Set TLS handshake timeout duration.
|
||||
pub fn handshake_timeout(self, dur: std::time::Duration) -> Self {
|
||||
Self {
|
||||
handshake_timeout: Some(dur),
|
||||
// ..self
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "openssl")]
|
||||
mod openssl {
|
||||
use actix_service::ServiceFactoryExt as _;
|
||||
@ -229,7 +249,28 @@ mod openssl {
|
||||
Error = TlsError<SslError, DispatchError>,
|
||||
InitError = (),
|
||||
> {
|
||||
Acceptor::new(acceptor)
|
||||
self.openssl_with_config(acceptor, TlsAcceptorConfig::default())
|
||||
}
|
||||
|
||||
/// Create OpenSSL based service with custom TLS acceptor configuration.
|
||||
pub fn openssl_with_config(
|
||||
self,
|
||||
acceptor: SslAcceptor,
|
||||
tls_acceptor_config: TlsAcceptorConfig,
|
||||
) -> impl ServiceFactory<
|
||||
TcpStream,
|
||||
Config = (),
|
||||
Response = (),
|
||||
Error = TlsError<SslError, DispatchError>,
|
||||
InitError = (),
|
||||
> {
|
||||
let mut acceptor = Acceptor::new(acceptor);
|
||||
|
||||
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
|
||||
acceptor.set_handshake_timeout(handshake_timeout);
|
||||
}
|
||||
|
||||
acceptor
|
||||
.map_init_err(|_| {
|
||||
unreachable!("TLS acceptor service factory does not error on init")
|
||||
})
|
||||
@ -292,8 +333,23 @@ mod rustls {
|
||||
{
|
||||
/// Create Rustls based service.
|
||||
pub fn rustls(
|
||||
self,
|
||||
config: ServerConfig,
|
||||
) -> impl ServiceFactory<
|
||||
TcpStream,
|
||||
Config = (),
|
||||
Response = (),
|
||||
Error = TlsError<io::Error, DispatchError>,
|
||||
InitError = (),
|
||||
> {
|
||||
self.rustls_with_config(config, TlsAcceptorConfig::default())
|
||||
}
|
||||
|
||||
/// Create Rustls based service with custom TLS acceptor configuration.
|
||||
pub fn rustls_with_config(
|
||||
self,
|
||||
mut config: ServerConfig,
|
||||
tls_acceptor_config: TlsAcceptorConfig,
|
||||
) -> impl ServiceFactory<
|
||||
TcpStream,
|
||||
Config = (),
|
||||
@ -305,7 +361,13 @@ mod rustls {
|
||||
protos.extend_from_slice(&config.alpn_protocols);
|
||||
config.alpn_protocols = protos;
|
||||
|
||||
Acceptor::new(config)
|
||||
let mut acceptor = Acceptor::new(config);
|
||||
|
||||
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
|
||||
acceptor.set_handshake_timeout(handshake_timeout);
|
||||
}
|
||||
|
||||
acceptor
|
||||
.map_init_err(|_| {
|
||||
unreachable!("TLS acceptor service factory does not error on init")
|
||||
})
|
||||
@ -369,13 +431,13 @@ where
|
||||
Box::pin(async move {
|
||||
let expect = expect
|
||||
.await
|
||||
.map_err(|e| log::error!("Init http expect service error: {:?}", e))?;
|
||||
.map_err(|e| error!("Init http expect service error: {:?}", e))?;
|
||||
|
||||
let upgrade = match upgrade {
|
||||
Some(upgrade) => {
|
||||
let upgrade = upgrade
|
||||
.await
|
||||
.map_err(|e| log::error!("Init http upgrade service error: {:?}", e))?;
|
||||
.map_err(|e| error!("Init http upgrade service error: {:?}", e))?;
|
||||
Some(upgrade)
|
||||
}
|
||||
None => None,
|
||||
@ -383,7 +445,7 @@ where
|
||||
|
||||
let service = service
|
||||
.await
|
||||
.map_err(|e| log::error!("Init http service error: {:?}", e))?;
|
||||
.map_err(|e| error!("Init http service error: {:?}", e))?;
|
||||
|
||||
Ok(HttpServiceHandler::new(
|
||||
cfg,
|
||||
@ -490,7 +552,7 @@ where
|
||||
|
||||
fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
|
||||
self._poll_ready(cx).map_err(|err| {
|
||||
log::error!("HTTP service readiness error: {:?}", err);
|
||||
error!("HTTP service readiness error: {:?}", err);
|
||||
DispatchError::Service(err)
|
||||
})
|
||||
}
|
||||
@ -666,7 +728,7 @@ where
|
||||
self.poll(cx)
|
||||
}
|
||||
Err(err) => {
|
||||
log::trace!("H2 handshake error: {}", err);
|
||||
tracing::trace!("H2 handshake error: {}", err);
|
||||
Poll::Ready(Err(err))
|
||||
}
|
||||
}
|
||||
|
@ -19,29 +19,7 @@ use crate::{
|
||||
Request,
|
||||
};
|
||||
|
||||
/// Test `Request` builder
|
||||
///
|
||||
/// ```ignore
|
||||
/// # use http::{header, StatusCode};
|
||||
/// # use actix_web::*;
|
||||
/// use actix_web::test::TestRequest;
|
||||
///
|
||||
/// fn index(req: &HttpRequest) -> Response {
|
||||
/// if let Some(hdr) = req.headers().get(header::CONTENT_TYPE) {
|
||||
/// Response::Ok().into()
|
||||
/// } else {
|
||||
/// Response::BadRequest().into()
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// let resp = TestRequest::default().insert_header("content-type", "text/plain")
|
||||
/// .run(&index)
|
||||
/// .unwrap();
|
||||
/// assert_eq!(resp.status(), StatusCode::OK);
|
||||
///
|
||||
/// let resp = TestRequest::default().run(&index).unwrap();
|
||||
/// assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
|
||||
/// ```
|
||||
/// Test `Request` builder.
|
||||
pub struct TestRequest(Option<Inner>);
|
||||
|
||||
struct Inner {
|
||||
|
@ -2,6 +2,7 @@ use actix_codec::{Decoder, Encoder};
|
||||
use bitflags::bitflags;
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use bytestring::ByteString;
|
||||
use tracing::error;
|
||||
|
||||
use super::{
|
||||
frame::Parser,
|
||||
@ -10,7 +11,7 @@ use super::{
|
||||
};
|
||||
|
||||
/// A WebSocket message.
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum Message {
|
||||
/// Text message.
|
||||
Text(ByteString),
|
||||
@ -35,7 +36,7 @@ pub enum Message {
|
||||
}
|
||||
|
||||
/// A WebSocket frame.
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum Frame {
|
||||
/// Text frame. Note that the codec does not validate UTF-8 encoding.
|
||||
Text(Bytes),
|
||||
@ -57,7 +58,7 @@ pub enum Frame {
|
||||
}
|
||||
|
||||
/// A WebSocket continuation item.
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum Item {
|
||||
FirstText(Bytes),
|
||||
FirstBinary(Bytes),
|
||||
@ -253,7 +254,7 @@ impl Decoder for Codec {
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
log::error!("Unfinished fragment {:?}", opcode);
|
||||
error!("Unfinished fragment {:?}", opcode);
|
||||
Err(ProtocolError::ContinuationFragment(opcode))
|
||||
}
|
||||
};
|
||||
|
@ -73,8 +73,8 @@ mod inner {
|
||||
use actix_service::{IntoService, Service};
|
||||
use futures_core::stream::Stream;
|
||||
use local_channel::mpsc;
|
||||
use log::debug;
|
||||
use pin_project_lite::pin_project;
|
||||
use tracing::debug;
|
||||
|
||||
use actix_codec::{AsyncRead, AsyncWrite, Decoder, Encoder, Framed};
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
use std::convert::TryFrom;
|
||||
|
||||
use bytes::{Buf, BufMut, BytesMut};
|
||||
use log::debug;
|
||||
use tracing::debug;
|
||||
|
||||
use super::{
|
||||
mask::apply_mask,
|
||||
@ -17,7 +17,6 @@ impl Parser {
|
||||
fn parse_metadata(
|
||||
src: &[u8],
|
||||
server: bool,
|
||||
max_size: usize,
|
||||
) -> Result<Option<(usize, bool, OpCode, usize, Option<[u8; 4]>)>, ProtocolError> {
|
||||
let chunk_len = src.len();
|
||||
|
||||
@ -60,20 +59,12 @@ impl Parser {
|
||||
return Ok(None);
|
||||
}
|
||||
let len = u64::from_be_bytes(TryFrom::try_from(&src[idx..idx + 8]).unwrap());
|
||||
if len > max_size as u64 {
|
||||
return Err(ProtocolError::Overflow);
|
||||
}
|
||||
idx += 8;
|
||||
len as usize
|
||||
} else {
|
||||
len as usize
|
||||
};
|
||||
|
||||
// check for max allowed size
|
||||
if length > max_size {
|
||||
return Err(ProtocolError::Overflow);
|
||||
}
|
||||
|
||||
let mask = if server {
|
||||
if chunk_len < idx + 4 {
|
||||
return Ok(None);
|
||||
@ -98,11 +89,10 @@ impl Parser {
|
||||
max_size: usize,
|
||||
) -> Result<Option<(bool, OpCode, Option<BytesMut>)>, ProtocolError> {
|
||||
// try to parse ws frame metadata
|
||||
let (idx, finished, opcode, length, mask) =
|
||||
match Parser::parse_metadata(src, server, max_size)? {
|
||||
None => return Ok(None),
|
||||
Some(res) => res,
|
||||
};
|
||||
let (idx, finished, opcode, length, mask) = match Parser::parse_metadata(src, server)? {
|
||||
None => return Ok(None),
|
||||
Some(res) => res,
|
||||
};
|
||||
|
||||
// not enough data
|
||||
if src.len() < idx + length {
|
||||
@ -112,6 +102,13 @@ impl Parser {
// remove prefix
src.advance(idx);

// check for max allowed size
if length > max_size {
// drop the payload
src.advance(length);
return Err(ProtocolError::Overflow);
}

// no need for body
if length == 0 {
return Ok(Some((finished, opcode, None)));
@ -339,6 +336,30 @@ mod tests {
}
}

#[test]
fn test_parse_frame_max_size_recoverability() {
let mut buf = BytesMut::new();
// The first text frame with length == 2, payload doesn't matter.
buf.extend(&[0b0000_0001u8, 0b0000_0010u8, 0b0000_0000u8, 0b0000_0000u8]);
// Next binary frame with length == 2 and payload == `[0b1111_1111u8, 0b1111_1111u8]`.
buf.extend(&[0b0000_0010u8, 0b0000_0010u8, 0b1111_1111u8, 0b1111_1111u8]);

assert_eq!(buf.len(), 8);
assert!(matches!(
Parser::parse(&mut buf, false, 1),
Err(ProtocolError::Overflow)
));
assert_eq!(buf.len(), 4);
let frame = extract(Parser::parse(&mut buf, false, 2));
assert!(!frame.finished);
assert_eq!(frame.opcode, OpCode::Binary);
assert_eq!(
frame.payload,
Bytes::from(vec![0b1111_1111u8, 0b1111_1111u8])
);
assert_eq!(buf.len(), 0);
}

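The test above pins down the behavior at the parser level. As a hedged sketch (not from this diff), a caller working at the codec level might treat `Overflow` as recoverable in the same spirit, since the oversized payload has already been consumed from the buffer; the helper name `drain_frames` is illustrative only, and whether recovery is appropriate depends on the application's error policy.

```rust
use actix_codec::Decoder;
use actix_http::ws::{Codec, Frame, ProtocolError};
use bytes::BytesMut;

// Illustrative helper: decode as many frames as possible, treating
// `Overflow` as recoverable (the offending frame was dropped by the parser).
fn drain_frames(codec: &mut Codec, buf: &mut BytesMut) -> Vec<Frame> {
    let mut frames = Vec::new();

    loop {
        match codec.decode(buf) {
            Ok(Some(frame)) => frames.push(frame),
            // not enough data buffered yet
            Ok(None) => break,
            // oversized frame was skipped; later frames may still parse
            Err(ProtocolError::Overflow) => continue,
            // any other protocol error is treated as fatal in this sketch
            Err(_) => break,
        }
    }

    frames
}
```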
#[test]
|
||||
fn test_ping_frame() {
|
||||
let mut buf = BytesMut::new();
|
||||
|
@ -47,40 +47,6 @@ pub fn apply_mask_fast32(buf: &mut [u8], mask: [u8; 4]) {
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
// legacy test from old apply mask test. kept for now for back compat test.
|
||||
// TODO: remove it and favor the other test.
|
||||
#[test]
|
||||
fn test_apply_mask_legacy() {
|
||||
let mask = [0x6d, 0xb6, 0xb2, 0x80];
|
||||
|
||||
let unmasked = vec![
|
||||
0xf3, 0x00, 0x01, 0x02, 0x03, 0x80, 0x81, 0x82, 0xff, 0xfe, 0x00, 0x17, 0x74, 0xf9,
|
||||
0x12, 0x03,
|
||||
];
|
||||
|
||||
// Check masking with proper alignment.
|
||||
{
|
||||
let mut masked = unmasked.clone();
|
||||
apply_mask_fallback(&mut masked, mask);
|
||||
|
||||
let mut masked_fast = unmasked.clone();
|
||||
apply_mask(&mut masked_fast, mask);
|
||||
|
||||
assert_eq!(masked, masked_fast);
|
||||
}
|
||||
|
||||
// Check masking without alignment.
|
||||
{
|
||||
let mut masked = unmasked.clone();
|
||||
apply_mask_fallback(&mut masked[1..], mask);
|
||||
|
||||
let mut masked_fast = unmasked;
|
||||
apply_mask(&mut masked_fast[1..], mask);
|
||||
|
||||
assert_eq!(masked, masked_fast);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_mask() {
|
||||
let mask = [0x6d, 0xb6, 0xb2, 0x80];
|
||||
|
@ -67,7 +67,7 @@ pub enum ProtocolError {
|
||||
}
|
||||
|
||||
/// WebSocket handshake errors
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Display, Error)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Display, Error)]
|
||||
pub enum HandshakeError {
|
||||
/// Only get method is allowed.
|
||||
#[display(fmt = "Method not allowed.")]
|
||||
|
@ -3,6 +3,8 @@ use std::{
|
||||
fmt,
|
||||
};
|
||||
|
||||
use tracing::error;
|
||||
|
||||
/// Operation codes defined in [RFC 6455 §11.8].
|
||||
///
|
||||
/// [RFC 6455]: https://datatracker.ietf.org/doc/html/rfc6455#section-11.8
|
||||
@ -58,7 +60,7 @@ impl From<OpCode> for u8 {
|
||||
Ping => 9,
|
||||
Pong => 10,
|
||||
Bad => {
|
||||
log::error!("Attempted to convert invalid opcode to u8. This is a bug.");
|
||||
error!("Attempted to convert invalid opcode to u8. This is a bug.");
|
||||
8 // if this somehow happens, a close frame will help us tear down quickly
|
||||
}
|
||||
}
|
||||
|
@ -2,13 +2,13 @@
|
||||
|
||||
extern crate tls_openssl as openssl;
|
||||
|
||||
use std::{convert::Infallible, io};
|
||||
use std::{convert::Infallible, io, time::Duration};
|
||||
|
||||
use actix_http::{
|
||||
body::{BodyStream, BoxBody, SizedStream},
|
||||
error::PayloadError,
|
||||
header::{self, HeaderValue},
|
||||
Error, HttpService, Method, Request, Response, StatusCode, Version,
|
||||
Error, HttpService, Method, Request, Response, StatusCode, TlsAcceptorConfig, Version,
|
||||
};
|
||||
use actix_http_test::test_server;
|
||||
use actix_service::{fn_service, ServiceFactoryExt};
|
||||
@ -66,7 +66,7 @@ fn tls_config() -> SslAcceptor {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2() -> io::Result<()> {
|
||||
async fn h2() -> io::Result<()> {
|
||||
let srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| ok::<_, Error>(Response::ok()))
|
||||
@ -81,7 +81,7 @@ async fn test_h2() -> io::Result<()> {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_1() -> io::Result<()> {
|
||||
async fn h2_1() -> io::Result<()> {
|
||||
let srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.finish(|req: Request| {
|
||||
@ -89,7 +89,10 @@ async fn test_h2_1() -> io::Result<()> {
|
||||
assert_eq!(req.version(), Version::HTTP_2);
|
||||
ok::<_, Error>(Response::ok())
|
||||
})
|
||||
.openssl(tls_config())
|
||||
.openssl_with_config(
|
||||
tls_config(),
|
||||
TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)),
|
||||
)
|
||||
.map_err(|_| ())
|
||||
})
|
||||
.await;
|
||||
@ -100,7 +103,7 @@ async fn test_h2_1() -> io::Result<()> {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_body() -> io::Result<()> {
|
||||
async fn h2_body() -> io::Result<()> {
|
||||
let data = "HELLOWORLD".to_owned().repeat(64 * 1024); // 640 KiB
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
@ -122,7 +125,7 @@ async fn test_h2_body() -> io::Result<()> {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_content_length() {
|
||||
async fn h2_content_length() {
|
||||
let srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|req: Request| {
|
||||
@ -164,7 +167,7 @@ async fn test_h2_content_length() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_headers() {
|
||||
async fn h2_headers() {
|
||||
let data = STR.repeat(10);
|
||||
let data2 = data.clone();
|
||||
|
||||
@ -229,7 +232,7 @@ const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
|
||||
Hello World Hello World Hello World Hello World Hello World";
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_body2() {
|
||||
async fn h2_body2() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
|
||||
@ -247,7 +250,7 @@ async fn test_h2_body2() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_head_empty() {
|
||||
async fn h2_head_empty() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.finish(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
|
||||
@ -271,7 +274,7 @@ async fn test_h2_head_empty() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_head_binary() {
|
||||
async fn h2_head_binary() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
|
||||
@ -294,7 +297,7 @@ async fn test_h2_head_binary() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_head_binary2() {
|
||||
async fn h2_head_binary2() {
|
||||
let srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
|
||||
@ -313,7 +316,7 @@ async fn test_h2_head_binary2() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_body_length() {
|
||||
async fn h2_body_length() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| async {
|
||||
@ -338,7 +341,7 @@ async fn test_h2_body_length() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_body_chunked_explicit() {
|
||||
async fn h2_body_chunked_explicit() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| {
|
||||
@ -366,7 +369,7 @@ async fn test_h2_body_chunked_explicit() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_response_http_error_handling() {
|
||||
async fn h2_response_http_error_handling() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(fn_service(|_| {
|
||||
@ -406,7 +409,7 @@ impl From<BadRequest> for Response<BoxBody> {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_service_error() {
|
||||
async fn h2_service_error() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| err::<Response<BoxBody>, _>(BadRequest))
|
||||
@ -424,7 +427,7 @@ async fn test_h2_service_error() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_on_connect() {
|
||||
async fn h2_on_connect() {
|
||||
let srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.on_connect_ext(|_, data| {
|
||||
|
@ -8,13 +8,14 @@ use std::{
|
||||
net::{SocketAddr, TcpStream as StdTcpStream},
|
||||
sync::Arc,
|
||||
task::Poll,
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use actix_http::{
|
||||
body::{BodyStream, BoxBody, SizedStream},
|
||||
error::PayloadError,
|
||||
header::{self, HeaderName, HeaderValue},
|
||||
Error, HttpService, Method, Request, Response, StatusCode, Version,
|
||||
Error, HttpService, Method, Request, Response, StatusCode, TlsAcceptorConfig, Version,
|
||||
};
|
||||
use actix_http_test::test_server;
|
||||
use actix_rt::pin;
|
||||
@ -106,7 +107,7 @@ pub fn get_negotiated_alpn_protocol(
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h1() -> io::Result<()> {
|
||||
async fn h1() -> io::Result<()> {
|
||||
let srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h1(|_| ok::<_, Error>(Response::ok()))
|
||||
@ -120,7 +121,7 @@ async fn test_h1() -> io::Result<()> {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2() -> io::Result<()> {
|
||||
async fn h2() -> io::Result<()> {
|
||||
let srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| ok::<_, Error>(Response::ok()))
|
||||
@ -134,7 +135,7 @@ async fn test_h2() -> io::Result<()> {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h1_1() -> io::Result<()> {
|
||||
async fn h1_1() -> io::Result<()> {
|
||||
let srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h1(|req: Request| {
|
||||
@ -152,7 +153,7 @@ async fn test_h1_1() -> io::Result<()> {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_1() -> io::Result<()> {
|
||||
async fn h2_1() -> io::Result<()> {
|
||||
let srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.finish(|req: Request| {
|
||||
@ -160,7 +161,10 @@ async fn test_h2_1() -> io::Result<()> {
|
||||
assert_eq!(req.version(), Version::HTTP_2);
|
||||
ok::<_, Error>(Response::ok())
|
||||
})
|
||||
.rustls(tls_config())
|
||||
.rustls_with_config(
|
||||
tls_config(),
|
||||
TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)),
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
@ -170,7 +174,7 @@ async fn test_h2_1() -> io::Result<()> {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_body1() -> io::Result<()> {
|
||||
async fn h2_body1() -> io::Result<()> {
|
||||
let data = "HELLOWORLD".to_owned().repeat(64 * 1024);
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
@ -191,7 +195,7 @@ async fn test_h2_body1() -> io::Result<()> {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_content_length() {
|
||||
async fn h2_content_length() {
|
||||
let srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|req: Request| {
|
||||
@ -212,6 +216,7 @@ async fn test_h2_content_length() {
|
||||
let value = HeaderValue::from_static("0");
|
||||
|
||||
{
|
||||
#[allow(clippy::single_element_loop)]
|
||||
for &i in &[0] {
|
||||
let req = srv
|
||||
.request(Method::HEAD, srv.surl(&format!("/{}", i)))
|
||||
@ -226,6 +231,7 @@ async fn test_h2_content_length() {
|
||||
// assert_eq!(response.headers().get(&header), None);
|
||||
}
|
||||
|
||||
#[allow(clippy::single_element_loop)]
|
||||
for &i in &[1] {
|
||||
let req = srv
|
||||
.request(Method::GET, srv.surl(&format!("/{}", i)))
|
||||
@ -245,7 +251,7 @@ async fn test_h2_content_length() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_headers() {
|
||||
async fn h2_headers() {
|
||||
let data = STR.repeat(10);
|
||||
let data2 = data.clone();
|
||||
|
||||
@ -309,7 +315,7 @@ const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
|
||||
Hello World Hello World Hello World Hello World Hello World";
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_body2() {
|
||||
async fn h2_body2() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
|
||||
@ -326,7 +332,7 @@ async fn test_h2_body2() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_head_empty() {
|
||||
async fn h2_head_empty() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.finish(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
|
||||
@ -352,7 +358,7 @@ async fn test_h2_head_empty() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_head_binary() {
|
||||
async fn h2_head_binary() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
|
||||
@ -377,7 +383,7 @@ async fn test_h2_head_binary() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_head_binary2() {
|
||||
async fn h2_head_binary2() {
|
||||
let srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
|
||||
@ -398,7 +404,7 @@ async fn test_h2_head_binary2() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_body_length() {
|
||||
async fn h2_body_length() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| {
|
||||
@ -420,7 +426,7 @@ async fn test_h2_body_length() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_body_chunked_explicit() {
|
||||
async fn h2_body_chunked_explicit() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| {
|
||||
@ -447,7 +453,7 @@ async fn test_h2_body_chunked_explicit() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_response_http_error_handling() {
|
||||
async fn h2_response_http_error_handling() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(fn_factory_with_config(|_: ()| {
|
||||
@ -486,7 +492,7 @@ impl From<BadRequest> for Response<BoxBody> {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h2_service_error() {
|
||||
async fn h2_service_error() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h2(|_| err::<Response<BoxBody>, _>(BadRequest))
|
||||
@ -503,7 +509,7 @@ async fn test_h2_service_error() {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_h1_service_error() {
|
||||
async fn h1_service_error() {
|
||||
let mut srv = test_server(move || {
|
||||
HttpService::build()
|
||||
.h1(|_| err::<Response<BoxBody>, _>(BadRequest))
|
||||
@ -524,7 +530,7 @@ const HTTP1_1_ALPN_PROTOCOL: &[u8] = b"http/1.1";
|
||||
const CUSTOM_ALPN_PROTOCOL: &[u8] = b"custom";
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_alpn_h1() -> io::Result<()> {
|
||||
async fn alpn_h1() -> io::Result<()> {
|
||||
let srv = test_server(move || {
|
||||
let mut config = tls_config();
|
||||
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
|
||||
@ -546,7 +552,7 @@ async fn test_alpn_h1() -> io::Result<()> {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_alpn_h2() -> io::Result<()> {
|
||||
async fn alpn_h2() -> io::Result<()> {
|
||||
let srv = test_server(move || {
|
||||
let mut config = tls_config();
|
||||
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
|
||||
@ -572,7 +578,7 @@ async fn test_alpn_h2() -> io::Result<()> {
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_alpn_h2_1() -> io::Result<()> {
|
||||
async fn alpn_h2_1() -> io::Result<()> {
|
||||
let srv = test_server(move || {
|
||||
let mut config = tls_config();
|
||||
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
|
||||
|
@ -850,7 +850,8 @@ async fn not_modified_spec_h1() {
|
||||
Some(&header::HeaderValue::from_static("4")),
|
||||
);
|
||||
// server does not prevent payload from being sent but clients may choose not to read it
|
||||
// TODO: this is probably a bug, especially since CL header can differ in length from the body
|
||||
// TODO: this is probably a bug in the client, especially since CL header can differ in length
|
||||
// from the body
|
||||
assert!(!srv.load_body(res).await.unwrap().is_empty());
|
||||
|
||||
// TODO: add stream response tests
|
||||
|
@ -1,6 +1,11 @@
|
||||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
## Unreleased - 2022-xx-xx
|
||||
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
|
||||
|
||||
|
||||
## 0.4.0 - 2022-02-25
|
||||
- No significant changes since `0.4.0-beta.13`.
|
||||
|
||||
|
||||
## 0.4.0-beta.13 - 2022-01-31
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-multipart"
|
||||
version = "0.4.0-beta.13"
|
||||
version = "0.4.0"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Multipart form support for Actix Web"
|
||||
keywords = ["http", "web", "framework", "async", "futures"]
|
||||
@ -14,8 +14,8 @@ name = "actix_multipart"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
actix-utils = "3.0.0"
|
||||
actix-web = { version = "4.0.0-rc.1", default-features = false }
|
||||
actix-utils = "3"
|
||||
actix-web = { version = "4", default-features = false }
|
||||
|
||||
bytes = "1"
|
||||
derive_more = "0.99.5"
|
||||
@ -28,7 +28,7 @@ twoway = "0.2"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-http = "3.0.0-rc.1"
|
||||
actix-http = "3.0.0"
|
||||
futures-util = { version = "0.3.7", default-features = false, features = ["alloc"] }
|
||||
tokio = { version = "1.8.4", features = ["sync"] }
|
||||
tokio-stream = "0.1"
|
||||
|
@ -3,11 +3,11 @@
|
||||
> Multipart form support for Actix Web.
|
||||
|
||||
[](https://crates.io/crates/actix-multipart)
|
||||
[](https://docs.rs/actix-multipart/0.4.0-beta.13)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
|
||||
[](https://docs.rs/actix-multipart/0.4.0)
|
||||

|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-multipart/0.4.0-beta.13)
|
||||
[](https://deps.rs/crate/actix-multipart/0.4.0)
|
||||
[](https://crates.io/crates/actix-multipart)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
@ -1,8 +1,80 @@
|
||||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
## Unreleased - 2022-xx-xx
|
||||
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
|
||||
|
||||
|
||||
## 0.5.0 - 2022-02-22
|
||||
### Added
|
||||
- Add `Path::as_str`. [#2590]
|
||||
- Add `ResourceDef::set_name`. [#373][net#373]
|
||||
- Add `RouterBuilder::push`. [#2612]
|
||||
- Implement `IntoPatterns` for `bytestring::ByteString`. [#372][net#372]
|
||||
- Introduce `ResourceDef::join`. [#380][net#380]
|
||||
- Introduce `ResourceDef::pattern_iter` to get an iterator over all patterns in a multi-pattern resource. [#373][net#373]
|
||||
- `Resource` is now implemented for `&mut Path<_>` and `RefMut<Path<_>>`. [#2568]
|
||||
- Support `build_resource_path` on multi-pattern resources. [#2356]
|
||||
- Support multi-pattern prefixes and joins. [#2356]
|
||||
|
||||
### Changed
|
||||
- Change signature of `ResourceDef::capture_match_info_fn` to remove `user_data` parameter. [#2612]
|
||||
- Deprecate `Path::path`. [#2590]
|
||||
- Disallow prefix routes with tail segments. [#379][net#379]
|
||||
- Enforce path separators on dynamic prefixes. [#378][net#378]
|
||||
- Minimum supported Rust version (MSRV) is now 1.54.
|
||||
- Prefix segments now always end with a segment delimiter or end-of-input. [#2355]
|
||||
- Prefix segments with trailing slashes define a trailing empty segment. [#2355]
|
||||
- `Quoter::requote` now returns `Option<Vec<u8>>`. [#2613]
|
||||
- Re-work `IntoPatterns` trait, adding a `Patterns` enum. [#372][net#372]
|
||||
- Rename `Path::{len => segment_count}` to be more descriptive of its purpose. [#370][net#370]
|
||||
- Rename `ResourceDef::{is_prefix_match => find_match}`. [#373][net#373]
|
||||
- Rename `ResourceDef::{match_path => capture_match_info}`. [#373][net#373]
|
||||
- Rename `ResourceDef::{match_path_checked => capture_match_info_fn}`. [#373][net#373]
|
||||
- Rename `ResourceDef::{resource_path => resource_path_from_iter}`. [#371][net#371]
|
||||
- Rename `ResourceDef::{resource_path_named => resource_path_from_map}`. [#371][net#371]
|
||||
- Rename `Router::{*_checked => *_fn}`. [#373][net#373]
|
||||
- Replace `Option<U>` with `U` in `Router` API. [#2612]
|
||||
- `Resource` trait now uses an associated type, `Path`, instead of a generic parameter. [#2568]
|
||||
- `ResourceDef::pattern` now returns the first pattern in multi-pattern resources. [#2356]
|
||||
- `ResourceDef::resource_path_from_iter` now takes an `IntoIterator`. [#373][net#373]
|
||||
- Return type of `ResourceDef::name` is now `Option<&str>`. [#373][net#373]
|
||||
- Return type of `ResourceDef::pattern` is now `Option<&str>`. [#373][net#373]
|
||||
|
||||
### Fixed
|
||||
- Fix `ResourceDef`'s `PartialEq` implementation. [#373][net#373]
|
||||
- Fix segment interpolation leaving `Path` in unintended state after matching. [#368][net#368]
|
||||
- Improve malformed path error message. [#384][net#384]
|
||||
- `PathDeserializer` now decodes all percent encoded characters in dynamic segments. [#2566]
|
||||
- Relax bounds on `Router::recognize*` and `ResourceDef::capture_match_info`. [#2612]
|
||||
- Static patterns in multi-patterns are no longer interpreted as regex. [#366][net#366]
|
||||
|
||||
### Removed
|
||||
- `ResourceDef::name_mut`. [#373][net#373]
|
||||
- Unused `ResourceInfo`. [#2612]
|
||||
|
||||
[#2355]: https://github.com/actix/actix-web/pull/2355
|
||||
[#2356]: https://github.com/actix/actix-web/pull/2356
|
||||
[#2566]: https://github.com/actix/actix-net/pull/2566
|
||||
[#2568]: https://github.com/actix/actix-web/pull/2568
|
||||
[#2590]: https://github.com/actix/actix-web/pull/2590
|
||||
[#2612]: https://github.com/actix/actix-web/pull/2612
|
||||
[#2613]: https://github.com/actix/actix-web/pull/2613
|
||||
[net#366]: https://github.com/actix/actix-net/pull/366
|
||||
[net#368]: https://github.com/actix/actix-net/pull/368
|
||||
[net#368]: https://github.com/actix/actix-net/pull/368
|
||||
[net#370]: https://github.com/actix/actix-net/pull/370
|
||||
[net#371]: https://github.com/actix/actix-net/pull/371
|
||||
[net#372]: https://github.com/actix/actix-net/pull/372
|
||||
[net#373]: https://github.com/actix/actix-net/pull/373
|
||||
[net#378]: https://github.com/actix/actix-net/pull/378
|
||||
[net#379]: https://github.com/actix/actix-net/pull/379
|
||||
[net#380]: https://github.com/actix/actix-net/pull/380
|
||||
[net#384]: https://github.com/actix/actix-net/pull/384
|
||||
|
||||
|
||||
<details>
|
||||
<summary>0.5.0 Pre-Releases</summary>
|
||||
|
||||
## 0.5.0-rc.3 - 2022-01-31
|
||||
- Remove unused `ResourceInfo`. [#2612]
|
||||
- Add `RouterBuilder::push`. [#2612]
|
||||
@ -41,10 +113,10 @@
|
||||
|
||||
|
||||
## 0.5.0-beta.2 - 2021-09-09
|
||||
- Introduce `ResourceDef::join`. [#380]
|
||||
- Disallow prefix routes with tail segments. [#379]
|
||||
- Enforce path separators on dynamic prefixes. [#378]
|
||||
- Improve malformed path error message. [#384]
|
||||
- Introduce `ResourceDef::join`. [#380][net#380]
|
||||
- Disallow prefix routes with tail segments. [#379][net#379]
|
||||
- Enforce path separators on dynamic prefixes. [#378][net#378]
|
||||
- Improve malformed path error message. [#384][net#384]
|
||||
- Prefix segments now always end with a segment delimiter or end-of-input. [#2355]
|
||||
- Prefix segments with trailing slashes define a trailing empty segment. [#2355]
|
||||
- Support multi-pattern prefixes and joins. [#2356]
|
||||
@ -52,52 +124,54 @@
|
||||
- Support `build_resource_path` on multi-pattern resources. [#2356]
|
||||
- Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
[#378]: https://github.com/actix/actix-net/pull/378
|
||||
[#379]: https://github.com/actix/actix-net/pull/379
|
||||
[#380]: https://github.com/actix/actix-net/pull/380
|
||||
[#384]: https://github.com/actix/actix-net/pull/384
|
||||
[net#378]: https://github.com/actix/actix-net/pull/378
|
||||
[net#379]: https://github.com/actix/actix-net/pull/379
|
||||
[net#380]: https://github.com/actix/actix-net/pull/380
|
||||
[net#384]: https://github.com/actix/actix-net/pull/384
|
||||
[#2355]: https://github.com/actix/actix-web/pull/2355
|
||||
[#2356]: https://github.com/actix/actix-web/pull/2356
|
||||
|
||||
|
||||
## 0.5.0-beta.1 - 2021-07-20
|
||||
- Fix a bug in multi-patterns where static patterns are interpreted as regex. [#366]
|
||||
- Introduce `ResourceDef::pattern_iter` to get an iterator over all patterns in a multi-pattern resource. [#373]
|
||||
- Fix segment interpolation leaving `Path` in unintended state after matching. [#368]
|
||||
- Fix `ResourceDef` `PartialEq` implementation. [#373]
|
||||
- Re-work `IntoPatterns` trait, adding a `Patterns` enum. [#372]
|
||||
- Implement `IntoPatterns` for `bytestring::ByteString`. [#372]
|
||||
- Rename `Path::{len => segment_count}` to be more descriptive of its purpose. [#370]
|
||||
- Rename `ResourceDef::{resource_path => resource_path_from_iter}`. [#371]
|
||||
- `ResourceDef::resource_path_from_iter` now takes an `IntoIterator`. [#373]
|
||||
- Rename `ResourceDef::{resource_path_named => resource_path_from_map}`. [#371]
|
||||
- Rename `ResourceDef::{is_prefix_match => find_match}`. [#373]
|
||||
- Rename `ResourceDef::{match_path => capture_match_info}`. [#373]
|
||||
- Rename `ResourceDef::{match_path_checked => capture_match_info_fn}`. [#373]
|
||||
- Remove `ResourceDef::name_mut` and introduce `ResourceDef::set_name`. [#373]
|
||||
- Rename `Router::{*_checked => *_fn}`. [#373]
|
||||
- Return type of `ResourceDef::name` is now `Option<&str>`. [#373]
|
||||
- Return type of `ResourceDef::pattern` is now `Option<&str>`. [#373]
|
||||
- Fix a bug in multi-patterns where static patterns are interpreted as regex. [#366][net#366]
|
||||
- Introduce `ResourceDef::pattern_iter` to get an iterator over all patterns in a multi-pattern resource. [#373][net#373]
|
||||
- Fix segment interpolation leaving `Path` in unintended state after matching. [#368][net#368]
|
||||
- Fix `ResourceDef` `PartialEq` implementation. [#373][net#373]
|
||||
- Re-work `IntoPatterns` trait, adding a `Patterns` enum. [#372][net#372]
|
||||
- Implement `IntoPatterns` for `bytestring::ByteString`. [#372][net#372]
|
||||
- Rename `Path::{len => segment_count}` to be more descriptive of its purpose. [#370][net#370]
|
||||
- Rename `ResourceDef::{resource_path => resource_path_from_iter}`. [#371][net#371]
|
||||
- `ResourceDef::resource_path_from_iter` now takes an `IntoIterator`. [#373][net#373]
|
||||
- Rename `ResourceDef::{resource_path_named => resource_path_from_map}`. [#371][net#371]
|
||||
- Rename `ResourceDef::{is_prefix_match => find_match}`. [#373][net#373]
|
||||
- Rename `ResourceDef::{match_path => capture_match_info}`. [#373][net#373]
|
||||
- Rename `ResourceDef::{match_path_checked => capture_match_info_fn}`. [#373][net#373]
|
||||
- Remove `ResourceDef::name_mut` and introduce `ResourceDef::set_name`. [#373][net#373]
|
||||
- Rename `Router::{*_checked => *_fn}`. [#373][net#373]
|
||||
- Return type of `ResourceDef::name` is now `Option<&str>`. [#373][net#373]
|
||||
- Return type of `ResourceDef::pattern` is now `Option<&str>`. [#373][net#373]
|
||||
|
||||
[#368]: https://github.com/actix/actix-net/pull/368
|
||||
[#366]: https://github.com/actix/actix-net/pull/366
|
||||
[#368]: https://github.com/actix/actix-net/pull/368
|
||||
[#370]: https://github.com/actix/actix-net/pull/370
|
||||
[#371]: https://github.com/actix/actix-net/pull/371
|
||||
[#372]: https://github.com/actix/actix-net/pull/372
|
||||
[#373]: https://github.com/actix/actix-net/pull/373
|
||||
[net#368]: https://github.com/actix/actix-net/pull/368
|
||||
[net#366]: https://github.com/actix/actix-net/pull/366
|
||||
[net#368]: https://github.com/actix/actix-net/pull/368
|
||||
[net#370]: https://github.com/actix/actix-net/pull/370
|
||||
[net#371]: https://github.com/actix/actix-net/pull/371
|
||||
[net#372]: https://github.com/actix/actix-net/pull/372
|
||||
[net#373]: https://github.com/actix/actix-net/pull/373
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
## 0.4.0 - 2021-06-06
|
||||
- When matching path parameters, `%25` is now kept in the percent-encoded form; no longer decoded to `%`. [#357]
|
||||
- Path tail patterns now match new lines (`\n`) in request URL. [#360]
|
||||
- Fixed a safety bug where `Path` could return a malformed string after percent decoding. [#359]
|
||||
- Methods `Path::{add, add_static}` now take `impl Into<Cow<'static, str>>`. [#345]
|
||||
- When matching path parameters, `%25` is now kept in the percent-encoded form; no longer decoded to `%`. [#357][net#357]
|
||||
- Path tail patterns now match new lines (`\n`) in request URL. [#360][net#360]
|
||||
- Fixed a safety bug where `Path` could return a malformed string after percent decoding. [#359][net#359]
|
||||
- Methods `Path::{add, add_static}` now take `impl Into<Cow<'static, str>>`. [#345][net#345]
|
||||
|
||||
[#345]: https://github.com/actix/actix-net/pull/345
|
||||
[#357]: https://github.com/actix/actix-net/pull/357
|
||||
[#359]: https://github.com/actix/actix-net/pull/359
|
||||
[#360]: https://github.com/actix/actix-net/pull/360
|
||||
[net#345]: https://github.com/actix/actix-net/pull/345
|
||||
[net#357]: https://github.com/actix/actix-net/pull/357
|
||||
[net#359]: https://github.com/actix/actix-net/pull/359
|
||||
[net#360]: https://github.com/actix/actix-net/pull/360
|
||||
|
||||
|
||||
## 0.3.0 - 2019-12-31
|
||||
@ -105,15 +179,15 @@
|
||||
|
||||
|
||||
## 0.2.7 - 2021-02-06
|
||||
- Add `Router::recognize_checked` [#247]
|
||||
- Add `Router::recognize_checked` [#247][net#247]
|
||||
|
||||
[#247]: https://github.com/actix/actix-net/pull/247
|
||||
[net#247]: https://github.com/actix/actix-net/pull/247
|
||||
|
||||
|
||||
## 0.2.6 - 2021-01-09
|
||||
- Use `bytestring` version range compatible with Bytes v1.0. [#246]
|
||||
- Use `bytestring` version range compatible with Bytes v1.0. [#246][net#246]
|
||||
|
||||
[#246]: https://github.com/actix/actix-net/pull/246
|
||||
[net#246]: https://github.com/actix/actix-net/pull/246
|
||||
|
||||
|
||||
## 0.2.5 - 2020-09-20
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-router"
|
||||
version = "0.5.0-rc.3"
|
||||
version = "0.5.0"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>",
|
||||
@ -21,18 +21,21 @@ default = ["http"]
|
||||
|
||||
[dependencies]
|
||||
bytestring = ">=0.1.5, <2"
|
||||
firestorm = "0.5"
|
||||
http = { version = "0.2.3", optional = true }
|
||||
log = "0.4"
|
||||
http = { version = "0.2.5", optional = true }
|
||||
regex = "1.5"
|
||||
serde = "1"
|
||||
tracing = { version = "0.1.30", default-features = false, features = ["log"] }
|
||||
|
||||
[dev-dependencies]
|
||||
criterion = { version = "0.3", features = ["html_reports"] }
|
||||
firestorm = { version = "0.5", features = ["enable_system_time"] }
|
||||
http = "0.2.5"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
percent-encoding = "2.1"
|
||||
|
||||
[[bench]]
|
||||
name = "router"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "quoter"
|
||||
harness = false
|
||||
|
52
actix-router/benches/quoter.rs
Normal file
@ -0,0 +1,52 @@
|
||||
use criterion::{black_box, criterion_group, criterion_main, Criterion};
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
fn compare_quoters(c: &mut Criterion) {
|
||||
let mut group = c.benchmark_group("Compare Quoters");
|
||||
|
||||
let quoter = actix_router::Quoter::new(b"", b"");
|
||||
let path_quoted = (0..=0x7f)
|
||||
.map(|c| format!("%{:02X}", c))
|
||||
.collect::<String>();
|
||||
let path_unquoted = ('\u{00}'..='\u{7f}').collect::<String>();
|
||||
|
||||
group.bench_function("quoter_unquoted", |b| {
|
||||
b.iter(|| {
|
||||
for _ in 0..10 {
|
||||
black_box(quoter.requote(path_unquoted.as_bytes()));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("percent_encode_unquoted", |b| {
|
||||
b.iter(|| {
|
||||
for _ in 0..10 {
|
||||
let decode = percent_encoding::percent_decode(path_unquoted.as_bytes());
|
||||
black_box(Into::<Cow<'_, [u8]>>::into(decode));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("quoter_quoted", |b| {
|
||||
b.iter(|| {
|
||||
for _ in 0..10 {
|
||||
black_box(quoter.requote(path_quoted.as_bytes()));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("percent_encode_quoted", |b| {
|
||||
b.iter(|| {
|
||||
for _ in 0..10 {
|
||||
let decode = percent_encoding::percent_decode(path_quoted.as_bytes());
|
||||
black_box(Into::<Cow<'_, [u8]>>::into(decode));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
group.finish();
|
||||
}
|
||||
|
||||
criterion_group!(benches, compare_quoters);
|
||||
criterion_main!(benches);
|
@ -145,7 +145,8 @@ macro_rules! register {
concat!("/user/keys"),
concat!("/user/keys/", $p1),
];
std::array::IntoIter::new(arr)

IntoIterator::into_iter(arr)
}};
}

@ -158,7 +159,7 @@ fn call() -> impl Iterator<Item = &'static str> {
"/repos/rust-lang/rust/releases/1.51.0",
];

std::array::IntoIter::new(arr)
IntoIterator::into_iter(arr)
}

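As a side note, a minimal standalone sketch (not from this diff) of why the fully qualified `IntoIterator::into_iter` form is used for arrays: on Rust 1.53+ it yields elements by value, avoiding the edition-2018 method-call fallback that resolves `arr.into_iter()` to the slice iterator.

```rust
fn main() {
    let arr = ["/user/repos", "/authorizations"];

    // Yields `&'static str` items by value; `arr.into_iter()` on edition 2018
    // would have resolved to the slice iterator over `&&'static str` instead.
    let routes: Vec<&str> = IntoIterator::into_iter(arr).collect();

    assert_eq!(routes, ["/user/repos", "/authorizations"]);
}
```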
fn compare_routers(c: &mut Criterion) {
|
||||
|
@ -1,169 +0,0 @@
|
||||
macro_rules! register {
|
||||
(brackets) => {{
|
||||
register!(finish => "{p1}", "{p2}", "{p3}", "{p4}")
|
||||
}};
|
||||
(finish => $p1:literal, $p2:literal, $p3:literal, $p4:literal) => {{
|
||||
let arr = [
|
||||
concat!("/authorizations"),
|
||||
concat!("/authorizations/", $p1),
|
||||
concat!("/applications/", $p1, "/tokens/", $p2),
|
||||
concat!("/events"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/events"),
|
||||
concat!("/networks/", $p1, "/", $p2, "/events"),
|
||||
concat!("/orgs/", $p1, "/events"),
|
||||
concat!("/users/", $p1, "/received_events"),
|
||||
concat!("/users/", $p1, "/received_events/public"),
|
||||
concat!("/users/", $p1, "/events"),
|
||||
concat!("/users/", $p1, "/events/public"),
|
||||
concat!("/users/", $p1, "/events/orgs/", $p2),
|
||||
concat!("/feeds"),
|
||||
concat!("/notifications"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/notifications"),
|
||||
concat!("/notifications/threads/", $p1),
|
||||
concat!("/notifications/threads/", $p1, "/subscription"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stargazers"),
|
||||
concat!("/users/", $p1, "/starred"),
|
||||
concat!("/user/starred"),
|
||||
concat!("/user/starred/", $p1, "/", $p2),
|
||||
concat!("/repos/", $p1, "/", $p2, "/subscribers"),
|
||||
concat!("/users/", $p1, "/subscriptions"),
|
||||
concat!("/user/subscriptions"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/subscription"),
|
||||
concat!("/user/subscriptions/", $p1, "/", $p2),
|
||||
concat!("/users/", $p1, "/gists"),
|
||||
concat!("/gists"),
|
||||
concat!("/gists/", $p1),
|
||||
concat!("/gists/", $p1, "/star"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/blobs/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/commits/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/refs"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/tags/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/git/trees/", $p3),
|
||||
concat!("/issues"),
|
||||
concat!("/user/issues"),
|
||||
concat!("/orgs/", $p1, "/issues"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/assignees"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/assignees/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/events"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/labels/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3, "/labels"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3),
|
||||
concat!("/emojis"),
|
||||
concat!("/gitignore/templates"),
|
||||
concat!("/gitignore/templates/", $p1),
|
||||
concat!("/meta"),
|
||||
concat!("/rate_limit"),
|
||||
concat!("/users/", $p1, "/orgs"),
|
||||
concat!("/user/orgs"),
|
||||
concat!("/orgs/", $p1),
|
||||
concat!("/orgs/", $p1, "/members"),
|
||||
concat!("/orgs/", $p1, "/members", $p2),
|
||||
concat!("/orgs/", $p1, "/public_members"),
|
||||
concat!("/orgs/", $p1, "/public_members/", $p2),
|
||||
concat!("/orgs/", $p1, "/teams"),
|
||||
concat!("/teams/", $p1),
|
||||
concat!("/teams/", $p1, "/members"),
|
||||
concat!("/teams/", $p1, "/members", $p2),
|
||||
concat!("/teams/", $p1, "/repos"),
|
||||
concat!("/teams/", $p1, "/repos/", $p2, "/", $p3),
|
||||
concat!("/user/teams"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/commits"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/files"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/merge"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/comments"),
|
||||
concat!("/user/repos"),
|
||||
concat!("/users/", $p1, "/repos"),
|
||||
concat!("/orgs/", $p1, "/repos"),
|
||||
concat!("/repositories"),
|
||||
concat!("/repos/", $p1, "/", $p2),
|
||||
concat!("/repos/", $p1, "/", $p2, "/contributors"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/languages"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/teams"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/tags"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/branches"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/branches/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/collaborators"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/collaborators/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3, "/comments"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/readme"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/keys"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/keys", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/downloads"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/downloads", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/forks"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/hooks"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/hooks", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3),
|
||||
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3, "/assets"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/contributors"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/commit_activity"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/code_frequency"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/participation"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/stats/punch_card"),
|
||||
concat!("/repos/", $p1, "/", $p2, "/statuses/", $p3),
|
||||
concat!("/search/repositories"),
|
||||
concat!("/search/code"),
|
||||
concat!("/search/issues"),
|
||||
concat!("/search/users"),
|
||||
concat!("/legacy/issues/search/", $p1, "/", $p2, "/", $p3, "/", $p4),
|
||||
concat!("/legacy/repos/search/", $p1),
|
||||
concat!("/legacy/user/search/", $p1),
|
||||
concat!("/legacy/user/email/", $p1),
|
||||
concat!("/users/", $p1),
|
||||
concat!("/user"),
|
||||
concat!("/users"),
|
||||
concat!("/user/emails"),
|
||||
concat!("/users/", $p1, "/followers"),
|
||||
concat!("/user/followers"),
|
||||
concat!("/users/", $p1, "/following"),
|
||||
concat!("/user/following"),
|
||||
concat!("/user/following/", $p1),
|
||||
concat!("/users/", $p1, "/following", $p2),
|
||||
concat!("/users/", $p1, "/keys"),
|
||||
concat!("/user/keys"),
|
||||
concat!("/user/keys/", $p1),
|
||||
];
|
||||
|
||||
arr.to_vec()
|
||||
}};
|
||||
}
|
||||
|
||||
static PATHS: [&str; 5] = [
|
||||
"/authorizations",
|
||||
"/user/repos",
|
||||
"/repos/rust-lang/rust/stargazers",
|
||||
"/orgs/rust-lang/public_members/nikomatsakis",
|
||||
"/repos/rust-lang/rust/releases/1.51.0",
|
||||
];
|
||||
|
||||
fn main() {
|
||||
let mut router = actix_router::Router::<bool>::build();
|
||||
|
||||
for route in register!(brackets) {
|
||||
router.path(route, true);
|
||||
}
|
||||
|
||||
let actix = router.finish();
|
||||
|
||||
if firestorm::enabled() {
|
||||
firestorm::bench("target", || {
|
||||
for &route in &PATHS {
|
||||
let mut path = actix_router::Path::new(route);
|
||||
actix.recognize(&mut path).unwrap();
|
||||
}
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
}
|
@ -7,7 +7,7 @@ use crate::path::{Path, PathIter};
use crate::{Quoter, ResourcePath};

thread_local! {
static FULL_QUOTER: Quoter = Quoter::new(b"+/%", b"");
static FULL_QUOTER: Quoter = Quoter::new(b"", b"");
}

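For context, a hedged sketch (not part of this change) of the reworked `Quoter` behavior the new `FULL_QUOTER` relies on: the first, formerly "safe", argument is now ignored, so an empty protected set means every valid escape sequence is decoded, and `None` signals that no allocation was needed. It is based on the new `Quoter` docs and tests later in this diff.

```rust
use actix_router::Quoter;

fn main() {
    // The first argument is ignored by the reworked Quoter; nothing is protected.
    let q = Quoter::new(b"", b"");

    // All valid escape sequences are decoded ("%2F" -> '/', "%20" -> ' ').
    assert_eq!(q.requote(b"/a%2Fb%20c").unwrap(), b"/a/b c");

    // Input without escape sequences comes back as `None` (no allocation).
    assert_eq!(q.requote(b"/plain"), None);
}
```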
macro_rules! unsupported_type {
|
||||
|
@ -1,7 +1,6 @@
|
||||
use std::borrow::Cow;
|
||||
use std::ops::{DerefMut, Index};
|
||||
|
||||
use firestorm::profile_method;
|
||||
use serde::de;
|
||||
|
||||
use crate::{de::PathDeserializer, Resource, ResourcePath};
|
||||
@ -52,7 +51,6 @@ impl<T: ResourcePath> Path<T> {
|
||||
/// Returns full path as a string.
|
||||
#[inline]
|
||||
pub fn as_str(&self) -> &str {
|
||||
profile_method!(as_str);
|
||||
self.path.path()
|
||||
}
|
||||
|
||||
@ -61,7 +59,6 @@ impl<T: ResourcePath> Path<T> {
|
||||
/// Returns empty string if no more is to be processed.
|
||||
#[inline]
|
||||
pub fn unprocessed(&self) -> &str {
|
||||
profile_method!(unprocessed);
|
||||
// clamp skip to path length
|
||||
let skip = (self.skip as usize).min(self.as_str().len());
|
||||
&self.path.path()[skip..]
|
||||
@ -72,8 +69,6 @@ impl<T: ResourcePath> Path<T> {
|
||||
#[deprecated(since = "0.6.0", note = "Use `.as_str()` or `.unprocessed()`.")]
|
||||
#[inline]
|
||||
pub fn path(&self) -> &str {
|
||||
profile_method!(path);
|
||||
|
||||
let skip = self.skip as usize;
|
||||
let path = self.path.path();
|
||||
if skip <= path.len() {
|
||||
@ -86,8 +81,6 @@ impl<T: ResourcePath> Path<T> {
|
||||
/// Set new path.
|
||||
#[inline]
|
||||
pub fn set(&mut self, path: T) {
|
||||
profile_method!(set);
|
||||
|
||||
self.skip = 0;
|
||||
self.path = path;
|
||||
self.segments.clear();
|
||||
@ -96,8 +89,6 @@ impl<T: ResourcePath> Path<T> {
|
||||
/// Reset state.
|
||||
#[inline]
|
||||
pub fn reset(&mut self) {
|
||||
profile_method!(reset);
|
||||
|
||||
self.skip = 0;
|
||||
self.segments.clear();
|
||||
}
|
||||
@ -105,13 +96,10 @@ impl<T: ResourcePath> Path<T> {
|
||||
/// Skip first `n` chars in path.
|
||||
#[inline]
|
||||
pub fn skip(&mut self, n: u16) {
|
||||
profile_method!(skip);
|
||||
self.skip += n;
|
||||
}
|
||||
|
||||
pub(crate) fn add(&mut self, name: impl Into<Cow<'static, str>>, value: PathItem) {
|
||||
profile_method!(add);
|
||||
|
||||
match value {
|
||||
PathItem::Static(s) => self.segments.push((name.into(), PathItem::Static(s))),
|
||||
PathItem::Segment(begin, end) => self.segments.push((
|
||||
@ -127,8 +115,6 @@ impl<T: ResourcePath> Path<T> {
|
||||
name: impl Into<Cow<'static, str>>,
|
||||
value: impl Into<Cow<'static, str>>,
|
||||
) {
|
||||
profile_method!(add_static);
|
||||
|
||||
self.segments
|
||||
.push((name.into(), PathItem::Static(value.into())));
|
||||
}
|
||||
@ -147,8 +133,6 @@ impl<T: ResourcePath> Path<T> {
|
||||
|
||||
/// Get matched parameter by name without type conversion
|
||||
pub fn get(&self, name: &str) -> Option<&str> {
|
||||
profile_method!(get);
|
||||
|
||||
for (seg_name, val) in self.segments.iter() {
|
||||
if name == seg_name {
|
||||
return match val {
|
||||
@ -167,8 +151,6 @@ impl<T: ResourcePath> Path<T> {
|
||||
///
|
||||
/// If keyed parameter is not available empty string is used as default value.
|
||||
pub fn query(&self, key: &str) -> &str {
|
||||
profile_method!(query);
|
||||
|
||||
if let Some(s) = self.get(key) {
|
||||
s
|
||||
} else {
|
||||
@ -186,7 +168,6 @@ impl<T: ResourcePath> Path<T> {
|
||||
|
||||
/// Try to deserialize matching parameters to a specified type `U`
|
||||
pub fn load<'de, U: serde::Deserialize<'de>>(&'de self) -> Result<U, de::value::Error> {
|
||||
profile_method!(load);
|
||||
de::Deserialize::deserialize(PathDeserializer::new(self))
|
||||
}
|
||||
}
|
||||
|
@ -1,132 +1,89 @@
|
||||
#[allow(dead_code)]
|
||||
const GEN_DELIMS: &[u8] = b":/?#[]@";
|
||||
|
||||
#[allow(dead_code)]
|
||||
const SUB_DELIMS_WITHOUT_QS: &[u8] = b"!$'()*,";
|
||||
|
||||
#[allow(dead_code)]
|
||||
const SUB_DELIMS: &[u8] = b"!$'()*,+?=;";
|
||||
|
||||
#[allow(dead_code)]
|
||||
const RESERVED: &[u8] = b":/?#[]@!$'()*,+?=;";
|
||||
|
||||
#[allow(dead_code)]
|
||||
const UNRESERVED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
|
||||
ABCDEFGHIJKLMNOPQRSTUVWXYZ
|
||||
1234567890
|
||||
-._~";
|
||||
|
||||
const ALLOWED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
|
||||
ABCDEFGHIJKLMNOPQRSTUVWXYZ
|
||||
1234567890
|
||||
-._~
|
||||
!$'()*,";
|
||||
|
||||
const QS: &[u8] = b"+&=;b";
|
||||
|
||||
/// A quoter
/// Partial percent-decoding.
///
/// Performs percent-decoding on a slice but can selectively skip decoding certain sequences.
///
/// # Examples
/// ```
/// # use actix_router::Quoter;
/// // + is set as a protected character and will not be decoded...
/// let q = Quoter::new(&[], b"+");
///
/// // ...but the other encoded characters (like the hyphen below) will.
/// assert_eq!(q.requote(b"/a%2Db%2Bc").unwrap(), b"/a-b%2Bc");
/// ```
pub struct Quoter {
/// Simple bit-map of safe values in the 0-127 ASCII range.
safe_table: [u8; 16],

/// Simple bit-map of protected values in the 0-127 ASCII range.
protected_table: [u8; 16],
protected_table: AsciiBitmap,
}

|
||||
pub fn new(safe: &[u8], protected: &[u8]) -> Quoter {
|
||||
let mut quoter = Quoter {
|
||||
safe_table: [0; 16],
|
||||
protected_table: [0; 16],
|
||||
};
|
||||
|
||||
// prepare safe table
|
||||
for ch in 0..128 {
|
||||
if ALLOWED.contains(&ch) {
|
||||
set_bit(&mut quoter.safe_table, ch);
|
||||
}
|
||||
|
||||
if QS.contains(&ch) {
|
||||
set_bit(&mut quoter.safe_table, ch);
|
||||
}
|
||||
}
|
||||
|
||||
for &ch in safe {
|
||||
set_bit(&mut quoter.safe_table, ch)
|
||||
}
|
||||
/// Constructs a new `Quoter` instance given a set of protected ASCII bytes.
|
||||
///
|
||||
/// The first argument is ignored but is kept for backward compatibility.
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics if any of the `protected` bytes are not in the 0-127 ASCII range.
|
||||
pub fn new(_: &[u8], protected: &[u8]) -> Quoter {
|
||||
let mut protected_table = AsciiBitmap::default();
|
||||
|
||||
// prepare protected table
|
||||
for &ch in protected {
|
||||
set_bit(&mut quoter.safe_table, ch);
|
||||
set_bit(&mut quoter.protected_table, ch);
|
||||
protected_table.set_bit(ch);
|
||||
}
|
||||
|
||||
quoter
|
||||
Quoter { protected_table }
|
||||
}
|
||||
|
||||
/// Decodes safe percent-encoded sequences from `val`.
|
||||
///
|
||||
/// Returns `None` when no modification to the original byte string was required.
|
||||
///
|
||||
/// Non-ASCII bytes are accepted as valid input.
|
||||
///
|
||||
/// Behavior for invalid/incomplete percent-encoding sequences is unspecified and may include
|
||||
/// removing the invalid sequence from the output or passing it as-is.
|
||||
pub fn requote(&self, val: &[u8]) -> Option<Vec<u8>> {
|
||||
let mut has_pct = 0;
|
||||
let mut pct = [b'%', 0, 0];
|
||||
let mut idx = 0;
|
||||
let mut cloned: Option<Vec<u8>> = None;
|
||||
|
||||
let len = val.len();
|
||||
|
||||
while idx < len {
|
||||
let ch = val[idx];
|
||||
|
||||
if has_pct != 0 {
|
||||
pct[has_pct] = val[idx];
|
||||
has_pct += 1;
|
||||
|
||||
if has_pct == 3 {
|
||||
has_pct = 0;
|
||||
let buf = cloned.as_mut().unwrap();
|
||||
|
||||
if let Some(ch) = hex_pair_to_char(pct[1], pct[2]) {
|
||||
if ch < 128 {
|
||||
if bit_at(&self.protected_table, ch) {
|
||||
buf.extend_from_slice(&pct);
|
||||
idx += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if bit_at(&self.safe_table, ch) {
|
||||
buf.push(ch);
|
||||
idx += 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
buf.push(ch);
|
||||
} else {
|
||||
buf.extend_from_slice(&pct[..]);
|
||||
}
|
||||
/// Decodes the next escape sequence, if any, and advances `val`.
|
||||
#[inline(always)]
|
||||
fn decode_next<'a>(&self, val: &mut &'a [u8]) -> Option<(&'a [u8], u8)> {
|
||||
for i in 0..val.len() {
|
||||
if let (prev, [b'%', p1, p2, rem @ ..]) = val.split_at(i) {
|
||||
if let Some(ch) = hex_pair_to_char(*p1, *p2)
|
||||
// ignore protected ascii bytes
|
||||
.filter(|&ch| !(ch < 128 && self.protected_table.bit_at(ch)))
|
||||
{
|
||||
*val = rem;
|
||||
return Some((prev, ch));
|
||||
}
|
||||
} else if ch == b'%' {
|
||||
has_pct = 1;
|
||||
|
||||
if cloned.is_none() {
|
||||
let mut c = Vec::with_capacity(len);
|
||||
c.extend_from_slice(&val[..idx]);
|
||||
cloned = Some(c);
|
||||
}
|
||||
} else if let Some(ref mut cloned) = cloned {
|
||||
cloned.push(ch)
|
||||
}
|
||||
|
||||
idx += 1;
|
||||
}
|
||||
|
||||
cloned
|
||||
None
|
||||
}
|
||||
|
||||
/// Partially percent-decodes the given bytes.
|
||||
///
|
||||
/// Escape sequences of the protected set are *not* decoded.
|
||||
///
|
||||
/// Returns `None` when no modification to the original bytes was required.
|
||||
///
|
||||
/// Invalid/incomplete percent-encoding sequences are passed unmodified.
|
||||
pub fn requote(&self, val: &[u8]) -> Option<Vec<u8>> {
|
||||
let mut remaining = val;
|
||||
|
||||
// early return indicates that no percent-encoded sequences exist and we can skip allocation
|
||||
let (pre, decoded_char) = self.decode_next(&mut remaining)?;
|
||||
|
||||
// decoded output will always be shorter than the input
|
||||
let mut decoded = Vec::<u8>::with_capacity(val.len());
|
||||
|
||||
// push first segment and decoded char
|
||||
decoded.extend_from_slice(pre);
|
||||
decoded.push(decoded_char);
|
||||
|
||||
// decode and push rest of segments and decoded chars
|
||||
while let Some((prev, ch)) = self.decode_next(&mut remaining) {
|
||||
// this ugly conditional achieves +50% perf in cases where this is a tight loop.
|
||||
if !prev.is_empty() {
|
||||
decoded.extend_from_slice(prev);
|
||||
}
|
||||
decoded.push(ch);
|
||||
}
|
||||
|
||||
decoded.extend_from_slice(remaining);
|
||||
|
||||
Some(decoded)
|
||||
}
|
||||
|
||||
pub(crate) fn requote_str_lossy(&self, val: &str) -> Option<String> {
|
||||
@ -135,24 +92,6 @@ impl Quoter {
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts an ASCII character in the hex-encoded set (`0-9`, `A-F`, `a-f`) to its integer
|
||||
/// representation from `0x0`–`0xF`.
|
||||
///
|
||||
/// - `0x30 ('0') => 0x0`
|
||||
/// - `0x39 ('9') => 0x9`
|
||||
/// - `0x41 ('a') => 0xA`
|
||||
/// - `0x61 ('A') => 0xA`
|
||||
/// - `0x46 ('f') => 0xF`
|
||||
/// - `0x66 ('F') => 0xF`
|
||||
fn from_ascii_hex(v: u8) -> Option<u8> {
|
||||
match v {
|
||||
b'0'..=b'9' => Some(v - 0x30), // ord('0') == 0x30
|
||||
b'A'..=b'F' => Some(v - 0x41 + 10), // ord('A') == 0x41
|
||||
b'a'..=b'f' => Some(v - 0x61 + 10), // ord('a') == 0x61
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Decode an ASCII hex-encoded pair to an integer.
///
/// Returns `None` if either portion of the decoded pair does not evaluate to a valid hex value.
@ -160,64 +99,52 @@ fn from_ascii_hex(v: u8) -> Option<u8> {
/// - `0x33 ('3'), 0x30 ('0') => 0x30 ('0')`
/// - `0x34 ('4'), 0x31 ('1') => 0x41 ('A')`
/// - `0x36 ('6'), 0x31 ('1') => 0x61 ('a')`
#[inline(always)]
fn hex_pair_to_char(d1: u8, d2: u8) -> Option<u8> {
let (d_high, d_low) = (from_ascii_hex(d1)?, from_ascii_hex(d2)?);
let d_high = char::from(d1).to_digit(16)?;
let d_low = char::from(d2).to_digit(16)?;

// left shift high nibble by 4 bits
Some(d_high << 4 | d_low)
Some((d_high as u8) << 4 | (d_low as u8))
}

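A standalone worked example of the same nibble arithmetic (illustrative only, reimplemented here since `hex_pair_to_char` is private to the crate):

```rust
fn main() {
    // '4' and '1' are the hex digits 0x4 and 0x1, so the decoded byte is
    // 0x4 << 4 | 0x1 == 0x41, which is ASCII 'A'.
    let (d1, d2) = (b'4', b'1');

    let d_high = char::from(d1).to_digit(16).unwrap() as u8;
    let d_low = char::from(d2).to_digit(16).unwrap() as u8;

    assert_eq!(d_high << 4 | d_low, b'A');
}
```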
/// Sets bit in given bit-map to 1=true.
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics if `ch` index is out of bounds.
|
||||
fn set_bit(array: &mut [u8], ch: u8) {
|
||||
array[(ch >> 3) as usize] |= 0b1 << (ch & 0b111)
|
||||
#[derive(Debug, Default, Clone)]
|
||||
struct AsciiBitmap {
|
||||
array: [u8; 16],
|
||||
}
|
||||
|
||||
/// Returns true if bit to true in given bit-map.
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics if `ch` index is out of bounds.
|
||||
fn bit_at(array: &[u8], ch: u8) -> bool {
|
||||
array[(ch >> 3) as usize] & (0b1 << (ch & 0b111)) != 0
|
||||
impl AsciiBitmap {
|
||||
/// Sets bit in given bit-map to 1=true.
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics if `ch` index is out of bounds.
|
||||
fn set_bit(&mut self, ch: u8) {
|
||||
self.array[(ch >> 3) as usize] |= 0b1 << (ch & 0b111)
|
||||
}
|
||||
|
||||
/// Returns true if bit to true in given bit-map.
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics if `ch` index is out of bounds.
|
||||
fn bit_at(&self, ch: u8) -> bool {
|
||||
self.array[(ch >> 3) as usize] & (0b1 << (ch & 0b111)) != 0
|
||||
}
|
||||
}
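A short usage sketch of the bitmap technique introduced by `AsciiBitmap`: 16 bytes give 128 bits, one per ASCII character, so membership checks are a shift, a mask and an array lookup. This is a standalone reimplementation for illustration, not the crate's internal type:

```rust
#[derive(Debug, Default, Clone)]
struct AsciiBitmap {
    array: [u8; 16], // 128 bits, one per ASCII character
}

impl AsciiBitmap {
    fn set_bit(&mut self, ch: u8) {
        // byte index = ch / 8, bit index = ch % 8; panics for non-ASCII bytes
        self.array[(ch >> 3) as usize] |= 0b1 << (ch & 0b111)
    }

    fn bit_at(&self, ch: u8) -> bool {
        self.array[(ch >> 3) as usize] & (0b1 << (ch & 0b111)) != 0
    }
}

fn main() {
    let mut protected = AsciiBitmap::default();
    protected.set_bit(b'+');

    assert!(protected.bit_at(b'+'));
    assert!(!protected.bit_at(b'/'));
}
```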

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn hex_encoding() {
        let hex = b"0123456789abcdefABCDEF";

        for i in 0..256 {
            let c = i as u8;
            if hex.contains(&c) {
                assert!(from_ascii_hex(c).is_some())
            } else {
                assert!(from_ascii_hex(c).is_none())
            }
        }

        let expected = [
            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 10, 11, 12, 13, 14, 15,
        ];
        for i in 0..hex.len() {
            assert_eq!(from_ascii_hex(hex[i]).unwrap(), expected[i]);
        }
    }

    #[test]
    fn custom_quoter() {
        let q = Quoter::new(b"", b"+");
        assert_eq!(q.requote(b"/a%25c").unwrap(), b"/a%c");
        assert_eq!(q.requote(b"/a%2Bc").unwrap(), b"/a%2Bc");
        assert_eq!(q.requote(b"/a%2Bc"), None);

        let q = Quoter::new(b"%+", b"/");
        assert_eq!(q.requote(b"/a%25b%2Bc").unwrap(), b"/a%b+c");
        assert_eq!(q.requote(b"/a%2fb").unwrap(), b"/a%2fb");
        assert_eq!(q.requote(b"/a%2Fb").unwrap(), b"/a%2Fb");
        assert_eq!(q.requote(b"/a%2fb"), None);
        assert_eq!(q.requote(b"/a%2Fb"), None);
        assert_eq!(q.requote(b"/a%0Ab").unwrap(), b"/a\nb");
        assert_eq!(q.requote(b"/a%FE\xffb").unwrap(), b"/a\xfe\xffb");
        assert_eq!(q.requote(b"/a\xfe\xffb"), None);
@@ -233,7 +160,8 @@ mod tests {
    #[test]
    fn invalid_sequences() {
        let q = Quoter::new(b"%+", b"/");
        assert_eq!(q.requote(b"/a%2x%2X%%").unwrap(), b"/a%2x%2X");
        assert_eq!(q.requote(b"/a%2x%2X%%"), None);
        assert_eq!(q.requote(b"/a%20%2X%%").unwrap(), b"/a %2X%%");
    }

    #[test]
@@ -5,8 +5,8 @@ use std::{
    mem,
};

use firestorm::{profile_fn, profile_method, profile_section};
use regex::{escape, Regex, RegexSet};
use tracing::error;

use crate::{path::PathItem, IntoPatterns, Patterns, Resource, ResourcePath};

@ -271,7 +271,6 @@ impl ResourceDef {
|
||||
/// assert!(!resource.is_match("/foo"));
|
||||
/// ```
|
||||
pub fn new<T: IntoPatterns>(paths: T) -> Self {
|
||||
profile_method!(new);
|
||||
Self::construct(paths, false)
|
||||
}
|
||||
|
||||
@ -299,7 +298,6 @@ impl ResourceDef {
|
||||
/// assert!(!resource.is_match("/foo"));
|
||||
/// ```
|
||||
pub fn prefix<T: IntoPatterns>(paths: T) -> Self {
|
||||
profile_method!(prefix);
|
||||
ResourceDef::construct(paths, true)
|
||||
}
|
||||
|
||||
@ -324,7 +322,6 @@ impl ResourceDef {
|
||||
/// assert!(!resource.is_match("user/123"));
|
||||
/// ```
|
||||
pub fn root_prefix(path: &str) -> Self {
|
||||
profile_method!(root_prefix);
|
||||
ResourceDef::prefix(insert_slash(path).into_owned())
|
||||
}
|
||||
|
||||
@ -548,8 +545,6 @@ impl ResourceDef {
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn is_match(&self, path: &str) -> bool {
|
||||
profile_method!(is_match);
|
||||
|
||||
// this function could be expressed as:
|
||||
// `self.find_match(path).is_some()`
|
||||
// but this skips some checks and uses potentially faster regex methods
|
||||
@ -597,8 +592,6 @@ impl ResourceDef {
|
||||
/// assert_eq!(resource.find_match("/profile/1234"), Some(13));
|
||||
/// ```
|
||||
pub fn find_match(&self, path: &str) -> Option<usize> {
|
||||
profile_method!(find_match);
|
||||
|
||||
match &self.pat_type {
|
||||
PatternType::Static(pattern) => self.static_match(pattern, path),
|
||||
|
||||
@ -633,7 +626,6 @@ impl ResourceDef {
|
||||
/// assert_eq!(path.unprocessed(), "");
|
||||
/// ```
|
||||
pub fn capture_match_info<R: Resource>(&self, resource: &mut R) -> bool {
|
||||
profile_method!(capture_match_info);
|
||||
self.capture_match_info_fn(resource, |_| true)
|
||||
}
|
||||
|
||||
@ -657,7 +649,7 @@ impl ResourceDef {
|
||||
/// resource.capture_match_info_fn(
|
||||
/// path,
|
||||
/// // when env var is not set, reject when path contains "admin"
|
||||
/// |res| !(!admin_allowed && res.path().contains("admin")),
|
||||
/// |path| !(!admin_allowed && path.as_str().contains("admin")),
|
||||
/// )
|
||||
/// }
|
||||
///
|
||||
@ -679,56 +671,35 @@ impl ResourceDef {
|
||||
R: Resource,
|
||||
F: FnOnce(&R) -> bool,
|
||||
{
|
||||
profile_method!(capture_match_info_fn);
|
||||
|
||||
let mut segments = <[PathItem; MAX_DYNAMIC_SEGMENTS]>::default();
|
||||
let path = resource.resource_path();
|
||||
let path_str = path.unprocessed();
|
||||
|
||||
let (matched_len, matched_vars) = match &self.pat_type {
|
||||
PatternType::Static(pattern) => {
|
||||
profile_section!(pattern_static_or_prefix);
|
||||
|
||||
match self.static_match(pattern, path_str) {
|
||||
Some(len) => (len, None),
|
||||
None => return false,
|
||||
}
|
||||
}
|
||||
PatternType::Static(pattern) => match self.static_match(pattern, path_str) {
|
||||
Some(len) => (len, None),
|
||||
None => return false,
|
||||
},
|
||||
|
||||
PatternType::Dynamic(re, names) => {
|
||||
profile_section!(pattern_dynamic);
|
||||
|
||||
let captures = {
|
||||
profile_section!(pattern_dynamic_regex_exec);
|
||||
|
||||
match re.captures(path.unprocessed()) {
|
||||
Some(captures) => captures,
|
||||
_ => return false,
|
||||
}
|
||||
let captures = match re.captures(path.unprocessed()) {
|
||||
Some(captures) => captures,
|
||||
_ => return false,
|
||||
};
|
||||
|
||||
{
|
||||
profile_section!(pattern_dynamic_extract_captures);
|
||||
|
||||
for (no, name) in names.iter().enumerate() {
|
||||
if let Some(m) = captures.name(name) {
|
||||
segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16);
|
||||
} else {
|
||||
log::error!(
|
||||
"Dynamic path match but not all segments found: {}",
|
||||
name
|
||||
);
|
||||
return false;
|
||||
}
|
||||
for (no, name) in names.iter().enumerate() {
|
||||
if let Some(m) = captures.name(name) {
|
||||
segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16);
|
||||
} else {
|
||||
error!("Dynamic path match but not all segments found: {}", name);
|
||||
return false;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
(captures[1].len(), Some(names))
|
||||
}
|
||||
|
||||
PatternType::DynamicSet(re, params) => {
|
||||
profile_section!(pattern_dynamic_set);
|
||||
|
||||
let path = path.unprocessed();
|
||||
let (pattern, names) = match re.matches(path).into_iter().next() {
|
||||
Some(idx) => ¶ms[idx],
|
||||
@ -744,7 +715,7 @@ impl ResourceDef {
|
||||
if let Some(m) = captures.name(name) {
|
||||
segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16);
|
||||
} else {
|
||||
log::error!("Dynamic path match but not all segments found: {}", name);
|
||||
error!("Dynamic path match but not all segments found: {}", name);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@ -811,7 +782,6 @@ impl ResourceDef {
|
||||
I: IntoIterator,
|
||||
I::Item: AsRef<str>,
|
||||
{
|
||||
profile_method!(resource_path_from_iter);
|
||||
let mut iter = values.into_iter();
|
||||
self.build_resource_path(path, |_| iter.next())
|
||||
}
|
||||
@ -847,7 +817,6 @@ impl ResourceDef {
|
||||
V: AsRef<str>,
|
||||
S: BuildHasher,
|
||||
{
|
||||
profile_method!(resource_path_from_map);
|
||||
self.build_resource_path(path, |name| values.get(name))
|
||||
}
|
||||
|
||||
@ -868,8 +837,6 @@ impl ResourceDef {
|
||||
}
|
||||
|
||||
fn construct<T: IntoPatterns>(paths: T, is_prefix: bool) -> Self {
|
||||
profile_method!(construct);
|
||||
|
||||
let patterns = paths.patterns();
|
||||
let (pat_type, segments) = match &patterns {
|
||||
Patterns::Single(pattern) => ResourceDef::parse(pattern, is_prefix, false),
|
||||
@ -898,7 +865,7 @@ impl ResourceDef {
|
||||
}
|
||||
|
||||
let pattern_re_set = RegexSet::new(re_set).unwrap();
|
||||
let segments = segments.unwrap_or_else(Vec::new);
|
||||
let segments = segments.unwrap_or_default();
|
||||
|
||||
(
|
||||
PatternType::DynamicSet(pattern_re_set, pattern_data),
|
||||
@ -928,8 +895,6 @@ impl ResourceDef {
|
||||
/// # Panics
|
||||
/// Panics if given patterns does not contain a dynamic segment.
|
||||
fn parse_param(pattern: &str) -> (PatternSegment, String, &str, bool) {
|
||||
profile_method!(parse_param);
|
||||
|
||||
const DEFAULT_PATTERN: &str = "[^/]+";
|
||||
const DEFAULT_PATTERN_TAIL: &str = ".*";
|
||||
|
||||
@ -999,8 +964,6 @@ impl ResourceDef {
|
||||
is_prefix: bool,
|
||||
force_dynamic: bool,
|
||||
) -> (PatternType, Vec<PatternSegment>) {
|
||||
profile_method!(parse);
|
||||
|
||||
if !force_dynamic && pattern.find('{').is_none() && !pattern.ends_with('*') {
|
||||
// pattern is static
|
||||
return (
|
||||
@ -1038,7 +1001,7 @@ impl ResourceDef {
|
||||
// tail segments in prefixes have no defined semantics
|
||||
|
||||
#[cfg(not(test))]
|
||||
log::warn!(
|
||||
tracing::warn!(
|
||||
"Prefix resources should not have tail segments. \
|
||||
Use `ResourceDef::new` constructor. \
|
||||
This may become a panic in the future."
|
||||
@ -1053,7 +1016,7 @@ impl ResourceDef {
|
||||
// unnamed tail segment
|
||||
|
||||
#[cfg(not(test))]
|
||||
log::warn!(
|
||||
tracing::warn!(
|
||||
"Tail segments must have names. \
|
||||
Consider `.../{{tail}}*`. \
|
||||
This may become a panic in the future."
|
||||
@@ -1133,8 +1096,6 @@ impl From<String> for ResourceDef {
}

pub(crate) fn insert_slash(path: &str) -> Cow<'_, str> {
    profile_fn!(insert_slash);

    if !path.is_empty() && !path.starts_with('/') {
        let mut new_path = String::with_capacity(path.len() + 1);
        new_path.push('/');
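The hunk above is cut off mid-function. A self-contained sketch of the same leading-slash normalization, with the remainder of the body filled in as an assumption (borrow when no change is needed, allocate otherwise):

```rust
use std::borrow::Cow;

fn insert_slash(path: &str) -> Cow<'_, str> {
    if !path.is_empty() && !path.starts_with('/') {
        // prefix the missing leading slash in a fresh allocation
        let mut new_path = String::with_capacity(path.len() + 1);
        new_path.push('/');
        new_path.push_str(path);
        Cow::Owned(new_path)
    } else {
        // already rooted (or empty): hand back the borrowed input
        Cow::Borrowed(path)
    }
}

fn main() {
    assert_eq!(insert_slash("user/{id}"), "/user/{id}");
    assert_eq!(insert_slash("/already/rooted"), "/already/rooted");
}
```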
@@ -27,7 +27,7 @@ impl<'a> ResourcePath for &'a str {

impl ResourcePath for bytestring::ByteString {
    fn path(&self) -> &str {
        &*self
        self
    }
}

@ -1,8 +1,6 @@
|
||||
use firestorm::profile_method;
|
||||
|
||||
use crate::{IntoPatterns, Resource, ResourceDef};
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
pub struct ResourceId(pub u16);
|
||||
|
||||
/// Resource router.
|
||||
@ -30,7 +28,6 @@ impl<T, U> Router<T, U> {
|
||||
where
|
||||
R: Resource,
|
||||
{
|
||||
profile_method!(recognize);
|
||||
self.recognize_fn(resource, |_, _| true)
|
||||
}
|
||||
|
||||
@ -39,7 +36,6 @@ impl<T, U> Router<T, U> {
|
||||
where
|
||||
R: Resource,
|
||||
{
|
||||
profile_method!(recognize_mut);
|
||||
self.recognize_mut_fn(resource, |_, _| true)
|
||||
}
|
||||
|
||||
@ -55,8 +51,6 @@ impl<T, U> Router<T, U> {
|
||||
R: Resource,
|
||||
F: FnMut(&R, &U) -> bool,
|
||||
{
|
||||
profile_method!(recognize_checked);
|
||||
|
||||
for (rdef, val, ctx) in self.routes.iter() {
|
||||
if rdef.capture_match_info_fn(resource, |res| check(res, ctx)) {
|
||||
return Some((val, ResourceId(rdef.id())));
|
||||
@ -77,8 +71,6 @@ impl<T, U> Router<T, U> {
|
||||
R: Resource,
|
||||
F: FnMut(&R, &U) -> bool,
|
||||
{
|
||||
profile_method!(recognize_mut_checked);
|
||||
|
||||
for (rdef, val, ctx) in self.routes.iter_mut() {
|
||||
if rdef.capture_match_info_fn(resource, |res| check(res, ctx)) {
|
||||
return Some((val, ResourceId(rdef.id())));
|
||||
@ -104,7 +96,6 @@ impl<T, U> RouterBuilder<T, U> {
|
||||
val: T,
|
||||
ctx: U,
|
||||
) -> (&mut ResourceDef, &mut T, &mut U) {
|
||||
profile_method!(push);
|
||||
self.routes.push((rdef, val, ctx));
|
||||
self.routes
|
||||
.last_mut()
|
||||
@ -131,7 +122,6 @@ where
|
||||
path: impl IntoPatterns,
|
||||
val: T,
|
||||
) -> (&mut ResourceDef, &mut T, &mut U) {
|
||||
profile_method!(path);
|
||||
self.push(ResourceDef::new(path), val, U::default())
|
||||
}
|
||||
|
||||
@ -141,13 +131,11 @@ where
|
||||
prefix: impl IntoPatterns,
|
||||
val: T,
|
||||
) -> (&mut ResourceDef, &mut T, &mut U) {
|
||||
profile_method!(prefix);
|
||||
self.push(ResourceDef::prefix(prefix), val, U::default())
|
||||
}
|
||||
|
||||
/// Registers resource for [`ResourceDef`].
|
||||
pub fn rdef(&mut self, rdef: ResourceDef, val: T) -> (&mut ResourceDef, &mut T, &mut U) {
|
||||
profile_method!(rdef);
|
||||
self.push(rdef, val, U::default())
|
||||
}
|
||||
}
|
||||
|
@@ -3,7 +3,7 @@ use crate::ResourcePath;
use crate::Quoter;

thread_local! {
    static DEFAULT_QUOTER: Quoter = Quoter::new(b"@:", b"%/+");
    static DEFAULT_QUOTER: Quoter = Quoter::new(b"", b"%/+");
}

#[derive(Debug, Clone, Default)]
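A hedged sketch of how such a thread-local default is typically consumed: each thread lazily constructs its own `Quoter` and reads it through std's `LocalKey::with`. The `requote_default` helper here is hypothetical, added only for illustration:

```rust
use actix_router::Quoter;

thread_local! {
    // per-thread, lazily initialised default quoter with '%', '/' and '+' protected
    static DEFAULT_QUOTER: Quoter = Quoter::new(b"", b"%/+");
}

fn requote_default(path: &[u8]) -> Option<Vec<u8>> {
    DEFAULT_QUOTER.with(|q| q.requote(path))
}

fn main() {
    // "%20" is not protected, so it decodes to a space
    assert_eq!(requote_default(b"/a%20b").unwrap(), b"/a b");
    // no escapes at all: nothing to do, so `None`
    assert_eq!(requote_default(b"/plain"), None);
}
```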
@@ -1,6 +1,11 @@
# Changes

## Unreleased - 2021-xx-xx
## Unreleased - 2022-xx-xx
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.


## 0.1.0-beta.13 - 2022-02-16
- No significant changes since `0.1.0-beta.12`.


## 0.1.0-beta.12 - 2022-01-31
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-test"
|
||||
version = "0.1.0-beta.12"
|
||||
version = "0.1.0-beta.13"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Rob Ede <robjtede@icloud.com>",
|
||||
@ -28,14 +28,14 @@ rustls = ["tls-rustls", "actix-http/rustls", "awc/rustls"]
|
||||
openssl = ["tls-openssl", "actix-http/openssl", "awc/openssl"]
|
||||
|
||||
[dependencies]
|
||||
actix-codec = "0.4.1"
|
||||
actix-http = "3.0.0-rc.1"
|
||||
actix-http-test = "3.0.0-beta.12"
|
||||
actix-codec = "0.5"
|
||||
actix-http = "3"
|
||||
actix-http-test = "3"
|
||||
actix-rt = "2.1"
|
||||
actix-service = "2.0.0"
|
||||
actix-utils = "3.0.0"
|
||||
actix-web = { version = "4.0.0-rc.1", default-features = false, features = ["cookies"] }
|
||||
awc = { version = "3.0.0-beta.20", default-features = false, features = ["cookies"] }
|
||||
actix-service = "2"
|
||||
actix-utils = "3"
|
||||
actix-web = { version = "4", default-features = false, features = ["cookies"] }
|
||||
awc = { version = "3", default-features = false, features = ["cookies"] }
|
||||
|
||||
futures-core = { version = "0.3.7", default-features = false, features = ["std"] }
|
||||
futures-util = { version = "0.3.7", default-features = false, features = [] }
|
||||
|
@ -43,7 +43,7 @@ pub use actix_http_test::unused_addr;
|
||||
use actix_service::{map_config, IntoServiceFactory, ServiceFactory, ServiceFactoryExt as _};
|
||||
pub use actix_web::test::{
|
||||
call_and_read_body, call_and_read_body_json, call_service, init_service, ok_service,
|
||||
read_body, read_body_json, simple_service, TestRequest,
|
||||
read_body, read_body_json, status_service, TestRequest,
|
||||
};
|
||||
use actix_web::{
|
||||
body::MessageBody,
|
||||
|
@ -1,6 +1,21 @@
|
||||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
## Unreleased - 2022-xx-xx
|
||||
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
|
||||
|
||||
|
||||
## 4.1.0 - 2022-03-02
|
||||
- Add support for `actix` version `0.13`. [#2675]
|
||||
|
||||
[#2675]: https://github.com/actix/actix-web/pull/2675
|
||||
|
||||
|
||||
## 4.0.0 - 2022-02-25
|
||||
- No significant changes since `4.0.0-beta.12`.
|
||||
|
||||
|
||||
## 4.0.0-beta.12 - 2022-02-16
|
||||
- No significant changes since `4.0.0-beta.11`.
|
||||
|
||||
|
||||
## 4.0.0-beta.11 - 2022-01-31
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-web-actors"
|
||||
version = "4.0.0-beta.11"
|
||||
version = "4.1.0"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Actix actors support for Actix Web"
|
||||
keywords = ["actix", "http", "web", "framework", "async"]
|
||||
@ -14,21 +14,24 @@ name = "actix_web_actors"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
actix = { version = "0.12.0", default-features = false }
|
||||
actix-codec = "0.4.1"
|
||||
actix-http = "3.0.0-rc.1"
|
||||
actix-web = { version = "4.0.0-rc.1", default-features = false }
|
||||
actix = { version = ">=0.12, <0.14", default-features = false }
|
||||
actix-codec = "0.5"
|
||||
actix-http = "3"
|
||||
actix-web = { version = "4", default-features = false }
|
||||
|
||||
bytes = "1"
|
||||
bytestring = "1"
|
||||
futures-core = { version = "0.3.7", default-features = false }
|
||||
pin-project-lite = "0.2"
|
||||
tokio = { version = "1.8.4", features = ["sync"] }
|
||||
tokio = { version = "1.13.1", features = ["sync"] }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-test = "0.1.0-beta.12"
|
||||
awc = { version = "3.0.0-beta.20", default-features = false }
|
||||
actix-test = "0.1.0-beta.13"
|
||||
awc = { version = "3", default-features = false }
|
||||
actix-web = { version = "4", features = ["macros"] }
|
||||
|
||||
mime = "0.3"
|
||||
|
||||
env_logger = "0.9"
|
||||
futures-util = { version = "0.3.7", default-features = false }
|
||||
|
@ -3,11 +3,11 @@
|
||||
> Actix actors support for Actix Web.
|
||||
|
||||
[](https://crates.io/crates/actix-web-actors)
|
||||
[](https://docs.rs/actix-web-actors/4.0.0-beta.11)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
|
||||
[](https://docs.rs/actix-web-actors/4.1.0)
|
||||

|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-web-actors/4.0.0-beta.11)
|
||||
[](https://deps.rs/crate/actix-web-actors/4.1.0)
|
||||
[](https://crates.io/crates/actix-web-actors)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
@ -14,6 +14,58 @@ use futures_core::Stream;
|
||||
use tokio::sync::oneshot::Sender;
|
||||
|
||||
/// Execution context for HTTP actors
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// A demonstration of [server-sent events](https://developer.mozilla.org/docs/Web/API/Server-sent_events) using actors:
|
||||
///
|
||||
/// ```no_run
|
||||
/// use std::time::Duration;
|
||||
///
|
||||
/// use actix::{Actor, AsyncContext};
|
||||
/// use actix_web::{get, http::header, App, HttpResponse, HttpServer};
|
||||
/// use actix_web_actors::HttpContext;
|
||||
/// use bytes::Bytes;
|
||||
///
|
||||
/// struct MyActor {
|
||||
/// count: usize,
|
||||
/// }
|
||||
///
|
||||
/// impl Actor for MyActor {
|
||||
/// type Context = HttpContext<Self>;
|
||||
///
|
||||
/// fn started(&mut self, ctx: &mut Self::Context) {
|
||||
/// ctx.run_later(Duration::from_millis(100), Self::write);
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// impl MyActor {
|
||||
/// fn write(&mut self, ctx: &mut HttpContext<Self>) {
|
||||
/// self.count += 1;
|
||||
/// if self.count > 3 {
|
||||
/// ctx.write_eof()
|
||||
/// } else {
|
||||
/// ctx.write(Bytes::from(format!("event: count\ndata: {}\n\n", self.count)));
|
||||
/// ctx.run_later(Duration::from_millis(100), Self::write);
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// #[get("/")]
|
||||
/// async fn index() -> HttpResponse {
|
||||
/// HttpResponse::Ok()
|
||||
/// .insert_header(header::ContentType(mime::TEXT_EVENT_STREAM))
|
||||
/// .streaming(HttpContext::create(MyActor { count: 0 }))
|
||||
/// }
|
||||
///
|
||||
/// #[actix_web::main]
|
||||
/// async fn main() -> std::io::Result<()> {
|
||||
/// HttpServer::new(|| App::new().service(index))
|
||||
/// .bind(("127.0.0.1", 8080))?
|
||||
/// .run()
|
||||
/// .await
|
||||
/// }
|
||||
/// ```
|
||||
pub struct HttpContext<A>
|
||||
where
|
||||
A: Actor<Context = HttpContext<A>>,
|
||||
@ -210,7 +262,7 @@ mod tests {
|
||||
type Context = HttpContext<Self>;
|
||||
|
||||
fn started(&mut self, ctx: &mut Self::Context) {
|
||||
ctx.run_later(Duration::from_millis(100), |slf, ctx| slf.write(ctx));
|
||||
ctx.run_later(Duration::from_millis(100), Self::write);
|
||||
}
|
||||
}
|
||||
|
||||
@ -221,7 +273,7 @@ mod tests {
|
||||
ctx.write_eof()
|
||||
} else {
|
||||
ctx.write(Bytes::from(format!("LINE-{}", self.count)));
|
||||
ctx.run_later(Duration::from_millis(100), |slf, ctx| slf.write(ctx));
|
||||
ctx.run_later(Duration::from_millis(100), Self::write);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,59 @@
|
||||
//! Actix actors support for Actix Web.
|
||||
//!
|
||||
//! # Examples
|
||||
//!
|
||||
//! ```no_run
|
||||
//! use actix::{Actor, StreamHandler};
|
||||
//! use actix_web::{get, web, App, Error, HttpRequest, HttpResponse, HttpServer};
|
||||
//! use actix_web_actors::ws;
|
||||
//!
|
||||
//! /// Define Websocket actor
|
||||
//! struct MyWs;
|
||||
//!
|
||||
//! impl Actor for MyWs {
|
||||
//! type Context = ws::WebsocketContext<Self>;
|
||||
//! }
|
||||
//!
|
||||
//! /// Handler for ws::Message message
|
||||
//! impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for MyWs {
|
||||
//! fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
|
||||
//! match msg {
|
||||
//! Ok(ws::Message::Ping(msg)) => ctx.pong(&msg),
|
||||
//! Ok(ws::Message::Text(text)) => ctx.text(text),
|
||||
//! Ok(ws::Message::Binary(bin)) => ctx.binary(bin),
|
||||
//! _ => (),
|
||||
//! }
|
||||
//! }
|
||||
//! }
|
||||
//!
|
||||
//! #[get("/ws")]
|
||||
//! async fn index(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
|
||||
//! ws::start(MyWs, &req, stream)
|
||||
//! }
|
||||
//!
|
||||
//! #[actix_web::main]
|
||||
//! async fn main() -> std::io::Result<()> {
|
||||
//! HttpServer::new(|| App::new().service(index))
|
||||
//! .bind(("127.0.0.1", 8080))?
|
||||
//! .run()
|
||||
//! .await
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! # Documentation & Community Resources
|
||||
//! In addition to this API documentation, several other resources are available:
|
||||
//!
|
||||
//! * [Website & User Guide](https://actix.rs/)
|
||||
//! * [Documentation for `actix_web`](actix_web)
|
||||
//! * [Examples Repository](https://github.com/actix/examples)
|
||||
//! * [Community Chat on Discord](https://discord.gg/NWpN5mmg3x)
|
||||
//!
|
||||
//! To get started navigating the API docs, you may consider looking at the following pages first:
|
||||
//!
|
||||
//! * [`ws`]: This module provides actor support for WebSockets.
|
||||
//!
|
||||
//! * [`HttpContext`]: This struct provides actor support for streaming HTTP responses.
|
||||
//!
|
||||
|
||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
||||
#![warn(future_incompatible)]
|
||||
|
@ -1,4 +1,60 @@
|
||||
//! Websocket integration.
|
||||
//!
|
||||
//! # Examples
|
||||
//!
|
||||
//! ```no_run
|
||||
//! use actix::{Actor, StreamHandler};
|
||||
//! use actix_web::{get, web, App, Error, HttpRequest, HttpResponse, HttpServer};
|
||||
//! use actix_web_actors::ws;
|
||||
//!
|
||||
//! /// Define Websocket actor
|
||||
//! struct MyWs;
|
||||
//!
|
||||
//! impl Actor for MyWs {
|
||||
//! type Context = ws::WebsocketContext<Self>;
|
||||
//! }
|
||||
//!
|
||||
//! /// Handler for ws::Message message
|
||||
//! impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for MyWs {
|
||||
//! fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
|
||||
//! match msg {
|
||||
//! Ok(ws::Message::Ping(msg)) => ctx.pong(&msg),
|
||||
//! Ok(ws::Message::Text(text)) => ctx.text(text),
|
||||
//! Ok(ws::Message::Binary(bin)) => ctx.binary(bin),
|
||||
//! _ => (),
|
||||
//! }
|
||||
//! }
|
||||
//! }
|
||||
//!
|
||||
//! #[get("/ws")]
|
||||
//! async fn websocket(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
|
||||
//! ws::start(MyWs, &req, stream)
|
||||
//! }
|
||||
//!
|
||||
//! const MAX_FRAME_SIZE: usize = 16_384; // 16KiB
|
||||
//!
|
||||
//! #[get("/custom-ws")]
|
||||
//! async fn custom_websocket(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
|
||||
//! // Create a Websocket session with a specific max frame size, and protocols.
|
||||
//! ws::WsResponseBuilder::new(MyWs, &req, stream)
|
||||
//! .frame_size(MAX_FRAME_SIZE)
|
||||
//! .protocols(&["A", "B"])
|
||||
//! .start()
|
||||
//! }
|
||||
//!
|
||||
//! #[actix_web::main]
|
||||
//! async fn main() -> std::io::Result<()> {
|
||||
//! HttpServer::new(|| {
|
||||
//! App::new()
|
||||
//! .service(websocket)
|
||||
//! .service(custom_websocket)
|
||||
//! })
|
||||
//! .bind(("127.0.0.1", 8080))?
|
||||
//! .run()
|
||||
//! .await
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
|
||||
use std::{
|
||||
collections::VecDeque,
|
||||
@ -41,20 +97,51 @@ use tokio::sync::oneshot;
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Create a Websocket session response with default configuration.
|
||||
/// ```ignore
|
||||
/// WsResponseBuilder::new(WsActor, &req, stream).start()
|
||||
/// ```
|
||||
/// ```no_run
|
||||
/// # use actix::{Actor, StreamHandler};
|
||||
/// # use actix_web::{get, web, App, Error, HttpRequest, HttpResponse, HttpServer};
|
||||
/// # use actix_web_actors::ws;
|
||||
/// #
|
||||
/// # struct MyWs;
|
||||
/// #
|
||||
/// # impl Actor for MyWs {
|
||||
/// # type Context = ws::WebsocketContext<Self>;
|
||||
/// # }
|
||||
/// #
|
||||
/// # /// Handler for ws::Message message
|
||||
/// # impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for MyWs {
|
||||
/// # fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {}
|
||||
/// # }
|
||||
/// #
|
||||
/// #[get("/ws")]
|
||||
/// async fn websocket(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
|
||||
/// ws::WsResponseBuilder::new(MyWs, &req, stream).start()
|
||||
/// }
|
||||
///
|
||||
/// Create a Websocket session with a specific max frame size, [`Codec`], and protocols.
|
||||
/// ```ignore
|
||||
/// const MAX_FRAME_SIZE: usize = 16_384; // 16KiB
|
||||
///
|
||||
/// ws::WsResponseBuilder::new(WsActor, &req, stream)
|
||||
/// .codec(Codec::new())
|
||||
/// .protocols(&["A", "B"])
|
||||
/// .frame_size(MAX_FRAME_SIZE)
|
||||
/// .start()
|
||||
/// #[get("/custom-ws")]
|
||||
/// async fn custom_websocket(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
|
||||
/// // Create a Websocket session with a specific max frame size, codec, and protocols.
|
||||
/// ws::WsResponseBuilder::new(MyWs, &req, stream)
|
||||
/// .codec(actix_http::ws::Codec::new())
|
||||
/// // This will overwrite the codec's max frame-size
|
||||
/// .frame_size(MAX_FRAME_SIZE)
|
||||
/// .protocols(&["A", "B"])
|
||||
/// .start()
|
||||
/// }
|
||||
/// #
|
||||
/// # #[actix_web::main]
|
||||
/// # async fn main() -> std::io::Result<()> {
|
||||
/// # HttpServer::new(|| {
|
||||
/// # App::new()
|
||||
/// # .service(websocket)
|
||||
/// # .service(custom_websocket)
|
||||
/// # })
|
||||
/// # .bind(("127.0.0.1", 8080))?
|
||||
/// # .run()
|
||||
/// # .await
|
||||
/// # }
|
||||
/// ```
|
||||
pub struct WsResponseBuilder<'a, A, T>
|
||||
where
|
||||
|
@ -1,6 +1,26 @@
|
||||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
## Unreleased - 2022-xx-xx
|
||||
- Add `#[routes]` macro to support multiple paths for one handler. [#2718]
|
||||
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
|
||||
|
||||
[#2718]: https://github.com/actix/actix-web/pull/2718
|
||||
|
||||
|
||||
## 4.0.1 - 2022-06-11
|
||||
- Fix support for guard paths in route handler macros. [#2771]
|
||||
- Minimum supported Rust version (MSRV) is now 1.56 due to transitive `hashbrown` dependency.
|
||||
|
||||
[#2771]: https://github.com/actix/actix-web/pull/2771
|
||||
|
||||
|
||||
## 4.0.0 - 2022-02-24
|
||||
- Version aligned with `actix-web` and will remain in sync going forward.
|
||||
- No significant changes since `0.5.0`.
|
||||
|
||||
|
||||
## 0.5.0 - 2022-02-24
|
||||
- No significant changes since `0.5.0-rc.2`.
|
||||
|
||||
|
||||
## 0.5.0-rc.2 - 2022-02-01
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-web-codegen"
|
||||
version = "0.5.0-rc.2"
|
||||
version = "4.0.1"
|
||||
description = "Routing and runtime macros for Actix Web"
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
@ -15,17 +15,17 @@ edition = "2018"
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
actix-router = "0.5.0-rc.3"
|
||||
actix-router = "0.5.0"
|
||||
proc-macro2 = "1"
|
||||
quote = "1"
|
||||
syn = { version = "1", features = ["full", "parsing"] }
|
||||
syn = { version = "1", features = ["full", "extra-traits"] }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-macros = "0.2.3"
|
||||
actix-rt = "2.2"
|
||||
actix-test = "0.1.0-beta.12"
|
||||
actix-test = "0.1.0-beta.13"
|
||||
actix-utils = "3.0.0"
|
||||
actix-web = "4.0.0-rc.1"
|
||||
actix-web = "4.0.0"
|
||||
|
||||
futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] }
|
||||
trybuild = "1"
|
||||
|
@ -3,11 +3,11 @@
|
||||
> Routing and runtime macros for Actix Web.
|
||||
|
||||
[](https://crates.io/crates/actix-web-codegen)
|
||||
[](https://docs.rs/actix-web-codegen/0.5.0-rc.2)
|
||||
[](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
|
||||
[](https://docs.rs/actix-web-codegen/4.0.1)
|
||||

|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-web-codegen/0.5.0-rc.2)
|
||||
[](https://deps.rs/crate/actix-web-codegen/4.0.1)
|
||||
[](https://crates.io/crates/actix-web-codegen)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
@ -46,9 +46,20 @@
|
||||
//! ```
|
||||
//!
|
||||
//! # Multiple Path Handlers
|
||||
//! There are no macros to generate multi-path handlers. Let us know in [this issue].
|
||||
//! Acts as a wrapper for multiple single method handler macros. It takes no arguments and
|
||||
//! delegates those to the macros for the individual methods. See [macro@routes] macro docs.
|
||||
//!
|
||||
//! [this issue]: https://github.com/actix/actix-web/issues/1709
|
||||
//! ```
|
||||
//! # use actix_web::HttpResponse;
|
||||
//! # use actix_web_codegen::routes;
|
||||
//! #[routes]
|
||||
//! #[get("/test")]
|
||||
//! #[get("/test2")]
|
||||
//! #[delete("/test")]
|
||||
//! async fn example() -> HttpResponse {
|
||||
//! HttpResponse::Ok().finish()
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! [actix-web attributes docs]: https://docs.rs/actix-web/latest/actix_web/#attributes
|
||||
//! [GET]: macro@get
|
||||
@ -104,6 +115,39 @@ pub fn route(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
route::with_method(None, args, input)
|
||||
}
|
||||
|
||||
/// Creates resource handler, allowing multiple HTTP methods and paths.
|
||||
///
|
||||
/// # Syntax
|
||||
/// ```plain
|
||||
/// #[routes]
|
||||
/// #[<method>("path", ...)]
|
||||
/// #[<method>("path", ...)]
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// # Attributes
|
||||
/// The `routes` macro itself has no parameters, but allows specifying the attribute macros for
|
||||
/// the multiple paths and/or methods, e.g. [`GET`](macro@get) and [`POST`](macro@post).
|
||||
///
|
||||
/// These helper attributes take the same parameters as the [single method handlers](crate#single-method-handler).
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// # use actix_web::HttpResponse;
|
||||
/// # use actix_web_codegen::routes;
|
||||
/// #[routes]
|
||||
/// #[get("/test")]
|
||||
/// #[get("/test2")]
|
||||
/// #[delete("/test")]
|
||||
/// async fn example() -> HttpResponse {
|
||||
/// HttpResponse::Ok().finish()
|
||||
/// }
|
||||
/// ```
|
||||
#[proc_macro_attribute]
|
||||
pub fn routes(_: TokenStream, input: TokenStream) -> TokenStream {
|
||||
route::with_methods(input)
|
||||
}
|
||||
|
||||
macro_rules! method_macro {
|
||||
($variant:ident, $method:ident) => {
|
||||
#[doc = concat!("Creates route handler with `actix_web::guard::", stringify!($variant), "`.")]
|
||||
@ -152,6 +196,10 @@ method_macro!(Patch, patch);
|
||||
|
||||
/// Marks async main function as the Actix Web system entry-point.
|
||||
///
|
||||
/// Note that Actix Web also works under `#[tokio::main]` since version 4.0. However, this macro is
|
||||
/// still necessary for actor support (since actors use a `System`). Read more in the
|
||||
/// [`actix_web::rt`](https://docs.rs/actix-web/4/actix_web/rt) module docs.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// #[actix_web::main]
|
||||
|
@ -3,24 +3,12 @@ use std::{collections::HashSet, convert::TryFrom};
|
||||
use actix_router::ResourceDef;
|
||||
use proc_macro::TokenStream;
|
||||
use proc_macro2::{Span, TokenStream as TokenStream2};
|
||||
use quote::{format_ident, quote, ToTokens, TokenStreamExt};
|
||||
use syn::{parse_macro_input, AttributeArgs, Ident, LitStr, NestedMeta};
|
||||
|
||||
enum ResourceType {
|
||||
Async,
|
||||
Sync,
|
||||
}
|
||||
|
||||
impl ToTokens for ResourceType {
|
||||
fn to_tokens(&self, stream: &mut TokenStream2) {
|
||||
let ident = format_ident!("to");
|
||||
stream.append(ident);
|
||||
}
|
||||
}
|
||||
use quote::{quote, ToTokens, TokenStreamExt};
|
||||
use syn::{parse_macro_input, AttributeArgs, Ident, LitStr, Meta, NestedMeta, Path};
|
||||
|
||||
macro_rules! method_type {
|
||||
(
|
||||
$($variant:ident, $upper:ident,)+
|
||||
$($variant:ident, $upper:ident, $lower:ident,)+
|
||||
) => {
|
||||
#[derive(Debug, PartialEq, Eq, Hash)]
|
||||
pub enum MethodType {
|
||||
@ -42,20 +30,27 @@ macro_rules! method_type {
|
||||
_ => Err(format!("Unexpected HTTP method: `{}`", method)),
|
||||
}
|
||||
}
|
||||
|
||||
fn from_path(method: &Path) -> Result<Self, ()> {
|
||||
match () {
|
||||
$(_ if method.is_ident(stringify!($lower)) => Ok(Self::$variant),)+
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
method_type! {
|
||||
Get, GET,
|
||||
Post, POST,
|
||||
Put, PUT,
|
||||
Delete, DELETE,
|
||||
Head, HEAD,
|
||||
Connect, CONNECT,
|
||||
Options, OPTIONS,
|
||||
Trace, TRACE,
|
||||
Patch, PATCH,
|
||||
Get, GET, get,
|
||||
Post, POST, post,
|
||||
Put, PUT, put,
|
||||
Delete, DELETE, delete,
|
||||
Head, HEAD, head,
|
||||
Connect, CONNECT, connect,
|
||||
Options, OPTIONS, options,
|
||||
Trace, TRACE, trace,
|
||||
Patch, PATCH, patch,
|
||||
}
|
||||
|
||||
impl ToTokens for MethodType {
|
||||
@ -77,7 +72,7 @@ impl TryFrom<&syn::LitStr> for MethodType {
|
||||
struct Args {
|
||||
path: syn::LitStr,
|
||||
resource_name: Option<syn::LitStr>,
|
||||
guards: Vec<Ident>,
|
||||
guards: Vec<Path>,
|
||||
wrappers: Vec<syn::Type>,
|
||||
methods: HashSet<MethodType>,
|
||||
}
|
||||
@ -90,6 +85,18 @@ impl Args {
|
||||
let mut wrappers = Vec::new();
|
||||
let mut methods = HashSet::new();
|
||||
|
||||
if args.is_empty() {
|
||||
return Err(syn::Error::new(
|
||||
Span::call_site(),
|
||||
format!(
|
||||
r#"invalid service definition, expected #[{}("<path>")]"#,
|
||||
method
|
||||
.map_or("route", |it| it.as_str())
|
||||
.to_ascii_lowercase()
|
||||
),
|
||||
));
|
||||
}
|
||||
|
||||
let is_route_macro = method.is_none();
|
||||
if let Some(method) = method {
|
||||
methods.insert(method);
|
||||
@ -121,7 +128,7 @@ impl Args {
|
||||
}
|
||||
} else if nv.path.is_ident("guard") {
|
||||
if let syn::Lit::Str(lit) = nv.lit {
|
||||
guards.push(Ident::new(&lit.value(), Span::call_site()));
|
||||
guards.push(lit.parse::<Path>()?);
|
||||
} else {
|
||||
return Err(syn::Error::new_spanned(
|
||||
nv.lit,
|
||||
@ -183,55 +190,27 @@ impl Args {
|
||||
}
|
||||
|
||||
pub struct Route {
|
||||
/// Name of the handler function being annotated.
|
||||
name: syn::Ident,
|
||||
args: Args,
|
||||
|
||||
/// Args passed to routing macro.
|
||||
///
|
||||
/// When using `#[routes]`, this will contain args for each specific routing macro.
|
||||
args: Vec<Args>,
|
||||
|
||||
/// AST of the handler function being annotated.
|
||||
ast: syn::ItemFn,
|
||||
resource_type: ResourceType,
|
||||
|
||||
/// The doc comment attributes to copy to generated struct, if any.
|
||||
doc_attributes: Vec<syn::Attribute>,
|
||||
}
|
||||
|
||||
fn guess_resource_type(typ: &syn::Type) -> ResourceType {
|
||||
let mut guess = ResourceType::Sync;
|
||||
|
||||
if let syn::Type::ImplTrait(typ) = typ {
|
||||
for bound in typ.bounds.iter() {
|
||||
if let syn::TypeParamBound::Trait(bound) = bound {
|
||||
for bound in bound.path.segments.iter() {
|
||||
if bound.ident == "Future" {
|
||||
guess = ResourceType::Async;
|
||||
break;
|
||||
} else if bound.ident == "Responder" {
|
||||
guess = ResourceType::Sync;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
guess
|
||||
}
|
||||
|
||||
impl Route {
|
||||
pub fn new(
|
||||
args: AttributeArgs,
|
||||
ast: syn::ItemFn,
|
||||
method: Option<MethodType>,
|
||||
) -> syn::Result<Self> {
|
||||
if args.is_empty() {
|
||||
return Err(syn::Error::new(
|
||||
Span::call_site(),
|
||||
format!(
|
||||
r#"invalid service definition, expected #[{}("<some path>")]"#,
|
||||
method
|
||||
.map_or("route", |it| it.as_str())
|
||||
.to_ascii_lowercase()
|
||||
),
|
||||
));
|
||||
}
|
||||
|
||||
let name = ast.sig.ident.clone();
|
||||
|
||||
// Try and pull out the doc comments so that we can reapply them to the generated struct.
|
||||
@ -244,6 +223,7 @@ impl Route {
|
||||
.collect();
|
||||
|
||||
let args = Args::new(args, method)?;
|
||||
|
||||
if args.methods.is_empty() {
|
||||
return Err(syn::Error::new(
|
||||
Span::call_site(),
|
||||
@ -251,25 +231,44 @@ impl Route {
|
||||
));
|
||||
}
|
||||
|
||||
let resource_type = if ast.sig.asyncness.is_some() {
|
||||
ResourceType::Async
|
||||
} else {
|
||||
match ast.sig.output {
|
||||
syn::ReturnType::Default => {
|
||||
return Err(syn::Error::new_spanned(
|
||||
ast,
|
||||
"Function has no return type. Cannot be used as handler",
|
||||
));
|
||||
}
|
||||
syn::ReturnType::Type(_, ref typ) => guess_resource_type(typ.as_ref()),
|
||||
}
|
||||
};
|
||||
if matches!(ast.sig.output, syn::ReturnType::Default) {
|
||||
return Err(syn::Error::new_spanned(
|
||||
ast,
|
||||
"Function has no return type. Cannot be used as handler",
|
||||
));
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
name,
|
||||
args: vec![args],
|
||||
ast,
|
||||
doc_attributes,
|
||||
})
|
||||
}
|
||||
|
||||
fn multiple(args: Vec<Args>, ast: syn::ItemFn) -> syn::Result<Self> {
|
||||
let name = ast.sig.ident.clone();
|
||||
|
||||
// Try and pull out the doc comments so that we can reapply them to the generated struct.
|
||||
// Note that multi line doc comments are converted to multiple doc attributes.
|
||||
let doc_attributes = ast
|
||||
.attrs
|
||||
.iter()
|
||||
.filter(|attr| attr.path.is_ident("doc"))
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
if matches!(ast.sig.output, syn::ReturnType::Default) {
|
||||
return Err(syn::Error::new_spanned(
|
||||
ast,
|
||||
"Function has no return type. Cannot be used as handler",
|
||||
));
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
name,
|
||||
args,
|
||||
ast,
|
||||
resource_type,
|
||||
doc_attributes,
|
||||
})
|
||||
}
|
||||
@ -280,38 +279,57 @@ impl ToTokens for Route {
|
||||
let Self {
|
||||
name,
|
||||
ast,
|
||||
args:
|
||||
Args {
|
||||
args,
|
||||
doc_attributes,
|
||||
} = self;
|
||||
|
||||
let registrations: TokenStream2 = args
|
||||
.iter()
|
||||
.map(|args| {
|
||||
let Args {
|
||||
path,
|
||||
resource_name,
|
||||
guards,
|
||||
wrappers,
|
||||
methods,
|
||||
},
|
||||
resource_type,
|
||||
doc_attributes,
|
||||
} = self;
|
||||
let resource_name = resource_name
|
||||
.as_ref()
|
||||
.map_or_else(|| name.to_string(), LitStr::value);
|
||||
let method_guards = {
|
||||
let mut others = methods.iter();
|
||||
// unwrapping since length is checked to be at least one
|
||||
let first = others.next().unwrap();
|
||||
} = args;
|
||||
|
||||
let resource_name = resource_name
|
||||
.as_ref()
|
||||
.map_or_else(|| name.to_string(), LitStr::value);
|
||||
|
||||
let method_guards = {
|
||||
let mut others = methods.iter();
|
||||
|
||||
// unwrapping since length is checked to be at least one
|
||||
let first = others.next().unwrap();
|
||||
|
||||
if methods.len() > 1 {
|
||||
quote! {
|
||||
.guard(
|
||||
::actix_web::guard::Any(::actix_web::guard::#first())
|
||||
#(.or(::actix_web::guard::#others()))*
|
||||
)
|
||||
}
|
||||
} else {
|
||||
quote! {
|
||||
.guard(::actix_web::guard::#first())
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if methods.len() > 1 {
|
||||
quote! {
|
||||
.guard(
|
||||
::actix_web::guard::Any(::actix_web::guard::#first())
|
||||
#(.or(::actix_web::guard::#others()))*
|
||||
)
|
||||
let __resource = ::actix_web::Resource::new(#path)
|
||||
.name(#resource_name)
|
||||
#method_guards
|
||||
#(.guard(::actix_web::guard::fn_guard(#guards)))*
|
||||
#(.wrap(#wrappers))*
|
||||
.to(#name);
|
||||
|
||||
::actix_web::dev::HttpServiceFactory::register(__resource, __config);
|
||||
}
|
||||
} else {
|
||||
quote! {
|
||||
.guard(::actix_web::guard::#first())
|
||||
}
|
||||
}
|
||||
};
|
||||
})
|
||||
.collect();
|
||||
|
||||
let stream = quote! {
|
||||
#(#doc_attributes)*
|
||||
@ -321,14 +339,7 @@ impl ToTokens for Route {
|
||||
impl ::actix_web::dev::HttpServiceFactory for #name {
|
||||
fn register(self, __config: &mut actix_web::dev::AppService) {
|
||||
#ast
|
||||
let __resource = ::actix_web::Resource::new(#path)
|
||||
.name(#resource_name)
|
||||
#method_guards
|
||||
#(.guard(::actix_web::guard::fn_guard(#guards)))*
|
||||
#(.wrap(#wrappers))*
|
||||
.#resource_type(#name);
|
||||
|
||||
::actix_web::dev::HttpServiceFactory::register(__resource, __config)
|
||||
#registrations
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -357,6 +368,57 @@ pub(crate) fn with_method(
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn with_methods(input: TokenStream) -> TokenStream {
|
||||
let mut ast = match syn::parse::<syn::ItemFn>(input.clone()) {
|
||||
Ok(ast) => ast,
|
||||
// on parse error, make IDEs happy; see fn docs
|
||||
Err(err) => return input_and_compile_error(input, err),
|
||||
};
|
||||
|
||||
let (methods, others) = ast
|
||||
.attrs
|
||||
.into_iter()
|
||||
.map(|attr| match MethodType::from_path(&attr.path) {
|
||||
Ok(method) => Ok((method, attr)),
|
||||
Err(_) => Err(attr),
|
||||
})
|
||||
.partition::<Vec<_>, _>(Result::is_ok);
|
||||
|
||||
ast.attrs = others.into_iter().map(Result::unwrap_err).collect();
|
||||
|
||||
let methods =
|
||||
match methods
|
||||
.into_iter()
|
||||
.map(Result::unwrap)
|
||||
.map(|(method, attr)| {
|
||||
attr.parse_meta().and_then(|args| {
|
||||
if let Meta::List(args) = args {
|
||||
Args::new(args.nested.into_iter().collect(), Some(method))
|
||||
} else {
|
||||
Err(syn::Error::new_spanned(attr, "Invalid input for macro"))
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
{
|
||||
Ok(methods) if methods.is_empty() => return input_and_compile_error(
|
||||
input,
|
||||
syn::Error::new(
|
||||
Span::call_site(),
|
||||
"The #[routes] macro requires at least one `#[<method>(..)]` attribute.",
|
||||
),
|
||||
),
|
||||
Ok(methods) => methods,
|
||||
Err(err) => return input_and_compile_error(input, err),
|
||||
};
|
||||
|
||||
match Route::multiple(methods, ast) {
|
||||
Ok(route) => route.into_token_stream().into(),
|
||||
// on macro related error, make IDEs happy; see fn docs
|
||||
Err(err) => input_and_compile_error(input, err),
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts the error to a token stream and appends it to the original input.
|
||||
///
|
||||
/// Returning the original input in addition to the error is good for IDEs which can gracefully
|
||||
|