mirror of https://github.com/fafhrd91/actix-web synced 2025-07-03 09:36:36 +02:00

Compare commits


173 Commits

Author SHA1 Message Date
7767cf3071 prepare actix-web release 4.2.0 2022-09-11 16:44:46 +01:00
b59a96d9d7 prepare actix-web-codegen release 4.1.0 2022-09-11 16:42:28 +01:00
037740bf62 prepare actix-http release 3.2.2 2022-09-11 16:41:29 +01:00
386258c285 clarify worker_max_blocking_threads default 2022-09-06 10:13:10 +01:00
99bf774e94 update gh-pages deploy action 2022-09-03 22:15:59 +01:00
35b0fd1a85 specify branch in doc job 2022-09-03 22:05:28 +01:00
0b5b4dcbf3 reduce size of docs branch 2022-09-03 21:56:37 +01:00
c993055fc8 replace askama_escape in favor of v_htmlescape (#2824) 2022-08-30 09:34:46 +01:00
679f61cf37 bump msrv to 1.59 2022-08-27 13:14:16 +01:00
056de320f0 fix scope doc example
fixes #2843
2022-08-25 03:17:48 +01:00
f220719fae prepare awc release 3.0.1 2022-08-25 03:13:31 +01:00
c9f91796df awc: correctly handle redirections that begin with // (#2840) 2022-08-25 03:12:58 +01:00
ea764b1d57 add feature annotations to docs 2022-07-31 23:40:09 +01:00
19aa14a9d6 re-order HttpServer methods for better docs 2022-07-31 22:10:51 +01:00
10746fb2fb improve HttpServer docs 2022-07-31 21:58:15 +01:00
4bbe60b609 document h2 ping-pong 2022-07-24 16:42:35 +01:00
8ff489aa90 apply fix from #2369 2022-07-24 16:35:00 +01:00
e0a88cea8d remove unwindsafe assertions 2022-07-24 02:47:12 +01:00
d78ff283af prepare actix-test release 0.1.0 2022-07-24 02:13:46 +01:00
ce6d520215 prepare actix-http-test release 3.0.0 2022-07-24 02:11:21 +01:00
3e25742a41 prepare actix-files release 0.6.2 2022-07-23 16:37:59 +01:00
20f4cfe6b5 fix partial ranges for video content (#2817)
fixes #2815
2022-07-23 16:27:01 +01:00
6408291ab0 appease clippy by deriving Eq on a bunch of items (#2818) 2022-07-23 16:26:48 +01:00
8d260e599f clippy 2022-07-23 02:48:28 +01:00
14bcf72ec1 web utilizes const header names 2022-07-22 20:21:58 +01:00
6485434a33 update bump script 2022-07-22 20:19:15 +01:00
16c7c16463 reduce scope of once_cell change 2022-07-22 20:19:02 +01:00
9b0fdca6e9 Remove some unnecessary uses of once_cell::sync::Lazy (#2816) 2022-07-22 20:18:38 +01:00
8759d79b03 routes macro allowing multiple paths per handler (#2718)
* WIP: basic implementation for `routes` macro

* chore: changelog, docs, tests

* error on missing methods

* Apply suggestions from code review

Co-authored-by: Igor Aleksanov <popzxc@yandex.ru>

* update test stderr expectation

* add additional tests

* fix stderr output

* remove useless ResourceType

this is dead code from back when .to and .to_async were different ways to add a service

Co-authored-by: Igor Aleksanov <popzxc@yandex.ru>
Co-authored-by: Rob Ede <robjtede@icloud.com>
2022-07-04 04:31:49 +00:00
c0d5d7bdb5 add octal-ish CL test 2022-07-02 21:04:37 +01:00
40eab1f091 simplify simple decoder tests 2022-07-02 20:07:27 +01:00
75517cce82 install cargo hack in CI faster 2022-07-02 20:00:59 +01:00
9b51624b27 update cargo-cache to 0.8.2 2022-07-02 18:43:19 +01:00
8e2ae8cd40 install nextest faster 2022-07-02 18:38:08 +01:00
9a2f8450e0 install older cargo-edit 2022-07-02 17:40:03 +01:00
23ef51609e s/cargo-add/cargo-edit 2022-07-02 17:29:06 +01:00
f7d629a61a fix cargo-add in CI 2022-07-02 17:20:46 +01:00
e0845d9ad9 add msrv workarounds to ci 2022-07-02 17:12:24 +01:00
2f79daec16 only run tests on stable 2022-07-02 17:05:48 +01:00
f3f41a0cc7 prepare actix-http release 3.2.1 2022-07-02 16:50:54 +01:00
987067698b use sparse registry in CI 2022-07-01 12:45:26 +01:00
b62f1b4ef7 migrate deprecated method in docs 2022-07-01 12:40:00 +01:00
df5257c373 update trust dns resolver 2022-07-01 10:21:46 +01:00
226ea696ce update dev deps 2022-07-01 10:19:28 +01:00
e524fc86ea add HTTP/0.9 rejection test 2022-07-01 09:03:57 +01:00
7e990e423f add http/1.0 GET parsing tests 2022-07-01 08:24:45 +01:00
8f9a12ed5d fix parsing ambiguities for HTTP/1.0 requests (#2794)
* fix HRS vuln when first CL header is 0

* ignore TE headers in http/1.0 reqs

* update changelog

* disallow HTTP/1.0 requests without a CL header

* fix test

* broken fix for http1.0 post requests
2022-07-01 08:23:40 +01:00
c6eba2da9b prepare actix-http release 3.2.0 (#2801) 2022-07-01 06:16:17 +01:00
06c7945801 retain previously set vary headers when using compress (#2798)
* retain previously set vary headers when using compress
2022-06-30 09:19:16 +01:00
0dba6310c6 Expose option for setting TLS handshake timeout (#2752)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2022-06-27 02:57:21 +00:00
f7d7d92984 address clippy lints 2022-06-27 03:12:36 +01:00
3d6ea7fe9b Improve documentation for actix-web-actors (#2788) 2022-06-26 16:45:02 +00:00
8dbf7da89f Fix common grammar mistakes and add small documentation for AppConfig's Default implementation (#2793) 2022-06-25 14:01:06 +00:00
de92b3be2e fix unrecoverable Err(Overflow) in websocket frame parser (#2790) 2022-06-24 03:46:17 +00:00
5d0e8138ee Add getters for &ServiceRequest (#2786) 2022-06-22 21:02:03 +01:00
6b7196225e Bump up MSRV to 1.57 (#2789) 2022-06-22 12:08:06 +01:00
265fa0d050 Add link to MongoDB example in README (#2783) 2022-06-15 22:38:10 +01:00
062127a210 Revert "actix-http: Pull actix-web dev-dep from Git repo"
This reverts commit 3926416580.
2022-06-12 00:55:06 +09:00
3926416580 actix-http: Pull actix-web dev-dep from Git repo
The published version of actix-web depends on a buggy version of zstd crate,
temporarily use actix-web on git repo to avoid the build failure.

Signed-off-by: Yuki Okushi <jtitor@2k36.org>
2022-06-12 00:48:08 +09:00
43671ae4aa release 4.1 group (#2781) 2022-06-12 00:15:43 +09:00
264a703d94 revert broken fix in #2624 (#2779)
* revert broken fix in #2624

* update changelog
2022-06-11 13:43:13 +01:00
498fb954b3 migrate from deprecated sha-1 to sha1 (#2780)
closes #2778
2022-06-11 04:53:58 +01:00
2253eae2bb update msrv to 1.56 (#2777)
* update msrv to 1.56

* remove transitive dashmap dependency

closes #2747
2022-06-11 04:03:26 +01:00
8e76a1c775 Allow a path as a guard in route handler macro (#2771)
* Allow a path as a guard in route handler macro

* Update CHANGES.md

Co-authored-by: Rob Ede <robjtede@icloud.com>
2022-06-06 18:53:23 +01:00
dce57a79c9 Implement ResponseError for Infallible (#2769) 2022-05-30 20:52:48 +01:00
6a5b370206 fix some typos (#2744)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2022-04-24 22:01:20 +00:00
b1c85ba85b Add ServiceConfig::default_service (#2743)
* Add `ServiceConfig::default_service`

based on https://github.com/actix/actix-web/pull/2338

* update changelog
2022-04-23 22:11:45 +01:00
9aab911600 Improve documentation for FromRequest::Future (#2734)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2022-04-23 20:57:11 +00:00
017e40f733 update optional extractor impl docs 2022-04-23 21:02:24 +01:00
45592b37b6 add Route::wrap (#2725)
* add `Route::wrap`

* add tests

* fix clippy

* fix doctests
2022-04-23 21:01:55 +01:00
8abcb94512 fix tokio-uring version 2022-04-23 14:37:03 +01:00
f2cacc4c9d clear conn_data on HttpRequest drop (#2742)
* clear conn_data on HttpRequest drop

fixes #2740

* update changelog

* fix doc test
2022-04-23 13:35:41 +01:00
56b9c0d08e remove payload unwindsafe impl assert 2022-04-23 12:31:32 +01:00
de9e41484a Add ServiceRequest::extract (#2647)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2022-04-02 19:46:26 +01:00
2fed978597 remove -http TestRequest doc test 2022-03-28 22:44:32 +01:00
40048a5811 rework actix_router::Quoter (#2709)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2022-03-28 20:58:35 +00:00
e942d3e3b1 update migration guide 2022-03-26 13:26:12 +00:00
09cffc093c Bump zstd to 0.11 (#2694)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2022-03-22 15:30:06 +00:00
c58f287044 Removed random superfluous whitespace (#2705) 2022-03-20 21:36:19 +00:00
7b27493e4c move coverage to own workflow 2022-03-10 16:17:49 +00:00
478b33b8a3 remove nightly io-uring job 2022-03-10 16:00:15 +00:00
592b40f914 move io-uring tests to own job 2022-03-10 15:03:55 +00:00
fe5279c77a use tracing in actix-router 2022-03-10 03:14:14 +00:00
80d222aa78 use tracing in actix-http 2022-03-10 03:12:29 +00:00
a03a2a0076 deprecate NamedFile::set_status_code 2022-03-10 02:54:06 +00:00
745e738955 fix negative impl assertion on 1.60+
see https://github.com/rust-lang/rust/issues/94791
2022-03-10 02:36:57 +00:00
1fd90f0b10 Implement getters for named file fields (#2689)
Co-authored-by: Janis Goldschmidt <github@aberrat.io>
2022-03-10 01:29:26 +00:00
a35804b89f update files tokio-uring to 0.3 2022-03-10 01:05:03 +00:00
5611b98c0d prepare actix-http release 3.0.4 2022-03-09 18:13:39 +00:00
dce9438518 document with ws feature 2022-03-09 18:11:12 +00:00
be986d96b3 bump regex requirement to 1.5.5 due to security advisory (#2687) 2022-03-08 17:42:42 +00:00
8ddb24b49b prepare awc release 3.0.0 (#2684) 2022-03-08 16:51:40 +00:00
87f627cd5d improve servicerequest docs 2022-03-07 16:48:04 +00:00
03456b8a33 update actix-web-in-http example 2022-03-05 23:43:31 +00:00
8c2fad3164 align hello-world examples 2022-03-05 23:15:33 +00:00
62fbd225bc prepare actix-http release 3.0.2 2022-03-05 22:26:19 +00:00
0fa4d999d9 fix(actix-http): encode correctly camel case header with n+2 hyphens (#2683)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2022-03-05 22:24:21 +00:00
da4c849f62 prepare actix-http release 3.0.1 2022-03-04 03:16:02 +00:00
49cd303c3b fix dispatcher panic when combining pipelining and keepalive
fixes #2678
2022-03-04 03:12:38 +00:00
955c3ac0c4 Add support for audio files streaming (#2645) 2022-03-03 00:29:59 +00:00
56e5c19b85 add actix 0.13 support (#2675) 2022-03-02 17:53:47 +00:00
3f03af1c59 clippy 2022-03-02 03:25:30 +00:00
25c0673278 Update MIGRATION-4.0.md 2022-03-02 02:20:48 +00:00
e7a05f9892 fix(docs): TestRequest example fixed (#2643)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2022-03-01 00:02:08 +00:00
2f13e5f675 Update MIGRATION-4.0.md 2022-02-26 17:13:42 +00:00
9f964751f6 tweak migration doc 2022-02-25 21:40:23 +00:00
fcca515387 prepare actix-multipart release 0.4.0 2022-02-25 20:41:57 +00:00
075932d823 prepare actix-web-actors release 4.0.0 2022-02-25 20:41:33 +00:00
cb379c0e0c prepare actix-files release 0.6.0 2022-02-25 20:36:16 +00:00
d4a5d450de prepare actix-web release 4.0.1 2022-02-25 20:31:46 +00:00
542200cbc2 update readme 2022-02-25 19:11:46 +00:00
d0c08dbb7d prepare releases: actix-http 3.0.0 and actix-web 4.0.0 (#2663) 2022-02-25 18:46:35 +00:00
d0b5fb18d2 update migration guide on middleware 2022-02-22 17:40:38 +00:00
12fb3412a5 remove concurrency groups 2022-02-22 12:52:07 +00:00
2665357a0c fix ci groups 2022-02-22 12:47:57 +00:00
693271e571 add CI job concurrency groups 2022-02-22 12:41:08 +00:00
10ef9b0751 remove useless doctest main fns 2022-02-22 12:32:06 +00:00
ce00c88963 fix changelog typo 2022-02-22 11:46:51 +00:00
75e6ffb057 prepare actix-router release 0.5.0 (#2658) 2022-02-22 11:38:25 +00:00
ad38973767 move blocking error to web (#2660) 2022-02-22 08:45:28 +00:00
1c1d6477ef remove legacy ws test 2022-02-22 07:11:16 +00:00
53509a5361 ignore all http1 connection headers in h2 2022-02-22 07:07:12 +00:00
a6f27baff1 flesh out Responder docs 2022-02-22 07:07:12 +00:00
218e34ee17 fix http error debug impl 2022-02-22 07:07:12 +00:00
11bfa84926 rename simple_service to status_service (#2659) 2022-02-22 07:06:36 +00:00
5aa6f713c7 update errorhandlers migration guide 2022-02-22 06:23:01 +00:00
151a15da74 prepare actix-http release 3.0.0-rc.4 2022-02-22 00:21:49 +00:00
1ce58ecb30 fix dispatcher panic on pending flush
fixes thread panic in actix-http-3.0.0-rc.3 #2655
2022-02-22 00:19:48 +00:00
f940653981 Edits to the migration notes (#2654) 2022-02-19 17:05:54 +00:00
b291e29882 fix links 2022-02-18 03:41:10 +00:00
f843776f36 Fix links in README (#2653) 2022-02-18 03:34:12 +00:00
52f7d96358 tweak migration document 2022-02-17 19:13:03 +00:00
51e573b888 prepare actix-test release 0.1.0-beta.13 2022-02-16 03:13:41 +00:00
38e015432b prepare actix-http-test release 3.0.0-beta.13 2022-02-16 03:13:22 +00:00
f5895d5eff prepare actix-web-actors release 4.0.0-beta.12 2022-02-16 03:11:22 +00:00
a0c4bf8d1b prepare awc release 3.0.0-beta.21 2022-02-16 03:10:01 +00:00
594e3a6ef1 prepare actix-http release 3.0.0-rc.3 2022-02-16 03:07:12 +00:00
a808a26d8c bump actix-codec to 0.5 2022-02-15 20:49:10 +00:00
de62e8b025 add nextest to post-merge ci 2022-02-15 14:40:26 +00:00
3486edabcf update migrations guide re tokio v1 2022-02-15 00:54:12 +00:00
4c59a34513 Remove clone implementation for Path (#2639) 2022-02-10 10:29:00 +00:00
1b706b3069 update body type migration guide 2022-02-09 16:12:39 +00:00
a9f445875a update migration guide 2022-02-09 12:31:06 +00:00
e0f02c1d9e update migration guide 2022-02-08 16:53:09 +00:00
092dbba5b9 update migration guide 2022-02-08 15:24:35 +00:00
ff4b2d251f fix impl assertions 2022-02-08 14:32:57 +00:00
98faa61afe fix impl assertions 2022-02-08 13:37:01 +00:00
3f2db9e75c fix doc tests 2022-02-08 12:25:13 +00:00
074d18209d better document relationship with tokio 2022-02-08 10:21:47 +00:00
593fbde46a prepare actix-web release 4.0.0-rc.3 2022-02-08 09:31:48 +00:00
161861997c prepare actix-http release 3.0.0-rc.2 2022-02-08 09:31:20 +00:00
3d621677a5 clippy 2022-02-08 08:00:47 +00:00
0c144054cb make Condition generic over body type (#2635)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2022-02-08 07:50:05 +00:00
b0fbe0dfd8 fix workers doc 2022-02-08 06:58:33 +00:00
b653bf557f added note to v4 migration guide about worker thread update (#2634) 2022-02-07 19:04:03 +00:00
1d1a65282f RC refinements (#2625) 2022-02-04 20:37:33 +00:00
b0a363a7ae add migration note about fromrequest::configure 2022-02-04 18:48:22 +00:00
b4d3c2394d clean up migration guide 2022-02-04 18:22:38 +00:00
5ca42df89a fix stuck connection when handler doesn't read payload (#2624) 2022-02-03 07:03:39 +00:00
fc5ecdc30b fix changelog 2022-02-02 03:55:43 +00:00
7fe800c3ff prepare actix-web release 4.0.0-rc.2 2022-02-02 03:54:26 +00:00
075df88a07 update 4.0 migration guide 2022-02-02 03:42:07 +00:00
391d8a744a update 4.0 migration guide 2022-02-02 03:13:11 +00:00
5b6cb681b9 update 4.0 migration guide 2022-02-02 03:09:33 +00:00
0957ec40b4 split migration file 2022-02-02 02:46:37 +00:00
ccf430d74a disable coverage job 2022-02-01 15:24:35 +00:00
c84c1f0f15 simplify macros feature 2022-02-01 14:39:49 +00:00
e9279dfbb8 Fix deprecated notice about client_shutdown (#2621) 2022-02-01 13:44:56 +00:00
a68239adaa bump zstd to 0.10 2022-02-01 13:35:32 +00:00
40a4b1ccd5 add macro feature (#2619)
Co-authored-by: Ibraheem Ahmed <ibrah1440@gmail.com>
2022-02-01 02:35:05 +00:00
7f5a8c0851 fix vmanifest 2022-02-01 00:33:41 +00:00
bcdde1d4ea move actix-web to own dir 2022-02-01 00:30:41 +00:00
30aa64ea32 update dep graphs 2022-02-01 00:23:58 +00:00
249 changed files with 7300 additions and 4175 deletions

View File

@ -6,9 +6,12 @@ lint-all = "clippy --workspace --all-features --tests --examples --bins -- -Dcli
ci-check-min = "hack --workspace check --no-default-features"
ci-check-default = "hack --workspace check"
ci-check-default-tests = "check --workspace --tests"
ci-check-all-feature-powerset="hack --workspace --feature-powerset --skip=__compress,io-uring check"
ci-check-all-feature-powerset="hack --workspace --feature-powerset --skip=__compress,experimental-io-uring check"
ci-check-all-feature-powerset-linux="hack --workspace --feature-powerset --skip=__compress check"
# testing
ci-doctest-default = "test --workspace --doc --no-fail-fast -- --nocapture"
ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture"
# compile docs as docs.rs would
# RUSTDOCFLAGS="--cfg=docsrs" cargo +nightly doc --no-deps --workspace

View File

@ -1,4 +1,4 @@
name: CI (master only)
name: CI (post-merge)
on:
push:
@ -23,6 +23,7 @@ jobs:
CI: 1
CARGO_INCREMENTAL: 0
VCPKGRS_DYNAMIC: 1
CARGO_UNSTABLE_SPARSE_REGISTRY: true
steps:
- uses: actions/checkout@v2
@ -44,18 +45,15 @@ jobs:
profile: minimal
override: true
- name: Install cargo-hack
uses: taiki-e/install-action@cargo-hack
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.2.0
- name: Install cargo-hack
uses: actions-rs/cargo@v1
with:
command: install
args: cargo-hack
- name: check minimal
uses: actions-rs/cargo@v1
with: { command: ci-check-min }
@ -78,23 +76,19 @@ jobs:
cargo test --lib --tests -p=actix-multipart --all-features
cargo test --lib --tests -p=actix-web-actors --all-features
- name: tests (io-uring)
if: matrix.target.os == 'ubuntu-latest'
timeout-minutes: 60
run: >
sudo bash -c "ulimit -Sl 512
&& ulimit -Hl 512
&& PATH=$PATH:/usr/share/rust/.cargo/bin
&& RUSTUP_TOOLCHAIN=${{ matrix.version }} cargo test --lib --tests -p=actix-files --all-features"
- name: Clear the cargo caches
run: |
cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
cargo install cargo-cache --version 0.8.2 --no-default-features --features ci-autoclean
cargo-cache
ci_feature_powerset_check:
name: Verify Feature Combinations
runs-on: ubuntu-latest
env:
CI: 1
CARGO_INCREMENTAL: 0
steps:
- uses: actions/checkout@v2
@ -105,18 +99,15 @@ jobs:
profile: minimal
override: true
- name: Install cargo-hack
uses: taiki-e/install-action@cargo-hack
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.2.0
- name: Install cargo-hack
uses: actions-rs/cargo@v1
with:
command: install
args: cargo-hack
- name: check feature combinations
uses: actions-rs/cargo@v1
with: { command: ci-check-all-feature-powerset }
@ -125,29 +116,35 @@ jobs:
uses: actions-rs/cargo@v1
with: { command: ci-check-all-feature-powerset-linux }
coverage:
name: coverage
nextest:
name: nextest
runs-on: ubuntu-latest
env:
CI: 1
CARGO_INCREMENTAL: 0
steps:
- uses: actions/checkout@v2
- name: Install stable
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable-x86_64-unknown-linux-gnu
toolchain: stable
profile: minimal
override: true
- name: Install nextest
uses: taiki-e/install-action@nextest
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.2.0
uses: Swatinem/rust-cache@v1.3.0
- name: Generate coverage file
run: |
cargo install cargo-tarpaulin --vers "^0.13"
cargo tarpaulin --workspace --features=rustls,openssl --out Xml --verbose
- name: Upload to Codecov
uses: codecov/codecov-action@v1
with: { file: cobertura.xml }
- name: Test with cargo-nextest
uses: actions-rs/cargo@v1
with:
command: nextest
args: run

View File

@ -16,7 +16,7 @@ jobs:
- { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
- { name: Windows, os: windows-2022, triple: x86_64-pc-windows-msvc }
version:
- 1.54.0 # MSRV
- 1.59.0 # MSRV
- stable
name: ${{ matrix.target.name }} / ${{ matrix.version }}
@ -47,18 +47,22 @@ jobs:
profile: minimal
override: true
- name: Install cargo-hack
uses: taiki-e/install-action@cargo-hack
- name: workaround MSRV issues
if: matrix.version != 'stable'
run: |
cargo install cargo-edit --version=0.8.0
cargo add const-str@0.3 --dev -p=actix-web
cargo add const-str@0.3 --dev -p=awc
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.2.0
- name: Install cargo-hack
uses: actions-rs/cargo@v1
with:
command: install
args: cargo-hack
- name: check minimal
uses: actions-rs/cargo@v1
with: { command: ci-check-min }
@ -81,19 +85,37 @@ jobs:
cargo test --lib --tests -p=actix-multipart --all-features
cargo test --lib --tests -p=actix-web-actors --all-features
- name: Clear the cargo caches
run: |
cargo install cargo-cache --version 0.8.2 --no-default-features --features ci-autoclean
cargo-cache
io-uring:
name: io-uring tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable-x86_64-unknown-linux-gnu
profile: minimal
override: true
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.3.0
- name: tests (io-uring)
if: matrix.target.os == 'ubuntu-latest'
timeout-minutes: 60
run: >
sudo bash -c "ulimit -Sl 512
&& ulimit -Hl 512
&& PATH=$PATH:/usr/share/rust/.cargo/bin
&& RUSTUP_TOOLCHAIN=${{ matrix.version }} cargo test --lib --tests -p=actix-files --all-features"
- name: Clear the cargo caches
run: |
cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
cargo-cache
&& RUSTUP_TOOLCHAIN=stable cargo test --lib --tests -p=actix-files --all-features"
rustdoc:
name: doc tests

.github/workflows/coverage.yml (vendored, new file, 36 lines)
View File

@ -0,0 +1,36 @@
# disabled because `cargo tarpaulin` currently segfaults
name: Coverage
on:
push:
branches: [master]
jobs:
# job currently (1st Feb 2022) segfaults
coverage:
name: coverage
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install stable
uses: actions-rs/toolchain@v1
with:
toolchain: stable-x86_64-unknown-linux-gnu
profile: minimal
override: true
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.2.0
- name: Generate coverage file
run: |
cargo install cargo-tarpaulin --vers "^0.13"
cargo tarpaulin --workspace --features=rustls,openssl --out Xml --verbose
- name: Upload to Codecov
uses: codecov/codecov-action@v1
with: { file: cobertura.xml }

View File

@ -28,8 +28,7 @@ jobs:
run: echo '<meta http-equiv="refresh" content="0;url=actix_web/index.html">' > target/doc/index.html
- name: Deploy to GitHub Pages
uses: JamesIves/github-pages-deploy-action@3.7.1
uses: JamesIves/github-pages-deploy-action@v4.4.0
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BRANCH: gh-pages
FOLDER: target/doc
folder: target/doc
single-commit: true

.prettierrc.json (new file, 3 lines)
View File

@ -0,0 +1,3 @@
{
"proseWrap": "never"
}

CHANGES.md (1033 lines changed)

File diff suppressed because it is too large.

View File

@ -1,36 +1,6 @@
[package]
name = "actix-web"
version = "4.0.0-rc.1"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
]
description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
keywords = ["actix", "http", "web", "framework", "async"]
categories = [
"network-programming",
"asynchronous",
"web-programming::http-server",
"web-programming::websocket"
]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git"
license = "MIT OR Apache-2.0"
edition = "2018"
[package.metadata.docs.rs]
# features that docs.rs will build with
features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies", "secure-cookies"]
rustdoc-args = ["--cfg", "docsrs"]
[lib]
name = "actix_web"
path = "src/lib.rs"
[workspace]
resolver = "2"
members = [
".",
"actix-files",
"actix-http-test",
"actix-http",
@ -39,93 +9,10 @@ members = [
"actix-test",
"actix-web-actors",
"actix-web-codegen",
"actix-web",
"awc",
]
[features]
default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
# Brotli algorithm content-encoding support
compress-brotli = ["actix-http/compress-brotli", "__compress"]
# Gzip and deflate algorithms content-encoding support
compress-gzip = ["actix-http/compress-gzip", "__compress"]
# Zstd algorithm content-encoding support
compress-zstd = ["actix-http/compress-zstd", "__compress"]
# support for cookies
cookies = ["cookie"]
# secure cookies feature
secure-cookies = ["cookie/secure"]
# openssl
openssl = ["actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]
# rustls
rustls = ["actix-http/rustls", "actix-tls/accept", "actix-tls/rustls"]
# Internal (PRIVATE!) features used to aid testing and checking feature status.
# Don't rely on these whatsoever. They may disappear at anytime.
__compress = []
# io-uring feature is only available on Linux
experimental-io-uring = ["actix-server/io-uring"]
[dependencies]
actix-codec = "0.4.1"
actix-macros = "0.2.3"
actix-rt = "2.6"
actix-server = "2"
actix-service = "2.0.0"
actix-utils = "3.0.0"
actix-tls = { version = "3.0.0", default-features = false, optional = true }
actix-http = { version = "3.0.0-rc.1", features = ["http2", "ws"] }
actix-router = "0.5.0-rc.3"
actix-web-codegen = "0.5.0-rc.2"
ahash = "0.7"
bytes = "1"
cfg-if = "1"
cookie = { version = "0.16", features = ["percent-encode"], optional = true }
derive_more = "0.99.5"
encoding_rs = "0.8"
futures-core = { version = "0.3.7", default-features = false }
futures-util = { version = "0.3.7", default-features = false }
itoa = "1"
language-tags = "0.3"
once_cell = "1.5"
log = "0.4"
mime = "0.3"
pin-project-lite = "0.2.7"
regex = "1.4"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_urlencoded = "0.7"
smallvec = "1.6.1"
socket2 = "0.4.0"
time = { version = "0.3", default-features = false, features = ["formatting"] }
url = "2.1"
[dev-dependencies]
actix-files = "0.6.0-beta.16"
actix-test = { version = "0.1.0-beta.12", features = ["openssl", "rustls"] }
awc = { version = "3.0.0-beta.20", features = ["openssl"] }
brotli = "3.3.3"
const-str = "0.3"
criterion = { version = "0.3", features = ["html_reports"] }
env_logger = "0.9"
flate2 = "1.0.13"
futures-util = { version = "0.3.7", default-features = false, features = ["std"] }
rand = "0.8"
rcgen = "0.8"
rustls-pemfile = "0.2"
static_assertions = "1"
tls-openssl = { package = "openssl", version = "0.10.9" }
tls-rustls = { package = "rustls", version = "0.20.0" }
zstd = "0.9"
[profile.dev]
# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.
debug = 0
@ -142,7 +29,7 @@ actix-http-test = { path = "actix-http-test" }
actix-multipart = { path = "actix-multipart" }
actix-router = { path = "actix-router" }
actix-test = { path = "actix-test" }
actix-web = { path = "." }
actix-web = { path = "actix-web" }
actix-web-actors = { path = "actix-web-actors" }
actix-web-codegen = { path = "actix-web-codegen" }
awc = { path = "awc" }
@ -155,35 +42,3 @@ awc = { path = "awc" }
# actix-utils = { path = "../actix-net/actix-utils" }
# actix-tls = { path = "../actix-net/actix-tls" }
# actix-server = { path = "../actix-net/actix-server" }
[[test]]
name = "test_server"
required-features = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
[[test]]
name = "compression"
required-features = ["compress-brotli", "compress-gzip", "compress-zstd"]
[[example]]
name = "basic"
required-features = ["compress-gzip"]
[[example]]
name = "uds"
required-features = ["compress-gzip"]
[[example]]
name = "on-connect"
required-features = []
[[bench]]
name = "server"
harness = false
[[bench]]
name = "service"
harness = false
[[bench]]
name = "responder"
harness = false

View File

@ -1,677 +0,0 @@
## Unreleased
- The default `NormalizePath` behavior now strips trailing slashes by default. This was
previously documented to be the case in v3 but the behavior now matches. The effect is that
routes defined with trailing slashes will become inaccessible when
using `NormalizePath::default()`. As such, calling `NormalizePath::default()` will log a warning.
It is advised that the `new` method be used instead.
Before: `#[get("/test/")]`
After: `#[get("/test")]`
Alternatively, explicitly require trailing slashes: `NormalizePath::new(TrailingSlash::Always)`.
- The `type Config` of `FromRequest` was removed.
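For illustration, a minimal sketch of an extractor under the new trait shape, with no `type Config` (the `ClientIp` type is hypothetical, not part of actix-web):
```rust
use std::future::{ready, Ready};

use actix_web::{dev::Payload, Error, FromRequest, HttpRequest};

struct ClientIp(String);

impl FromRequest for ClientIp {
    type Error = Error;
    type Future = Ready<Result<Self, Self::Error>>;
    // note: no `type Config` item any more

    fn from_request(req: &HttpRequest, _payload: &mut Payload) -> Self::Future {
        // read the peer address; fall back to a placeholder when it is unknown
        let ip = req
            .peer_addr()
            .map(|addr| addr.ip().to_string())
            .unwrap_or_else(|| "unknown".to_owned());

        ready(Ok(ClientIp(ip)))
    }
}
```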
- The `compress` feature flag has been split into per-algorithm flags (brotli, gzip, zstd).
By default, all compression algorithms are enabled.
To select the algorithms available to `middleware::Compress`, use the following flags:
- `compress-brotli`
- `compress-gzip`
- `compress-zstd`
If you had selected dedicated `actix-web` compression features in your `Cargo.toml` and still want
compression enabled, change the feature selection as below:
Before: `"compress"`
After: `"compress-brotli", "compress-gzip", "compress-zstd"`
## 3.0.0
- The return type for `ServiceRequest::app_data::<T>()` was changed from returning a `Data<T>` to
simply a `T`. To access a `Data<T>` use `ServiceRequest::app_data::<Data<T>>()`.
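A hedged sketch of the two lookups (the `AppState` type is a placeholder):
```rust
use actix_web::{dev::ServiceRequest, web::Data};

// placeholder application state type
struct AppState {
    name: &'static str,
}

fn inspect(req: &ServiceRequest) {
    // `app_data::<T>()` now returns a reference to exactly the type that was
    // registered, e.g. a plain `AppState` added with `App::app_data(AppState { .. })`
    if let Some(state) = req.app_data::<AppState>() {
        println!("state: {}", state.name);
    }

    // if the state was registered wrapped in `Data` (e.g. `App::app_data(Data::new(..))`),
    // ask for the wrapper type explicitly
    if let Some(state) = req.app_data::<Data<AppState>>() {
        println!("state: {}", state.name);
    }
}
```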
- Cookie handling has been offloaded to the `cookie` crate:
* `USERINFO_ENCODE_SET` is no longer exposed. Percent-encoding is still supported; check docs.
* Some types now require lifetime parameters.
- The `time` crate was updated to `v0.2`, a major breaking change that affects
any `actix-web` method previously expecting a time v0.1 input.
- Setting a cookie's SameSite property explicitly to `SameSite::None` will now
result in `SameSite=None` being sent with the response's Set-Cookie header.
To create a cookie without a SameSite attribute, remove any calls to `same_site`.
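For illustration, a minimal sketch of the two cases (requires the default `cookies` feature; names and values are arbitrary):
```rust
use actix_web::cookie::{Cookie, SameSite};

fn build_cookies() {
    // explicitly setting `SameSite::None` is now serialized as `SameSite=None`
    let _explicit_none = Cookie::build("session", "abc123")
        .same_site(SameSite::None)
        .finish();

    // to send a cookie without any SameSite attribute, don't call `same_site` at all
    let _no_attribute = Cookie::build("session", "abc123").finish();
}
```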
- Support for Actors messages was moved to the `actix-http` crate and is enabled
with the `actors` feature.
- The `content_length` function was removed from `actix-http`.
Set `Content-Length` by setting the response body as usual or by calling the `no_chunking` function.
- `BodySize::Sized64` variant has been removed. `BodySize::Sized` now receives a
`u64` instead of a `usize`.
- Code that was using `path.<index>` to access a `web::Path<(A, B, C)>`'s elements now needs to use
destructuring or `.into_inner()`. For example:
```rust
// Previously:
async fn some_route(path: web::Path<(String, String)>) -> String {
format!("Hello, {} {}", path.0, path.1)
}
// Now (this also worked before):
async fn some_route(path: web::Path<(String, String)>) -> String {
let (first_name, last_name) = path.into_inner();
format!("Hello, {} {}", first_name, last_name)
}
// Or (this wasn't previously supported):
async fn some_route(web::Path((first_name, last_name)): web::Path<(String, String)>) -> String {
format!("Hello, {} {}", first_name, last_name)
}
```
- `middleware::NormalizePath` can now also be configured to trim trailing slashes instead of always keeping one.
It will need `middleware::normalize::TrailingSlash` when being constructed with `NormalizePath::new(...)`,
or for an easier migration you can replace `wrap(middleware::NormalizePath)` with `wrap(middleware::NormalizePath::new(TrailingSlash::MergeOnly))`.
- `HttpServer::maxconn` is renamed to the more expressive `HttpServer::max_connections`.
- `HttpServer::maxconnrate` is renamed to the more expressive `HttpServer::max_connection_rate`.
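A small sketch of the renamed builder methods (the limits shown are arbitrary):
```rust
use actix_web::{App, HttpServer};

fn configure_server() {
    let _server = HttpServer::new(|| App::new())
        // formerly `HttpServer::maxconn` and `HttpServer::maxconnrate`
        .max_connections(25_000)
        .max_connection_rate(256);
}
```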
## 2.0.0
- `HttpServer::start()` was renamed to `HttpServer::run()`. It is also possible to
`.await` the result of `run()`; in that case it awaits server exit.
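For example, an entry point in the new style, mirroring the README example further below (the `#[actix_web::main]` attribute is from current releases; address and port are arbitrary):
```rust
use actix_web::{App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // `run()` returns a server future; awaiting it waits for server exit
    HttpServer::new(|| App::new())
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```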
- `App::register_data()` renamed to `App::app_data()` and accepts any type `T: 'static`.
Stored data is available via `HttpRequest::app_data()` method at runtime.
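A minimal sketch of registering and reading such data (the `AppName` type is a placeholder):
```rust
use actix_web::{web, App, HttpRequest, HttpResponse};

// placeholder state type
struct AppName(&'static str);

async fn index(req: HttpRequest) -> HttpResponse {
    // retrieve the stored value at runtime via `HttpRequest::app_data`
    let name = req.app_data::<AppName>().map(|a| a.0).unwrap_or("unknown");
    HttpResponse::Ok().body(name)
}

fn build_app() {
    // `register_data` is now `app_data` and accepts any `T: 'static`
    let _app = App::new()
        .app_data(AppName("my-app"))
        .route("/", web::get().to(index));
}
```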
- Extractor configuration must be registered with `App::app_data()` instead of `App::data()`
- Sync handlers have been removed and the `.to_async()` method has been renamed to `.to()`.
Replace `fn` with `async fn` to convert a sync handler to an async one.
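For illustration, a hypothetical handler before and after the change:
```rust
use actix_web::{HttpResponse, Responder};

// before: a sync handler, registered with `.to()` (or an async one with `.to_async()`)
fn status_sync() -> impl Responder {
    HttpResponse::Ok().body("ok")
}

// after: only `.to()` remains and handlers are written as `async fn`
async fn status() -> impl Responder {
    HttpResponse::Ok().body("ok")
}
```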
- `actix_http_test::TestServer` was moved to the `actix_web::test` module. To start
a test server, use the `test::start()` or `test_start_with_config()` methods.
- The `ResponseError` trait has been refactored. `ResponseError::error_response()` renders
the HTTP response.
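A minimal sketch of the refactored trait in use (the `MyError` type is made up):
```rust
use std::fmt;

use actix_web::{HttpResponse, ResponseError};

#[derive(Debug)]
struct MyError;

impl fmt::Display for MyError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("something went wrong")
    }
}

impl ResponseError for MyError {
    // `error_response()` now renders the HTTP response directly
    fn error_response(&self) -> HttpResponse {
        HttpResponse::InternalServerError().body(self.to_string())
    }
}
```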
- Feature `rust-tls` renamed to `rustls`
instead of
```toml
actix-web = { version = "2.0.0", features = ["rust-tls"] }
```
use
```toml
actix-web = { version = "2.0.0", features = ["rustls"] }
```
- Feature `ssl` renamed to `openssl`
instead of
```toml
actix-web = { version = "2.0.0", features = ["ssl"] }
```
use
```toml
actix-web = { version = "2.0.0", features = ["openssl"] }
```
- `Cors` builder now requires that you call `.finish()` to construct the middleware
## 1.0.1
- Cors middleware has been moved to `actix-cors` crate
instead of
```rust
use actix_web::middleware::cors::Cors;
```
use
```rust
use actix_cors::Cors;
```
- Identity middleware has been moved to `actix-identity` crate
instead of
```rust
use actix_web::middleware::identity::{Identity, CookieIdentityPolicy, IdentityService};
```
use
```rust
use actix_identity::{Identity, CookieIdentityPolicy, IdentityService};
```
## 1.0.0
- Extractor configuration. In version 1.0 this is handled with the new `Data` mechanism for both setting and retrieving the configuration
instead of
```rust
#[derive(Default)]
struct ExtractorConfig {
config: String,
}
impl FromRequest for YourExtractor {
type Config = ExtractorConfig;
type Result = Result<YourExtractor, Error>;
fn from_request(req: &HttpRequest, cfg: &Self::Config) -> Self::Result {
println!("use the config: {:?}", cfg.config);
...
}
}
App::new().resource("/route_with_config", |r| {
r.post().with_config(handler_fn, |cfg| {
cfg.0.config = "test".to_string();
})
})
```
use the HttpRequest to get the configuration like any other `Data` with `req.app_data::<C>()` and set it with the `data()` method on the `resource`
```rust
#[derive(Default)]
struct ExtractorConfig {
config: String,
}
impl FromRequest for YourExtractor {
type Error = Error;
type Future = Result<Self, Self::Error>;
type Config = ExtractorConfig;
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
let cfg = req.app_data::<ExtractorConfig>();
println!("config data?: {:?}", cfg.unwrap().role);
...
}
}
App::new().service(
resource("/route_with_config")
.data(ExtractorConfig {
config: "test".to_string(),
})
.route(post().to(handler_fn)),
)
```
- Resource registration. 1.0 version uses generalized resource
registration via `.service()` method.
instead of
```rust
App::new().resource("/welcome", |r| r.f(welcome))
```
use App's or Scope's `.service()` method. The `.service()` method accepts an
object that implements the `HttpServiceFactory` trait. By default,
actix-web provides `Resource` and `Scope` services.
```rust
App::new().service(
    web::resource("/welcome")
        .route(web::get().to(welcome))
        .route(web::post().to(post_handler)),
)
```
- Scope registration.
instead of
```rust
let app = App::new().scope("/{project_id}", |scope| {
scope
.resource("/path1", |r| r.f(|_| HttpResponse::Ok()))
.resource("/path2", |r| r.f(|_| HttpResponse::Ok()))
.resource("/path3", |r| r.f(|_| HttpResponse::MethodNotAllowed()))
});
```
use `.service()` for registration and `web::scope()` as scope object factory.
```rust
let app = App::new().service(
web::scope("/{project_id}")
.service(web::resource("/path1").to(|| HttpResponse::Ok()))
.service(web::resource("/path2").to(|| HttpResponse::Ok()))
.service(web::resource("/path3").to(|| HttpResponse::MethodNotAllowed()))
);
```
- `.with()`, `.with_async()` registration methods have been renamed to `.to()` and `.to_async()`.
instead of
```rust
App::new().resource("/welcome", |r| r.with(welcome))
```
use `.to()` or `.to_async()` methods
```rust
App::new().service(web::resource("/welcome").to(welcome))
```
- Arguments are passed to handlers with extractors; multiple arguments are allowed.
instead of
```rust
fn welcome((body, req): (Bytes, HttpRequest)) -> ... {
...
}
```
use multiple arguments
```rust
fn welcome(body: Bytes, req: HttpRequest) -> ... {
...
}
```
- `.f()`, `.a()` and `.h()` handler registration methods have been removed.
Use `.to()` for handlers and `.to_async()` for async handlers. Handler functions
must use extractors.
instead of
```rust
App::new().resource("/welcome", |r| r.f(welcome))
```
use App's `to()` or `to_async()` methods
```rust
App::new().service(web::resource("/welcome").to(welcome))
```
- `HttpRequest` does not provide access to request's payload stream.
instead of
```rust
fn index(req: &HttpRequest) -> Box<Future<Item=HttpResponse, Error=Error>> {
req
.payload()
.from_err()
.fold((), |_, chunk| {
...
})
.map(|_| HttpResponse::Ok().finish())
.responder()
}
```
use `Payload` extractor
```rust
fn index(stream: web::Payload) -> impl Future<Item=HttpResponse, Error=Error> {
stream
.from_err()
.fold((), |_, chunk| {
...
})
.map(|_| HttpResponse::Ok().finish())
}
```
- `State` is now `Data`. You register Data during the App initialization process
and then access it from handlers either using a Data extractor or using
`HttpRequest`'s API.
instead of
```rust
App::with_state(T)
```
use App's `data` method
```rust
App::new()
    .data(T)
```
and either use the Data extractor within your handler
```rust
use actix_web::web::Data;
fn endpoint_handler(data: Data<T>) {
...
}
```
.. or access your Data element from the HttpRequest
```rust
fn endpoint_handler(req: HttpRequest) {
let data: Option<Data<T>> = req.app_data::<T>();
}
```
- AsyncResponder is removed, use `.to_async()` registration method and `impl Future<>` as result type.
instead of
```rust
use actix_web::AsyncResponder;
fn endpoint_handler(...) -> impl Future<Item=HttpResponse, Error=Error>{
...
.responder()
}
```
.. simply omit AsyncResponder and the corresponding responder() finish method
- Middleware
instead of
```rust
let app = App::new()
.middleware(middleware::Logger::default())
```
use `.wrap()` method
```rust
let app = App::new()
.wrap(middleware::Logger::default())
.route("/index.html", web::get().to(index));
```
- `HttpRequest::body()`, `HttpRequest::urlencoded()`, `HttpRequest::json()`, `HttpRequest::multipart()`
methods have been removed. Use the `Bytes`, `String`, `Form`, `Json`, and `Multipart` extractors instead.
instead of
```rust
fn index(req: &HttpRequest) -> Responder {
req.body()
.and_then(|body| {
...
})
}
```
use
```rust
fn index(body: Bytes) -> Responder {
...
}
```
- The `actix_web::server` module has been removed. To start an HTTP server, use the `actix_web::HttpServer` type.
- `StaticFiles` and `NamedFile` have been moved to a separate crate.
instead of `use actix_web::fs::StaticFiles`
use `use actix_files::Files`
instead of `use actix_web::fs::NamedFile`
use `use actix_files::NamedFile`
- Multipart has been moved to a separate crate.
instead of `use actix_web::multipart::Multipart`
use `use actix_multipart::Multipart`
- Response compression is not enabled by default.
To enable, use `Compress` middleware, `App::new().wrap(Compress::default())`.
- Session middleware moved to actix-session crate
- Actors support has been moved to the `actix-web-actors` crate
- Custom errors
Instead of the `error_response` method alone, `ResponseError` now provides two methods: `error_response` and `render_response`. `error_response` creates the error response and `render_response` returns the error response to the caller.
The simplest migration from 0.7 to 1.0 is to add the following method to the custom implementation of `ResponseError`:
```rust
fn render_response(&self) -> HttpResponse {
self.error_response()
}
```
## 0.7.15
- The `' '` character is not percent decoded anymore before matching routes. If you need to use it in
your routes, you should use `%20`.
instead of
```rust
fn main() {
let app = App::new().resource("/my index", |r| {
r.method(http::Method::GET)
.with(index);
});
}
```
use
```rust
fn main() {
let app = App::new().resource("/my%20index", |r| {
r.method(http::Method::GET)
.with(index);
});
}
```
- If you used `AsyncResult::async` you need to replace it with `AsyncResult::future`
## 0.7.4
- `Route::with_config()`/`Route::with_async_config()` always passes configuration objects as tuple
even for handler with one parameter.
## 0.7
- `HttpRequest` does not implement `Stream` anymore. If you need to read request payload
use `HttpMessage::payload()` method.
instead of
```rust
fn index(req: HttpRequest) -> impl Responder {
req
.from_err()
.fold(...)
....
}
```
use `.payload()`
```rust
fn index(req: HttpRequest) -> impl Responder {
req
.payload() // <- get request payload stream
.from_err()
.fold(...)
....
}
```
- [Middleware](https://actix.rs/actix-web/actix_web/middleware/trait.Middleware.html)
trait uses `&HttpRequest` instead of `&mut HttpRequest`.
- Removed `Route::with2()` and `Route::with3()`; use a tuple of extractors instead.
instead of
```rust
fn index(query: Query<..>, info: Json<MyStruct>) -> impl Responder {}
```
use tuple of extractors and use `.with()` for registration:
```rust
fn index((query, json): (Query<..>, Json<MyStruct>)) -> impl Responder {}
```
- `Handler::handle()` uses `&self` instead of `&mut self`
- `Handler::handle()` accepts reference to `HttpRequest<_>` instead of value
- Removed deprecated `HttpServer::threads()`, use
[HttpServer::workers()](https://actix.rs/actix-web/actix_web/server/struct.HttpServer.html#method.workers) instead.
- Renamed `client::ClientConnectorError::Connector` to
`client::ClientConnectorError::Resolver`
- `Route::with()` does not return `ExtractorConfig`; to configure the
extractor, use `Route::with_config()`
instead of
```rust
fn main() {
let app = App::new().resource("/index.html", |r| {
r.method(http::Method::GET)
.with(index)
.limit(4096); // <- limit size of the payload
});
}
```
use
```rust
fn main() {
let app = App::new().resource("/index.html", |r| {
r.method(http::Method::GET)
.with_config(index, |cfg| { // <- register handler
cfg.limit(4096); // <- limit size of the payload
})
});
}
```
- `Route::with_async()` does not return `ExtractorConfig`; to configure the
extractor, use `Route::with_async_config()`
## 0.6
- The `Path<T>` extractor returns `ErrorNotFound` on failure instead of `ErrorBadRequest`
- `ws::Message::Close` now includes optional close reason.
`ws::CloseCode::Status` and `ws::CloseCode::Empty` have been removed.
- `HttpServer::threads()` renamed to `HttpServer::workers()`.
- `HttpServer::start_ssl()` and `HttpServer::start_tls()` deprecated.
Use `HttpServer::bind_ssl()` and `HttpServer::bind_tls()` instead.
- `HttpRequest::extensions()` returns a read-only reference to the request's extensions;
`HttpRequest::extensions_mut()` returns a mutable reference.
- Instead of
`use actix_web::middleware::{CookieSessionBackend, CookieSessionError, RequestSession, Session, SessionBackend, SessionImpl, SessionStorage};`
use the `actix_web::middleware::session` module:
`use actix_web::middleware::session::{CookieSessionBackend, CookieSessionError, RequestSession, Session, SessionBackend, SessionImpl, SessionStorage};`
- `FromRequest::from_request()` accepts mutable reference to a request
- `FromRequest::Result` has to implement `Into<Reply<Self>>`
- [`Responder::respond_to()`](
https://actix.rs/actix-web/actix_web/trait.Responder.html#tymethod.respond_to)
is generic over `S`
- Use the `Query` extractor instead of `HttpRequest::query()`.
```rust
fn index(q: Query<HashMap<String, String>>) -> Result<..> {
...
}
```
or
```rust
let q = Query::<HashMap<String, String>>::extract(req);
```
- WebSocket operations are implemented as the `WsWriter` trait;
you need to `use actix_web::ws::WsWriter`.
## 0.5
- `HttpResponseBuilder::body()`, `.finish()`, `.json()`
methods return `HttpResponse` instead of `Result<HttpResponse>`
- `actix_web::Method`, `actix_web::StatusCode`, `actix_web::Version`
moved to `actix_web::http` module
- `actix_web::header` moved to `actix_web::http::header`
- `NormalizePath` moved to `actix_web::http` module
- `HttpServer` moved to `actix_web::server`, added new `actix_web::server::new()` function,
shortcut for `actix_web::server::HttpServer::new()`
- `DefaultHeaders` middleware does not use separate builder, all builder methods moved to type itself
- `StaticFiles::new()`'s show_index parameter removed, use `show_files_listing()` method instead.
- `CookieSessionBackendBuilder` removed, all methods moved to `CookieSessionBackend` type
- `actix_web::httpcodes` module is deprecated, `HttpResponse::Ok()`, `HttpResponse::Found()` and other `HttpResponse::XXX()`
functions should be used instead
- `ClientRequestBuilder::body()` returns `Result<_, actix_web::Error>`
instead of `Result<_, http::Error>`
- `Application` renamed to `App`
- `actix_web::Reply`, `actix_web::Resource` moved to `actix_web::dev`

README.md (105 lines changed)
View File

@ -1,105 +0,0 @@
<div align="center">
<h1>Actix Web</h1>
<p>
<strong>Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust</strong>
</p>
<p>
[![crates.io](https://img.shields.io/crates/v/actix-web?label=latest)](https://crates.io/crates/actix-web)
[![Documentation](https://docs.rs/actix-web/badge.svg?version=4.0.0-rc.1)](https://docs.rs/actix-web/4.0.0-rc.1)
![MSRV](https://img.shields.io/badge/rustc-1.54+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-web.svg)
[![Dependency Status](https://deps.rs/crate/actix-web/4.0.0-rc.1/status.svg)](https://deps.rs/crate/actix-web/4.0.0-rc.1)
<br />
[![CI](https://github.com/actix/actix-web/actions/workflows/ci.yml/badge.svg)](https://github.com/actix/actix-web/actions/workflows/ci.yml)
[![codecov](https://codecov.io/gh/actix/actix-web/branch/master/graph/badge.svg)](https://codecov.io/gh/actix/actix-web)
![downloads](https://img.shields.io/crates/d/actix-web.svg)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
</p>
</div>
## Features
- Supports _HTTP/1.x_ and _HTTP/2_
- Streaming and pipelining
- Powerful [request routing](https://actix.rs/docs/url-dispatch/) with optional macros
- Full [Tokio](https://tokio.rs) compatibility
- Keep-alive and slow requests handling
- Client/server [WebSockets](https://actix.rs/docs/websockets/) support
- Transparent content compression/decompression (br, gzip, deflate, zstd)
- Multipart streams
- Static assets
- SSL support using OpenSSL or Rustls
- Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/))
- Includes an async [HTTP client](https://docs.rs/awc/)
- Runs on stable Rust 1.54+
## Documentation
- [Website & User Guide](https://actix.rs)
- [Examples Repository](https://github.com/actix/examples)
- [API Documentation](https://docs.rs/actix-web)
- [API Documentation (master branch)](https://actix.rs/actix-web/actix_web)
## Example
Dependencies:
```toml
[dependencies]
actix-web = "4.0.0-rc.1"
```
Code:
```rust
use actix_web::{get, web, App, HttpServer, Responder};
#[get("/{id}/{name}/index.html")]
async fn index(params: web::Path<(u32, String)>) -> impl Responder {
let (id, name) = params.into_inner();
format!("Hello {}! id:{}", name, id)
}
#[actix_web::main] // or #[tokio::main]
async fn main() -> std::io::Result<()> {
HttpServer::new(|| App::new().service(index))
.bind(("127.0.0.1", 8080))?
.run()
.await
}
```
### More examples
- [Basic Setup](https://github.com/actix/examples/tree/master/basics/basics/)
- [Application State](https://github.com/actix/examples/tree/master/basics/state/)
- [JSON Handling](https://github.com/actix/examples/tree/master/json/json/)
- [Multipart Streams](https://github.com/actix/examples/tree/master/forms/multipart/)
- [Diesel Integration](https://github.com/actix/examples/tree/master/database_interactions/diesel/)
- [r2d2 Integration](https://github.com/actix/examples/tree/master/database_interactions/r2d2/)
- [Simple WebSocket](https://github.com/actix/examples/tree/master/websockets/websocket/)
- [Tera Templates](https://github.com/actix/examples/tree/master/template_engines/tera/)
- [Askama Templates](https://github.com/actix/examples/tree/master/template_engines/askama/)
- [HTTPS using Rustls](https://github.com/actix/examples/tree/master/security/rustls/)
- [HTTPS using OpenSSL](https://github.com/actix/examples/tree/master/security/openssl/)
- [WebSocket Chat](https://github.com/actix/examples/tree/master/websockets/chat/)
You may consider checking out [this directory](https://github.com/actix/examples/tree/master/) for more examples.
## Benchmarks
One of the fastest web frameworks available according to the [TechEmpower Framework Benchmark](https://www.techempower.com/benchmarks/#section=data-r20&test=composite).
## License
This project is licensed under either of the following licenses, at your option:
- Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or [http://www.apache.org/licenses/LICENSE-2.0])
- MIT license ([LICENSE-MIT](LICENSE-MIT) or [http://opensource.org/licenses/MIT])
## Code of Conduct
Contribution to the actix-web repo is organized under the terms of the Contributor Covenant.
The Actix team promises to intervene to uphold that code of conduct.

README.md (new symbolic link, 1 line)
View File

@ -0,0 +1 @@
actix-web/README.md

View File

@ -1,6 +1,28 @@
# Changes
## Unreleased - 2021-xx-xx
## Unreleased - 2022-xx-xx
- Minimum supported Rust version (MSRV) is now 1.59 due to transitive `time` dependency.
## 0.6.2 - 2022-07-23
- Allow partial range responses for video content to start streaming sooner. [#2817]
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
[#2817]: https://github.com/actix/actix-web/pull/2817
## 0.6.1 - 2022-06-11
- Add `NamedFile::{modified, metadata, content_type, content_disposition, encoding}()` getters. [#2021]
- Update `tokio-uring` dependency to `0.3`.
- Audio files now use `Content-Disposition: inline` instead of `attachment`. [#2645]
- Minimum supported Rust version (MSRV) is now 1.56 due to transitive `hashbrown` dependency.
[#2021]: https://github.com/actix/actix-web/pull/2021
[#2645]: https://github.com/actix/actix-web/pull/2645
## 0.6.0 - 2022-02-25
- No significant changes since `0.6.0-beta.16`.
## 0.6.0-beta.16 - 2022-01-31

View File

@ -1,6 +1,6 @@
[package]
name = "actix-files"
version = "0.6.0-beta.16"
version = "0.6.2"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"fakeshadow <24548779@qq.com>",
@ -22,12 +22,11 @@ path = "src/lib.rs"
experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]
[dependencies]
actix-http = "3.0.0-rc.1"
actix-http = "3"
actix-service = "2"
actix-utils = "3"
actix-web = { version = "4.0.0-rc.1", default-features = false }
actix-web = { version = "4", default-features = false }
askama_escape = "0.10"
bitflags = "1"
bytes = "1"
derive_more = "0.99.5"
@ -38,11 +37,15 @@ mime = "0.3"
mime_guess = "2.0.1"
percent-encoding = "2.1"
pin-project-lite = "0.2.7"
v_htmlescape = "0.15"
tokio-uring = { version = "0.2", optional = true, features = ["bytes"] }
# experimental-io-uring
[target.'cfg(target_os = "linux")'.dependencies]
tokio-uring = { version = "0.3", optional = true, features = ["bytes"] }
actix-server = { version = "2.1", optional = true } # ensure matching tokio-uring versions
[dev-dependencies]
actix-rt = "2.2"
actix-test = "0.1.0-beta.12"
actix-web = "4.0.0-rc.1"
actix-rt = "2.7"
actix-test = "0.1"
actix-web = "4"
tempfile = "3.2"

View File

@ -3,16 +3,16 @@
> Static file serving for Actix Web
[![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files)
[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.0-beta.16)](https://docs.rs/actix-files/0.6.0-beta.16)
[![Version](https://img.shields.io/badge/rustc-1.54+-ab6000.svg)](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.2)](https://docs.rs/actix-files/0.6.2)
![Version](https://img.shields.io/badge/rustc-1.59+-ab6000.svg)
![License](https://img.shields.io/crates/l/actix-files.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-files/0.6.0-beta.16/status.svg)](https://deps.rs/crate/actix-files/0.6.0-beta.16)
[![dependency status](https://deps.rs/crate/actix-files/0.6.2/status.svg)](https://deps.rs/crate/actix-files/0.6.2)
[![Download](https://img.shields.io/crates/d/actix-files.svg)](https://crates.io/crates/actix-files)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources
- [API Documentation](https://docs.rs/actix-files/)
- [Example Project](https://github.com/actix/examples/tree/master/basics/static_index)
- [API Documentation](https://docs.rs/actix-files)
- [Example Project](https://github.com/actix/examples/tree/master/basics/static-files)
- Minimum Supported Rust Version (MSRV): 1.54

View File

@ -81,7 +81,7 @@ async fn chunked_read_file_callback(
) -> Result<(File, Bytes), Error> {
use io::{Read as _, Seek as _};
let res = actix_web::rt::task::spawn_blocking(move || {
let res = actix_web::web::block(move || {
let mut buf = Vec::with_capacity(max_bytes);
file.seek(io::SeekFrom::Start(offset))?;
@ -94,8 +94,7 @@ async fn chunked_read_file_callback(
Ok((file, Bytes::from(buf)))
}
})
.await
.map_err(|_| actix_web::error::BlockingError)??;
.await??;
Ok(res)
}

View File

@ -1,8 +1,8 @@
use std::{fmt::Write, fs::DirEntry, io, path::Path, path::PathBuf};
use actix_web::{dev::ServiceResponse, HttpRequest, HttpResponse};
use askama_escape::{escape as escape_html_entity, Html};
use percent_encoding::{utf8_percent_encode, CONTROLS};
use v_htmlescape::escape as escape_html_entity;
/// A directory; responds with the generated directory listing.
#[derive(Debug)]
@ -59,7 +59,7 @@ macro_rules! encode_file_url {
/// ```
macro_rules! encode_file_name {
($entry:ident) => {
escape_html_entity(&$entry.file_name().to_string_lossy(), Html)
escape_html_entity(&$entry.file_name().to_string_lossy())
};
}
@ -75,7 +75,7 @@ pub(crate) fn directory_listing(
if dir.is_visible(&entry) {
let entry = entry.unwrap();
let p = match entry.path().strip_prefix(&dir.path) {
Ok(p) if cfg!(windows) => base.join(p).to_string_lossy().replace("\\", "/"),
Ok(p) if cfg!(windows) => base.join(p).to_string_lossy().replace('\\', "/"),
Ok(p) => base.join(p).to_string_lossy().into_owned(),
Err(_) => continue,
};

View File

@ -2,7 +2,7 @@ use actix_web::{http::StatusCode, ResponseError};
use derive_more::Display;
/// Errors which can occur when serving static files.
#[derive(Display, Debug, PartialEq)]
#[derive(Debug, PartialEq, Eq, Display)]
pub enum FilesError {
/// Path is not a directory
#[allow(dead_code)]
@ -22,7 +22,7 @@ impl ResponseError for FilesError {
}
#[allow(clippy::enum_variant_names)]
#[derive(Display, Debug, PartialEq)]
#[derive(Debug, PartialEq, Eq, Display)]
#[non_exhaustive]
pub enum UriSegmentError {
/// The segment started with the wrapped invalid character.

View File

@ -364,20 +364,43 @@ mod tests {
);
}
#[allow(deprecated)]
#[actix_rt::test]
async fn test_named_file_status_code_text() {
let mut file = NamedFile::open_async("Cargo.toml")
async fn status_code_customize_same_output() {
let file1 = NamedFile::open_async("Cargo.toml")
.await
.unwrap()
.set_status_code(StatusCode::NOT_FOUND);
let file2 = NamedFile::open_async("Cargo.toml")
.await
.unwrap()
.customize()
.with_status(StatusCode::NOT_FOUND);
let req = TestRequest::default().to_http_request();
let res1 = file1.respond_to(&req);
let res2 = file2.respond_to(&req);
assert_eq!(res1.status(), StatusCode::NOT_FOUND);
assert_eq!(res2.status(), StatusCode::NOT_FOUND);
}
#[actix_rt::test]
async fn test_named_file_status_code_text() {
let mut file = NamedFile::open_async("Cargo.toml").await.unwrap();
{
file.file();
let _f: &File = &file;
}
{
let _f: &mut File = &mut file;
}
let file = file.customize().with_status(StatusCode::NOT_FOUND);
let req = TestRequest::default().to_http_request();
let resp = file.respond_to(&req);
assert_eq!(

View File

@ -23,6 +23,7 @@ use actix_web::{
use bitflags::bitflags;
use derive_more::{Deref, DerefMut};
use futures_core::future::LocalBoxFuture;
use mime::Mime;
use mime_guess::from_path;
use crate::{encoding::equiv_utf8_text, range::HttpRange};
@ -76,8 +77,8 @@ pub struct NamedFile {
pub(crate) md: Metadata,
pub(crate) flags: Flags,
pub(crate) status_code: StatusCode,
pub(crate) content_type: mime::Mime,
pub(crate) content_disposition: header::ContentDisposition,
pub(crate) content_type: Mime,
pub(crate) content_disposition: ContentDisposition,
pub(crate) encoding: Option<ContentEncoding>,
}
@ -96,18 +97,18 @@ impl NamedFile {
///
/// # Examples
/// ```ignore
/// use std::{
/// io::{self, Write as _},
/// env,
/// fs::File
/// };
/// use actix_files::NamedFile;
/// use std::io::{self, Write};
/// use std::env;
/// use std::fs::File;
///
/// fn main() -> io::Result<()> {
/// let mut file = File::create("foo.txt")?;
/// file.write_all(b"Hello, world!")?;
/// let named_file = NamedFile::from_file(file, "bar.txt")?;
/// # std::fs::remove_file("foo.txt");
/// Ok(())
/// }
/// ```
pub fn from_file<P: AsRef<Path>>(file: File, path: P) -> io::Result<NamedFile> {
let path = path.as_ref().to_path_buf();
@ -128,7 +129,7 @@ impl NamedFile {
let ct = from_path(&path).first_or_octet_stream();
let disposition = match ct.type_() {
mime::IMAGE | mime::TEXT | mime::VIDEO => DispositionType::Inline,
mime::IMAGE | mime::TEXT | mime::AUDIO | mime::VIDEO => DispositionType::Inline,
mime::APPLICATION => match ct.subtype() {
mime::JAVASCRIPT | mime::JSON => DispositionType::Inline,
name if name == "wasm" => DispositionType::Inline,
@ -209,11 +210,10 @@ impl NamedFile {
Self::from_file(file, path)
}
#[allow(rustdoc::broken_intra_doc_links)]
/// Attempts to open a file asynchronously in read-only mode.
///
/// When the `experimental-io-uring` crate feature is enabled, this will be async.
/// Otherwise, it will be just like [`open`][Self::open].
/// When the `experimental-io-uring` crate feature is enabled, this will be async. Otherwise, it
/// will behave just like `open`.
///
/// # Examples
/// ```
@ -238,13 +238,13 @@ impl NamedFile {
Self::from_file(file, path)
}
/// Returns reference to the underlying `File` object.
/// Returns reference to the underlying file object.
#[inline]
pub fn file(&self) -> &File {
&self.file
}
/// Retrieve the path of this file.
/// Returns the filesystem path to this file.
///
/// # Examples
/// ```
@ -262,16 +262,53 @@ impl NamedFile {
self.path.as_path()
}
/// Set response **Status Code**
/// Returns the time the file was last modified.
///
/// Returns `None` only on unsupported platforms; see [`std::fs::Metadata::modified()`].
/// Therefore, it is usually safe to unwrap this.
#[inline]
pub fn modified(&self) -> Option<SystemTime> {
self.modified
}
/// Returns the filesystem metadata associated with this file.
#[inline]
pub fn metadata(&self) -> &Metadata {
&self.md
}
/// Returns the `Content-Type` header that will be used when serving this file.
#[inline]
pub fn content_type(&self) -> &Mime {
&self.content_type
}
/// Returns the `Content-Disposition` that will be used when serving this file.
#[inline]
pub fn content_disposition(&self) -> &ContentDisposition {
&self.content_disposition
}
/// Returns the `Content-Encoding` that will be used when serving this file.
///
/// A return value of `None` indicates that the content is not already using a compressed
/// representation and may be subject to compression downstream.
#[inline]
pub fn content_encoding(&self) -> Option<ContentEncoding> {
self.encoding
}
/// Set response status code.
#[deprecated(since = "0.7.0", note = "Prefer `Responder::customize()`.")]
pub fn set_status_code(mut self, status: StatusCode) -> Self {
self.status_code = status;
self
}
/// Set the MIME Content-Type for serving this file. By default the Content-Type is inferred
/// from the filename extension.
/// Sets the `Content-Type` header that will be used when serving this file. By default the
/// `Content-Type` is inferred from the filename extension.
#[inline]
pub fn set_content_type(mut self, mime_type: mime::Mime) -> Self {
pub fn set_content_type(mut self, mime_type: Mime) -> Self {
self.content_type = mime_type;
self
}
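
A brief sketch (not part of the diff) showing how the accessors and builder methods above fit together; it assumes a `Cargo.toml` in the working directory and the `mime` crate for the content-type constant:

```rust
use actix_files::NamedFile;
use actix_web::{http::StatusCode, Responder as _};

// Illustrative only: exercise the getters documented above, then configure the
// response. Assumes "Cargo.toml" exists and `mime = "0.3"` is a dependency.
async fn tweak_named_file() -> std::io::Result<()> {
    let file = NamedFile::open_async("Cargo.toml").await?;

    // read-only accessors
    assert!(file.modified().is_some());
    let _meta = file.metadata();
    let _cd = file.content_disposition();
    assert!(file.content_encoding().is_none());

    // builder-style configuration; the status code is now set via `customize()`
    let _customized = file
        .set_content_type(mime::TEXT_PLAIN_UTF_8)
        .customize()
        .with_status(StatusCode::NOT_FOUND);

    Ok(())
}
```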
@ -284,15 +321,15 @@ impl NamedFile {
/// filename is taken from the path provided in the `open` method after converting it to UTF-8
/// (using `to_string_lossy`).
#[inline]
pub fn set_content_disposition(mut self, cd: header::ContentDisposition) -> Self {
pub fn set_content_disposition(mut self, cd: ContentDisposition) -> Self {
self.content_disposition = cd;
self.flags.insert(Flags::CONTENT_DISPOSITION);
self
}
/// Disable `Content-Disposition` header.
/// Disables `Content-Disposition` header.
///
/// By default Content-Disposition` header is enabled.
/// By default, the `Content-Disposition` header is sent.
#[inline]
pub fn disable_content_disposition(mut self) -> Self {
self.flags.remove(Flags::CONTENT_DISPOSITION);
@ -491,11 +528,26 @@ impl NamedFile {
length = ranges[0].length;
offset = ranges[0].start;
// When a Content-Encoding header is present in a 206 partial content response
// for video content, it prevents browser video players from starting playback
// before loading the whole video and also prevents seeking.
//
// See: https://github.com/actix/actix-web/issues/2815
//
// The assumption of this fix is that the video player knows to not send an
// Accept-Encoding header for this request and that downstream middleware will
// not attempt compression for requests without it.
//
// TODO: Solve question around what to do if self.encoding is set and partial
// range is requested. Reject request? Ignoring self.encoding seems wrong, too.
// In practice, it should not come up.
if req.headers().contains_key(&header::ACCEPT_ENCODING) {
// don't allow compression middleware to modify partial content
res.insert_header((
header::CONTENT_ENCODING,
HeaderValue::from_static("identity"),
));
}
res.insert_header((
header::CONTENT_RANGE,

View File

@ -1,11 +1,11 @@
use actix_files::Files;
use actix_files::{Files, NamedFile};
use actix_web::{
http::{
header::{self, HeaderValue},
StatusCode,
},
test::{self, TestRequest},
App,
web, App,
};
#[actix_web::test]
@ -36,3 +36,31 @@ async fn test_utf8_file_contents() {
Some(&HeaderValue::from_static("text/plain")),
);
}
#[actix_web::test]
async fn partial_range_response_encoding() {
let srv = test::init_service(App::new().default_service(web::to(|| async {
NamedFile::open_async("./tests/test.binary").await.unwrap()
})))
.await;
// range request without accept-encoding returns no content-encoding header
let req = TestRequest::with_uri("/")
.append_header((header::RANGE, "bytes=10-20"))
.to_request();
let res = test::call_service(&srv, req).await;
assert_eq!(res.status(), StatusCode::PARTIAL_CONTENT);
assert!(!res.headers().contains_key(header::CONTENT_ENCODING));
// range request with accept-encoding returns a content-encoding header
let req = TestRequest::with_uri("/")
.append_header((header::RANGE, "bytes=10-20"))
.append_header((header::ACCEPT_ENCODING, "identity"))
.to_request();
let res = test::call_service(&srv, req).await;
assert_eq!(res.status(), StatusCode::PARTIAL_CONTENT);
assert_eq!(
res.headers().get(header::CONTENT_ENCODING).unwrap(),
"identity"
);
}

View File

@ -1,6 +1,27 @@
# Changes
## Unreleased - 2021-xx-xx
## Unreleased - 2022-xx-xx
- Minimum supported Rust version (MSRV) is now 1.59.
## 3.0.0 - 2022-07-24
- `TestServer::stop` is now async and will wait for the server and system to shutdown. [#2442]
- Added `TestServer::client_headers` method. [#2097]
- Update `actix-server` dependency to `2`.
- Update `actix-tls` dependency to `3`.
- Update `bytes` to `1.0`. [#1813]
- Minimum supported Rust version (MSRV) is now 1.57.
[#2442]: https://github.com/actix/actix-web/pull/2442
[#2097]: https://github.com/actix/actix-web/pull/2097
[#1813]: https://github.com/actix/actix-web/pull/1813
<details>
<summary>3.0.0 Pre-Releases</summary>
## 3.0.0-beta.13 - 2022-02-16
- No significant changes since `3.0.0-beta.12`.
## 3.0.0-beta.12 - 2022-01-31
@ -64,6 +85,7 @@
[#1813]: https://github.com/actix/actix-web/pull/1813
</details>
## 2.1.0 - 2020-11-25
- Add ability to set address for `TestServer`. [#1645]

View File

@ -1,6 +1,6 @@
[package]
name = "actix-http-test"
version = "3.0.0-beta.12"
version = "3.0.0"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Various helpers for Actix applications to use during testing"
keywords = ["http", "web", "framework", "async", "futures"]
@ -29,13 +29,13 @@ default = []
openssl = ["tls-openssl", "awc/openssl"]
[dependencies]
actix-service = "2.0.0"
actix-codec = "0.4.1"
actix-tls = "3.0.0"
actix-utils = "3.0.0"
actix-service = "2"
actix-codec = "0.5"
actix-tls = "3"
actix-utils = "3"
actix-rt = "2.2"
actix-server = "2"
awc = { version = "3.0.0-beta.20", default-features = false }
awc = { version = "3", default-features = false }
base64 = "0.13"
bytes = "1"
@ -51,5 +51,5 @@ tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
tokio = { version = "1.8.4", features = ["sync"] }
[dev-dependencies]
actix-web = { version = "4.0.0-rc.1", default-features = false, features = ["cookies"] }
actix-http = "3.0.0-rc.1"
actix-web = { version = "4", default-features = false, features = ["cookies"] }
actix-http = "3"

View File

@ -3,11 +3,11 @@
> Various helpers for Actix applications to use during testing.
[![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test)
[![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.0.0-beta.12)](https://docs.rs/actix-http-test/3.0.0-beta.12)
[![Version](https://img.shields.io/badge/rustc-1.54+-ab6000.svg)](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
[![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.0.0)](https://docs.rs/actix-http-test/3.0.0)
![Version](https://img.shields.io/badge/rustc-1.59+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http-test)
<br>
[![Dependency Status](https://deps.rs/crate/actix-http-test/3.0.0-beta.12/status.svg)](https://deps.rs/crate/actix-http-test/3.0.0-beta.12)
[![Dependency Status](https://deps.rs/crate/actix-http-test/3.0.0/status.svg)](https://deps.rs/crate/actix-http-test/3.0.0)
[![Download](https://img.shields.io/crates/d/actix-http-test.svg)](https://crates.io/crates/actix-http-test)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)

View File

@ -1,6 +1,380 @@
# Changes
## Unreleased - 2021-xx-xx
## Unreleased - 2022-xx-xx
## 3.2.2 - 2022-09-11
### Changed
- Minimum supported Rust version (MSRV) is now 1.59 due to transitive `time` dependency.
### Fixed
- Avoid possibility of dispatcher getting stuck while back-pressuring I/O. [#2369]
[#2369]: https://github.com/actix/actix-web/pull/2369
## 3.2.1 - 2022-07-02
### Fixed
- Fix parsing ambiguity in Transfer-Encoding and Content-Length headers for HTTP/1.0 requests. [#2794]
[#2794]: https://github.com/actix/actix-web/pull/2794
## 3.2.0 - 2022-06-30
### Changed
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
### Fixed
- Websocket parser no longer throws endless overflow errors after receiving an oversized frame. [#2790]
- Retain previously set Vary headers when using compression encoder. [#2798]
[#2790]: https://github.com/actix/actix-web/pull/2790
[#2798]: https://github.com/actix/actix-web/pull/2798
## 3.1.0 - 2022-06-11
### Changed
- Minimum supported Rust version (MSRV) is now 1.56 due to transitive `hashbrown` dependency.
### Fixed
- Revert broken fix in [#2624] that caused erroneous 500 error responses. Temporarily re-introduces [#2357] bug. [#2779]
[#2624]: https://github.com/actix/actix-web/pull/2624
[#2357]: https://github.com/actix/actix-web/issues/2357
[#2779]: https://github.com/actix/actix-web/pull/2779
## 3.0.4 - 2022-03-09
### Fixed
- Document on docs.rs with `ws` feature enabled.
## 3.0.3 - 2022-03-08
### Fixed
- Allow spaces between header name and colon when parsing responses. [#2684]
[#2684]: https://github.com/actix/actix-web/pull/2684
## 3.0.2 - 2022-03-05
### Fixed
- Fix encoding camel-case header names with more than one hyphen. [#2683]
[#2683]: https://github.com/actix/actix-web/pull/2683
## 3.0.1 - 2022-03-04
- Fix panic in H1 dispatcher when pipelining is used with keep-alive. [#2678]
[#2678]: https://github.com/actix/actix-web/issues/2678
## 3.0.0 - 2022-02-25
### Dependencies
- Updated `actix-*` to Tokio v1-based versions. [#1813]
- Updated `bytes` to `1.0`. [#1813]
- Updated `h2` to `0.3`. [#1813]
- Updated `rustls` to `0.20.0`. [#2414]
- Updated `language-tags` to `0.3`.
- Updated `tokio` to `1`.
### Added
- Crate Features:
- `ws`; disabled by default. [#2618]
- `http2`; disabled by default. [#2618]
- `compress-brotli`; disabled by default. [#2618]
- `compress-gzip`; disabled by default. [#2618]
- `compress-zstd`; disabled by default. [#2618]
- Functions:
- `body::to_bytes` for async collecting message body into Bytes. [#2158]
- Traits:
- `TryIntoHeaderPair`; allows using typed and untyped headers in the same methods. [#1869]
- Types:
- `body::BoxBody`; a boxed message body with boxed errors. [#2183]
- `body::EitherBody` enum. [#2468]
- `body::None` struct. [#2468]
- Re-export `http` crate's `Error` type as `error::HttpError`. [#2171]
- Variants:
- `ContentEncoding::Zstd` along with . [#2244]
- `Protocol::Http3` for future compatibility and also mark `#[non_exhaustive]`. [00ba8d55]
- Methods:
- `ContentEncoding::to_header_value()`. [#2501]
- `header::QualityItem::{max, min}()`. [#2486]
- `header::QualityItem::zero()` that uses `Quality::ZERO`. [#2501]
- `HeaderMap::drain()` as an efficient draining iterator. [#1964]
- `HeaderMap::len_keys()` has the behavior of the old `len` method. [#1964]
- `MessageBody::boxed` trait method for wrapping boxing types efficiently. [#2520]
- `MessageBody::try_into_bytes` trait method, with default implementation, for optimizations on body types that complete in exactly one poll. [#2522]
- `Request::conn_data()`. [#2491]
- `Request::take_conn_data()`. [#2491]
- `Request::take_req_data()`. [#2487]
- `Response::{ok, bad_request, not_found, internal_server_error}()`. [#2159]
- `Response::into_body()` that consumes response and returns body type. [#2201]
- `Response::map_into_boxed_body()`. [#2468]
- `ResponseBuilder::append_header()` method which allows using typed and untyped headers. [#1869]
- `ResponseBuilder::insert_header()` method which allows using typed and untyped headers. [#1869]
- `ResponseHead::set_camel_case_headers()`. [#2587]
- `TestRequest::insert_header()` method which allows using typed and untyped headers. [#1869]
- Implementations:
- Implement `Clone for ws::HandshakeError`. [#2468]
- Implement `Clone` for `body::AnyBody<S> where S: Clone`. [#2448]
- Implement `Clone` for `RequestHead`. [#2487]
- Implement `Clone` for `ResponseHead`. [#2585]
- Implement `Copy` for `QualityItem<T> where T: Copy`. [#2501]
- Implement `Default` for `ContentEncoding`. [#1912]
- Implement `Default` for `HttpServiceBuilder`. [#2611]
- Implement `Default` for `KeepAlive`. [#2611]
- Implement `Default` for `Response`. [#2201]
- Implement `Default` for `ws::Codec`. [#1920]
- Implement `Display` for `header::Quality`. [#2486]
- Implement `Eq` for `header::ContentEncoding`. [#2501]
- Implement `ExactSizeIterator` and `FusedIterator` for all `HeaderMap` iterators. [#2470]
- Implement `From<Duration>` for `KeepAlive`. [#2611]
- Implement `From<Option<Duration>>` for `KeepAlive`. [#2611]
- Implement `From<Vec<u8>>` for `Response<Vec<u8>>`. [#2625]
- Implement `FromStr` for `ContentEncoding`. [#1912]
- Implement `Header` for `ContentEncoding`. [#1912]
- Implement `IntoHeaderValue` for `ContentEncoding`. [#1912]
- Implement `IntoIterator` for `HeaderMap`. [#1964]
- Implement `MessageBody` for `bytestring::ByteString`. [#2468]
- Implement `MessageBody` for `Pin<Box<T>> where T: MessageBody`. [#2152]
- Misc:
- Re-export `StatusCode`, `Method`, `Version` and `Uri` at the crate root. [#2171]
- Re-export `ContentEncoding` and `ConnectionType` at the crate root. [#2171]
- `Quality::ZERO` associated constant equivalent to `q=0`. [#2501]
- `header::Quality::{MAX, MIN}` associated constants equivalent to `q=1` and `q=0.001`, respectively. [#2486]
- Timeout for canceling HTTP/2 server side connection handshake. Configurable with `ServiceConfig::client_timeout`; defaults to 5 seconds. [#2483]
- `#[must_use]` for `ws::Codec` to prevent subtle bugs. [#1920]
### Changed
- Traits:
- Rename `IntoHeaderValue => TryIntoHeaderValue`. [#2510]
- `MessageBody` now has an associated `Error` type. [#2183]
- Types:
- `Protocol` enum is now marked `#[non_exhaustive]`.
- `error::DispatcherError` enum is now marked `#[non_exhaustive]`. [#2624]
- `ContentEncoding` is now marked `#[non_exhaustive]`. [#2377]
- Error enums are marked `#[non_exhaustive]`. [#2161]
- Rename `PayloadStream` to `BoxedPayloadStream`. [#2545]
- The body type parameter of `Response` no longer has a default. [#2152]
- Enum Variants:
- Rename `ContentEncoding::{Br => Brotli}`. [#2501]
- `Payload` inner fields are now named. [#2545]
- `ws::Message::Text` now contains a `bytestring::ByteString`. [#1864]
- Methods:
- Rename `ServiceConfig::{client_timer_expire => client_request_deadline}`. [#2611]
- Rename `ServiceConfig::{client_disconnect_timer => client_disconnect_deadline}`. [#2611]
- Rename `h1::Codec::{keepalive => keep_alive}`. [#2611]
- Rename `h1::Codec::{keepalive_enabled => keep_alive_enabled}`. [#2611]
- Rename `h1::ClientCodec::{keepalive => keep_alive}`. [#2611]
- Rename `h1::ClientPayloadCodec::{keepalive => keep_alive}`. [#2611]
- Rename `header::EntityTag::{weak => new_weak, strong => new_strong}`. [#2565]
- Rename `TryIntoHeaderValue::{try_into => try_into_value}` to avoid ambiguity with std `TryInto` trait. [#1894]
- Deadline methods in `ServiceConfig` now return `std::time::Instant`s instead of Tokio's wrapper type. [#2611]
- Places in `Response` where `ResponseBody<B>` was received or returned now simply use `B`. [#2201]
- `encoding::Encoder::response` now returns `AnyBody<Encoder<B>>`. [#2448]
- `Extensions::insert` returns replaced item. [#1904]
- `HeaderMap::get_all` now returns a `std::slice::Iter`. [#2527]
- `HeaderMap::insert` now returns iterator of removed values. [#1964]
- `HeaderMap::len` now returns number of values instead of number of keys. [#1964]
- `HeaderMap::remove` now returns iterator of removed values. [#1964]
- `ResponseBuilder::body(B)` now returns `Response<EitherBody<B>>`. [#2468]
- `ResponseBuilder::content_type` now takes an `impl TryIntoHeaderValue` to support using typed `mime` types. [#1894]
- `ResponseBuilder::finish()` now returns `Response<EitherBody<()>>`. [#2468]
- `ResponseBuilder::json` now takes `impl Serialize`. [#2052]
- `ResponseBuilder::message_body` now returns a `Result`. [#2201]
- `ServiceConfig::keep_alive` now returns a `KeepAlive`. [#2611]
- `ws::hash_key` now returns array. [#2035]
- Trait Implementations:
- Implementation of `Stream` for `Payload` no longer requires the `Stream` variant be `Unpin`. [#2545]
- Implementation of `Future` for `h1::SendResponse` no longer requires the body type be `Unpin`. [#2545]
- Implementation of `Stream` for `encoding::Decoder` no longer requires the stream type be `Unpin`. [#2545]
- Implementation of `From` for error types now return a `Response<BoxBody>`. [#2468]
- Misc:
- `header` module is now public. [#2171]
- `uri` module is now public. [#2171]
- Request-local data container is no longer part of a `RequestHead`. Instead it is a distinct part of a `Request`. [#2487]
- All error trait bounds in server service builders have changed from `Into<Error>` to `Into<Response<BoxBody>>`. [#2253]
- All error trait bounds in message body and stream impls changed from `Into<Error>` to `Into<Box<dyn std::error::Error>>`. [#2253]
- Guarantee ordering of `header::GetAll` iterator to be same as insertion order. [#2467]
- Connection data set through the `on_connect_ext` callbacks is now accessible only from the new `Request::conn_data()` method. [#2491]
- Brotli (de)compression support is now provided by the `brotli` crate. [#2538]
- Minimum supported Rust version (MSRV) is now 1.54.
### Fixed
- A `Vary` header is now correctly sent along with compressed content. [#2501]
- HTTP/1.1 dispatcher correctly uses client request timeout. [#2611]
- Fixed issue where handlers that took a payload but then dropped it without reading to EOF would cause keep-alive connections to become stuck. [#2624]
- `ContentEncoding`'s `Identity` variant can now be parsed from a string. [#2501]
- `HttpServer::{listen_rustls(), bind_rustls()}` now honor the ALPN protocols in the configuration parameter. [#2226]
- Remove unnecessary `Into<Error>` bound on `Encoder` body types. [#2375]
- Remove unnecessary `Unpin` bound on `ResponseBuilder::streaming`. [#2253]
- `BodyStream` and `SizedStream` are no longer restricted to `Unpin` types. [#2152]
- Fixed slice creation pointing to potential uninitialized data on h1 encoder. [#2364]
- Fixed quality parse error in Accept-Encoding header. [#2344]
### Removed
- Crate Features:
- `compress` feature. [#2065]
- `cookies` feature. [#2065]
- `trust-dns` feature. [#2425]
- `actors` optional feature and trait implementation for `actix` types. [#1969]
- Functions:
- `header::qitem` helper. Replaced with `header::QualityItem::max`. [#2486]
- Types:
- `body::Body`; replaced with `EitherBody` and `BoxBody`. [#2468]
- `body::ResponseBody`. [#2446]
- `ConnectError::SslHandshakeError` and re-export of `HandshakeError`. Due to the removal of this type from `tokio-openssl` crate. OpenSSL handshake error now returns `ConnectError::SslError`. [#1813]
- `error::Canceled` re-export. [#1994]
- `error::Result` type alias. [#2201]
- `error::BlockingError` [#2660]
- `InternalError` and all the error types it constructed were moved up to `actix-web`. [#2215]
- Typed HTTP headers; they have moved up to `actix-web`. [#2094]
- Re-export of `http` crate's `HeaderMap` types in addition to ours. [#2171]
- Enum Variants:
- `body::BodySize::Empty`; an empty body can now only be represented as a `Sized(0)` variant. [#2446]
- `ContentEncoding::Auto`. [#2501]
- `EncoderError::Boxed`. [#2446]
- Methods:
- `ContentEncoding::is_compression()`. [#2501]
- `h1::Payload::readany()`. [#2545]
- `HttpMessage::cookie[s]()` trait methods. [#2065]
- `HttpServiceBuilder::new()`; use `default` instead. [#2611]
- `on_connect` (previously deprecated) methods have been removed; use `on_connect_ext`. [#1857]
- `Response::build_from()`. [#2159]
- `Response::error()` [#2205]
- `Response::take_body()` and old `Response::into_body()` method that casted body type. [#2201]
- `Response`'s status code builders. [#2159]
- `ResponseBuilder::{if_true, if_some}()` (previously deprecated). [#2148]
- `ResponseBuilder::{set, set_header}()`; use `ResponseBuilder::insert_header()`. [#1869]
- `ResponseBuilder::extensions[_mut]()`. [#2585]
- `ResponseBuilder::header()`; use `ResponseBuilder::append_header()`. [#1869]
- `ResponseBuilder::json()`. [#2148]
- `ResponseBuilder::json2()`. [#1903]
- `ResponseBuilder::streaming()`. [#2468]
- `ResponseHead::extensions[_mut]()`. [#2585]
- `ServiceConfig::{client_timer, keep_alive_timer}()`. [#2611]
- `TestRequest::with_hdr()`; use `TestRequest::default().insert_header()`. [#1869]
- `TestRequest::with_header()`; use `TestRequest::default().insert_header()`. [#1869]
- Trait implementations:
- Implementation of `Copy` for `ws::Codec`. [#1920]
- Implementation of `From<Option<usize>> for KeepAlive`; use `Duration`s instead. [#2611]
- Implementation of `From<serde_json::Value>` for `Body`. [#2148]
- Implementation of `From<usize> for KeepAlive`; use `Duration`s instead. [#2611]
- Implementation of `Future` for `Response`. [#2201]
- Implementation of `Future` for `ResponseBuilder`. [#2468]
- Implementation of `Into<Error>` for `Response<Body>`. [#2215]
- Implementation of `Into<Error>` for `ResponseBuilder`. [#2215]
- Implementation of `ResponseError` for `actix_utils::timeout::TimeoutError`. [#2127]
- Implementation of `ResponseError` for `CookieParseError`. [#2065]
- Implementation of `TryFrom<u16>` for `header::Quality`. [#2486]
- Misc:
- `http` module; most everything it contained is exported at the crate root. [#2488]
- `cookies` module (re-export). [#2065]
- `client` module. Connector types now live in `awc`. [#2425]
- `error` field from `Response`. [#2205]
- `downcast` and `downcast_get_type_id` macros. [#2291]
- Down-casting for `MessageBody` types; use standard `Any` trait. [#2183]
[#1813]: https://github.com/actix/actix-web/pull/1813
[#1845]: https://github.com/actix/actix-web/pull/1845
[#1857]: https://github.com/actix/actix-web/pull/1857
[#1864]: https://github.com/actix/actix-web/pull/1864
[#1869]: https://github.com/actix/actix-web/pull/1869
[#1878]: https://github.com/actix/actix-web/pull/1878
[#1894]: https://github.com/actix/actix-web/pull/1894
[#1903]: https://github.com/actix/actix-web/pull/1903
[#1904]: https://github.com/actix/actix-web/pull/1904
[#1912]: https://github.com/actix/actix-web/pull/1912
[#1920]: https://github.com/actix/actix-web/pull/1920
[#1964]: https://github.com/actix/actix-web/pull/1964
[#1969]: https://github.com/actix/actix-web/pull/1969
[#1981]: https://github.com/actix/actix-web/pull/1981
[#1994]: https://github.com/actix/actix-web/pull/1994
[#2035]: https://github.com/actix/actix-web/pull/2035
[#2052]: https://github.com/actix/actix-web/pull/2052
[#2065]: https://github.com/actix/actix-web/pull/2065
[#2094]: https://github.com/actix/actix-web/pull/2094
[#2127]: https://github.com/actix/actix-web/pull/2127
[#2148]: https://github.com/actix/actix-web/pull/2148
[#2152]: https://github.com/actix/actix-web/pull/2152
[#2158]: https://github.com/actix/actix-web/pull/2158
[#2159]: https://github.com/actix/actix-web/pull/2159
[#2161]: https://github.com/actix/actix-web/pull/2161
[#2171]: https://github.com/actix/actix-web/pull/2171
[#2183]: https://github.com/actix/actix-web/pull/2183
[#2196]: https://github.com/actix/actix-web/pull/2196
[#2201]: https://github.com/actix/actix-web/pull/2201
[#2205]: https://github.com/actix/actix-web/pull/2205
[#2215]: https://github.com/actix/actix-web/pull/2215
[#2244]: https://github.com/actix/actix-web/pull/2244
[#2250]: https://github.com/actix/actix-web/pull/2250
[#2253]: https://github.com/actix/actix-web/pull/2253
[#2291]: https://github.com/actix/actix-web/pull/2291
[#2344]: https://github.com/actix/actix-web/pull/2344
[#2364]: https://github.com/actix/actix-web/pull/2364
[#2375]: https://github.com/actix/actix-web/pull/2375
[#2377]: https://github.com/actix/actix-web/pull/2377
[#2414]: https://github.com/actix/actix-web/pull/2414
[#2425]: https://github.com/actix/actix-web/pull/2425
[#2442]: https://github.com/actix/actix-web/pull/2442
[#2446]: https://github.com/actix/actix-web/pull/2446
[#2448]: https://github.com/actix/actix-web/pull/2448
[#2456]: https://github.com/actix/actix-web/pull/2456
[#2467]: https://github.com/actix/actix-web/pull/2467
[#2468]: https://github.com/actix/actix-web/pull/2468
[#2470]: https://github.com/actix/actix-web/pull/2470
[#2474]: https://github.com/actix/actix-web/pull/2474
[#2483]: https://github.com/actix/actix-web/pull/2483
[#2486]: https://github.com/actix/actix-web/pull/2486
[#2487]: https://github.com/actix/actix-web/pull/2487
[#2488]: https://github.com/actix/actix-web/pull/2488
[#2491]: https://github.com/actix/actix-web/pull/2491
[#2497]: https://github.com/actix/actix-web/pull/2497
[#2501]: https://github.com/actix/actix-web/pull/2501
[#2510]: https://github.com/actix/actix-web/pull/2510
[#2520]: https://github.com/actix/actix-web/pull/2520
[#2522]: https://github.com/actix/actix-web/pull/2522
[#2527]: https://github.com/actix/actix-web/pull/2527
[#2538]: https://github.com/actix/actix-web/pull/2538
[#2545]: https://github.com/actix/actix-web/pull/2545
[#2565]: https://github.com/actix/actix-web/pull/2565
[#2585]: https://github.com/actix/actix-web/pull/2585
[#2587]: https://github.com/actix/actix-web/pull/2587
[#2611]: https://github.com/actix/actix-web/pull/2611
[#2618]: https://github.com/actix/actix-web/pull/2618
[#2624]: https://github.com/actix/actix-web/pull/2624
[#2625]: https://github.com/actix/actix-web/pull/2625
[#2660]: https://github.com/actix/actix-web/pull/2660
[00ba8d55]: https://github.com/actix/actix-web/commit/00ba8d55492284581695d824648590715a8bd386
<details>
<summary>3.0.0 Pre-Releases</summary>
## 3.0.0-rc.4 - 2022-02-22
### Fixed
- Fix h1 dispatcher panic. [1ce58ecb]
[1ce58ecb]: https://github.com/actix/actix-web/commit/1ce58ecb305c60e51db06e6c913b7a1344e229ca
## 3.0.0-rc.3 - 2022-02-16
- No significant changes since `3.0.0-rc.2`.
## 3.0.0-rc.2 - 2022-02-08
### Added
- Implement `From<Vec<u8>>` for `Response<Vec<u8>>`. [#2625]
### Changed
- `error::DispatcherError` enum is now marked `#[non_exhaustive]`. [#2624]
### Fixed
- Issue where handlers that took a payload but then dropped it without reading to EOF would cause keep-alive connections to become stuck. [#2624]
[#2624]: https://github.com/actix/actix-web/pull/2624
[#2625]: https://github.com/actix/actix-web/pull/2625
## 3.0.0-rc.1 - 2022-01-31
@ -80,7 +454,7 @@
## 3.0.0-beta.17 - 2021-12-27
### Changes
### Changed
- `HeaderMap::get_all` now returns a `std::slice::Iter`. [#2527]
- `Payload` inner fields are now named. [#2545]
- `impl Stream` for `Payload` no longer requires the `Stream` variant be `Unpin`. [#2545]
@ -303,7 +677,7 @@
- `Response::{ok, bad_request, not_found, internal_server_error}`. [#2159]
- Helper `body::to_bytes` for async collecting message body into Bytes. [#2158]
### Changes
### Changed
- The type parameter of `Response` no longer has a default. [#2152]
- The `Message` variant of `body::Body` is now `Pin<Box<dyn MessageBody>>`. [#2152]
- `BodyStream` and `SizedStream` are no longer restricted to Unpin types. [#2152]
@ -447,6 +821,8 @@
[#1864]: https://github.com/actix/actix-web/pull/1864
[#1878]: https://github.com/actix/actix-web/pull/1878
</details>
## 2.2.2 - 2022-01-21
### Changed

View File

@ -1,6 +1,6 @@
[package]
name = "actix-http"
version = "3.0.0-rc.1"
version = "3.2.2"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
@ -20,7 +20,7 @@ edition = "2018"
[package.metadata.docs.rs]
# features that docs.rs will build with
features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"]
features = ["http2", "ws", "openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"]
[lib]
name = "actix_http"
@ -37,7 +37,7 @@ ws = [
"local-channel",
"base64",
"rand",
"sha-1",
"sha1",
]
# TLS via OpenSSL
@ -57,7 +57,7 @@ __compress = []
[dependencies]
actix-service = "2"
actix-codec = "0.4.1"
actix-codec = "0.5"
actix-utils = "3"
actix-rt = { version = "2.2", default-features = false }
@ -73,11 +73,11 @@ httparse = "1.5.1"
httpdate = "1.0.1"
itoa = "1"
language-tags = "0.3"
log = "0.4"
mime = "0.3"
percent-encoding = "2.1"
pin-project-lite = "0.2"
smallvec = "1.6.1"
tracing = { version = "0.1.30", default-features = false, features = ["log"] }
# http2
h2 = { version = "0.3.9", optional = true }
@ -86,21 +86,21 @@ h2 = { version = "0.3.9", optional = true }
local-channel = { version = "0.1", optional = true }
base64 = { version = "0.13", optional = true }
rand = { version = "0.8", optional = true }
sha-1 = { version = "0.10", optional = true }
sha1 = { version = "0.10", optional = true }
# openssl/rustls
actix-tls = { version = "3.0.0", default-features = false, optional = true }
actix-tls = { version = "3", default-features = false, optional = true }
# compress-*
brotli = { version = "3.3.3", optional = true }
flate2 = { version = "1.0.13", optional = true }
zstd = { version = "0.9", optional = true }
zstd = { version = "0.11", optional = true }
[dev-dependencies]
actix-http-test = { version = "3.0.0-beta.12", features = ["openssl"] }
actix-http-test = { version = "3", features = ["openssl"] }
actix-server = "2"
actix-tls = { version = "3.0.0", features = ["openssl"] }
actix-web = "4.0.0-rc.1"
actix-tls = { version = "3", features = ["openssl"] }
actix-web = "4"
async-stream = "0.3"
criterion = { version = "0.3", features = ["html_reports"] }
@ -108,9 +108,10 @@ env_logger = "0.9"
futures-util = { version = "0.3.7", default-features = false, features = ["alloc"] }
memchr = "2.4"
once_cell = "1.9"
rcgen = "0.8"
rcgen = "0.9"
regex = "1.3"
rustls-pemfile = "0.2"
rustversion = "1"
rustls-pemfile = "1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
static_assertions = "1"
@ -120,7 +121,7 @@ tokio = { version = "1.8.4", features = ["net", "rt", "macros"] }
[[example]]
name = "ws"
required-features = ["rustls"]
required-features = ["ws", "rustls"]
[[bench]]
name = "write-camel-case"

View File

@ -3,11 +3,11 @@
> HTTP primitives for the Actix ecosystem.
[![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http)
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.0.0-rc.1)](https://docs.rs/actix-http/3.0.0-rc.1)
[![Version](https://img.shields.io/badge/rustc-1.54+-ab6000.svg)](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.2.2)](https://docs.rs/actix-http/3.2.2)
![Version](https://img.shields.io/badge/rustc-1.59+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-http/3.0.0-rc.1/status.svg)](https://deps.rs/crate/actix-http/3.0.0-rc.1)
[![dependency status](https://deps.rs/crate/actix-http/3.2.2/status.svg)](https://deps.rs/crate/actix-http/3.2.2)
[![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
@ -25,7 +25,7 @@ use actix_http::{HttpService, Response};
use actix_server::Server;
use futures_util::future;
use http::header::HeaderValue;
use log::info;
use tracing::info;
#[actix_rt::main]
async fn main() -> io::Result<()> {

View File

@ -114,11 +114,12 @@ mod _original {
use std::mem::MaybeUninit;
pub fn parse_headers(src: &mut BytesMut) -> usize {
#![allow(clippy::uninit_assumed_init)]
#![allow(invalid_value, clippy::uninit_assumed_init)]
let mut headers: [HeaderIndex; MAX_HEADERS] =
unsafe { MaybeUninit::uninit().assume_init() };
#[allow(invalid_value)]
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] =
unsafe { MaybeUninit::uninit().assume_init() };

View File

@ -18,7 +18,8 @@ async fn main() -> std::io::Result<()> {
HttpService::build()
// pass the app to service builder
// map_config is used to map App's configuration to ServiceBuilder
.finish(map_config(app, |_| AppConfig::default()))
// h1 will configure server to only use HTTP/1.1
.h1(map_config(app, |_| AppConfig::default()))
.tcp()
})?
.run()

View File

@ -5,6 +5,7 @@ use actix_server::Server;
use bytes::BytesMut;
use futures_util::StreamExt as _;
use http::header::HeaderValue;
use tracing::info;
#[actix_rt::main]
async fn main() -> io::Result<()> {
@ -22,7 +23,7 @@ async fn main() -> io::Result<()> {
body.extend_from_slice(&item?);
}
log::info!("request body: {:?}", body);
info!("request body: {:?}", body);
let res = Response::build(StatusCode::OK)
.insert_header(("x-head", HeaderValue::from_static("dummy value!")))

View File

@ -1,9 +1,8 @@
use std::{convert::Infallible, io, time::Duration};
use actix_http::{
header::HeaderValue, HttpMessage, HttpService, Request, Response, StatusCode,
};
use actix_http::{header::HeaderValue, HttpService, Request, Response, StatusCode};
use actix_server::Server;
use tracing::info;
#[actix_rt::main]
async fn main() -> io::Result<()> {
@ -18,12 +17,12 @@ async fn main() -> io::Result<()> {
ext.insert(42u32);
})
.finish(|req: Request| async move {
log::info!("{:?}", req);
info!("{:?}", req);
let mut res = Response::build(StatusCode::OK);
res.insert_header(("x-head", HeaderValue::from_static("dummy value!")));
let forty_two = req.extensions().get::<u32>().unwrap().to_string();
let forty_two = req.conn_data::<u32>().unwrap().to_string();
res.insert_header((
"x-forty-two",
HeaderValue::from_str(&forty_two).unwrap(),

View File

@ -12,6 +12,7 @@ use actix_http::{body::BodyStream, HttpService, Response};
use actix_server::Server;
use async_stream::stream;
use bytes::Bytes;
use tracing::info;
#[actix_rt::main]
async fn main() -> io::Result<()> {
@ -21,7 +22,7 @@ async fn main() -> io::Result<()> {
.bind("streaming-error", ("127.0.0.1", 8080), || {
HttpService::build()
.finish(|req| async move {
log::info!("{:?}", req);
info!("{:?}", req);
let res = Response::ok();
Ok::<_, Infallible>(res.set_body(BodyStream::new(stream! {

View File

@ -17,6 +17,7 @@ use actix_server::Server;
use bytes::{Bytes, BytesMut};
use bytestring::ByteString;
use futures_core::{ready, Stream};
use tracing::{info, trace};
#[actix_rt::main]
async fn main() -> io::Result<()> {
@ -34,13 +35,13 @@ async fn main() -> io::Result<()> {
}
async fn handler(req: Request) -> Result<Response<BodyStream<Heartbeat>>, Error> {
log::info!("handshaking");
info!("handshaking");
let mut res = ws::handshake(req.head())?;
// handshake will always fail under HTTP/2
log::info!("responding");
Ok(res.message_body(BodyStream::new(Heartbeat::new(ws::Codec::new())))?)
info!("responding");
res.message_body(BodyStream::new(Heartbeat::new(ws::Codec::new())))
}
struct Heartbeat {
@ -61,7 +62,7 @@ impl Stream for Heartbeat {
type Item = Result<Bytes, Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
log::trace!("poll");
trace!("poll");
ready!(self.as_mut().interval.poll_tick(cx));

View File

@ -80,7 +80,7 @@ mod tests {
use futures_core::ready;
use futures_util::{stream, FutureExt as _};
use pin_project_lite::pin_project;
use static_assertions::{assert_impl_all, assert_not_impl_all};
use static_assertions::{assert_impl_all, assert_not_impl_any};
use super::*;
use crate::body::to_bytes;
@ -91,10 +91,10 @@ mod tests {
assert_impl_all!(BodyStream<stream::Empty<Result<Bytes, Infallible>>>: MessageBody);
assert_impl_all!(BodyStream<stream::Repeat<Result<Bytes, Infallible>>>: MessageBody);
assert_not_impl_all!(BodyStream<stream::Empty<Bytes>>: MessageBody);
assert_not_impl_all!(BodyStream<stream::Repeat<Bytes>>: MessageBody);
assert_not_impl_any!(BodyStream<stream::Empty<Bytes>>: MessageBody);
assert_not_impl_any!(BodyStream<stream::Repeat<Bytes>>: MessageBody);
// crate::Error is not Clone
assert_not_impl_all!(BodyStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
assert_not_impl_any!(BodyStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
#[actix_rt::test]
async fn skips_empty_chunks() {

View File

@ -105,14 +105,13 @@ impl MessageBody for BoxBody {
#[cfg(test)]
mod tests {
use static_assertions::{assert_impl_all, assert_not_impl_all};
use static_assertions::{assert_impl_all, assert_not_impl_any};
use super::*;
use crate::body::to_bytes;
assert_impl_all!(BoxBody: MessageBody, fmt::Debug, Unpin);
assert_not_impl_all!(BoxBody: Send, Sync, Unpin);
assert_impl_all!(BoxBody: fmt::Debug, MessageBody, Unpin);
assert_not_impl_any!(BoxBody: Send, Sync);
#[actix_rt::test]
async fn nested_boxed_body() {

View File

@ -10,6 +10,17 @@ use super::{BodySize, BoxBody, MessageBody};
use crate::Error;
pin_project! {
/// An "either" type specialized for body types.
///
/// It is common, in middleware especially, to conditionally return an inner service's unknown/
/// generic body `B` type or return early with a new response. This type's "right" variant
/// defaults to `BoxBody` since error responses are the common case.
///
/// For example, middleware will often have `type Response = ServiceResponse<EitherBody<B>>`.
/// This means that the inner service's response body type maps to the `Left` variant and the
/// middleware's own error responses use the default `Right` variant of `BoxBody`. Of course,
/// there's no reason it couldn't use `EitherBody<B, String>` instead if its alternative
/// responses have a known type.
#[project = EitherBodyProj]
#[derive(Debug, Clone)]
pub enum EitherBody<L, R = BoxBody> {
@ -22,7 +33,10 @@ pin_project! {
}
impl<L> EitherBody<L, BoxBody> {
/// Creates new `EitherBody` using left variant and boxed right variant.
/// Creates new `EitherBody` left variant with a boxed right variant.
///
/// If the expected `R` type will be inferred and is not `BoxBody` then use the
/// [`left`](Self::left) constructor instead.
#[inline]
pub fn new(body: L) -> Self {
Self::Left { body }
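
A minimal sketch (not taken from the diff) of the pattern this doc comment describes; `forward_or_replace` is a made-up helper name:

```rust
use actix_web::body::{BoxBody, EitherBody, MessageBody};

// Hypothetical helper: either forward the inner service's generic body `B` (the
// `Left` variant) or swap in an early, boxed response body (the default `Right`
// variant, `BoxBody`).
fn forward_or_replace<B: MessageBody>(keep_inner: bool, inner: B) -> EitherBody<B> {
    if keep_inner {
        EitherBody::new(inner)
    } else {
        EitherBody::right(BoxBody::new("early response from middleware"))
    }
}
```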

View File

@ -19,7 +19,7 @@ use super::{BodySize, BoxBody};
/// It is not usually necessary to create custom body types; this trait is already [implemented for
/// a large number of sensible body types](#foreign-impls) including:
/// - Empty body: `()`
/// - Text-based: `String`, `&'static str`, `ByteString`.
/// - Text-based: `String`, `&'static str`, [`ByteString`](https://docs.rs/bytestring/1).
/// - Byte-based: `Bytes`, `BytesMut`, `Vec<u8>`, `&'static [u8]`;
/// - Streams: [`BodyStream`](super::BodyStream), [`SizedStream`](super::SizedStream)
///
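
As a quick illustration of those foreign impls (a sketch, not part of the diff), plain values can be used as bodies and collected with `body::to_bytes`:

```rust
use actix_web::body::{self, BodySize, MessageBody as _};

// Sketch: `String` already implements `MessageBody`, so it can be sized and
// collected directly.
#[actix_web::main]
async fn main() {
    let text = String::from("hello");
    assert_eq!(text.size(), BodySize::Sized(5));

    let bytes = body::to_bytes(text).await.unwrap();
    assert_eq!(&bytes[..], b"hello");
}
```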
@ -481,6 +481,7 @@ mod tests {
assert_poll_next_none!(pl);
}
#[allow(clippy::let_unit_value)]
#[actix_rt::test]
async fn test_unit() {
let pl = ();

View File

@ -76,7 +76,7 @@ mod tests {
use actix_rt::pin;
use actix_utils::future::poll_fn;
use futures_util::stream;
use static_assertions::{assert_impl_all, assert_not_impl_all};
use static_assertions::{assert_impl_all, assert_not_impl_any};
use super::*;
use crate::body::to_bytes;
@ -87,10 +87,10 @@ mod tests {
assert_impl_all!(SizedStream<stream::Empty<Result<Bytes, Infallible>>>: MessageBody);
assert_impl_all!(SizedStream<stream::Repeat<Result<Bytes, Infallible>>>: MessageBody);
assert_not_impl_all!(SizedStream<stream::Empty<Bytes>>: MessageBody);
assert_not_impl_all!(SizedStream<stream::Repeat<Bytes>>: MessageBody);
assert_not_impl_any!(SizedStream<stream::Empty<Bytes>>: MessageBody);
assert_not_impl_any!(SizedStream<stream::Repeat<Bytes>>: MessageBody);
// crate::Error is not Clone
assert_not_impl_all!(SizedStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
assert_not_impl_any!(SizedStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
#[actix_rt::test]
async fn skips_empty_chunks() {

View File

@ -211,6 +211,7 @@ where
/// Finish service configuration and create a HTTP service for HTTP/2 protocol.
#[cfg(feature = "http2")]
#[cfg_attr(docsrs, doc(cfg(feature = "http2")))]
pub fn h2<F, B>(self, service: F) -> crate::h2::H2Service<T, S, B>
where
F: IntoServiceFactory<S, Request>,

View File

@ -35,7 +35,7 @@ impl Default for ServiceConfig {
}
impl ServiceConfig {
/// Create instance of `ServiceConfig`
/// Create instance of `ServiceConfig`.
pub fn new(
keep_alive: KeepAlive,
client_request_timeout: Duration,
@ -104,8 +104,13 @@ impl ServiceConfig {
self.0.date_service.now()
}
pub(crate) fn write_date_header(&self, dst: &mut BytesMut, camel_case: bool) {
let mut buf: [u8; 39] = [0; 39];
/// Writes date header to `dst` buffer.
///
/// Low-level method that utilizes the built-in efficient date service, requiring fewer syscalls
/// than normal. Note that a CRLF (`\r\n`) is included in what is written.
#[doc(hidden)]
pub fn write_date_header(&self, dst: &mut BytesMut, camel_case: bool) {
let mut buf: [u8; 37] = [0; 37];
buf[..6].copy_from_slice(if camel_case { b"Date: " } else { b"date: " });
@ -113,7 +118,7 @@ impl ServiceConfig {
.date_service
.with_date(|date| buf[6..35].copy_from_slice(&date.bytes));
buf[35..].copy_from_slice(b"\r\n\r\n");
buf[35..].copy_from_slice(b"\r\n");
dst.extend_from_slice(&buf);
}
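
For reference, a quick check of the 37-byte buffer size used above (illustrative sketch, not part of the change):

```rust
// "date: " prefix (6) + fixed-width IMF-fixdate (29, written into buf[6..35]) + CRLF (2)
const PREFIX_LEN: usize = "date: ".len();
const HTTP_DATE_LEN: usize = "Sun, 06 Nov 1994 08:49:37 GMT".len();
const CRLF_LEN: usize = "\r\n".len();
const _: () = assert!(PREFIX_LEN + HTTP_DATE_LEN + CRLF_LEN == 37);
```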

View File

@ -19,7 +19,7 @@ use zstd::stream::write::Decoder as ZstdDecoder;
use crate::{
encoding::Writer,
error::{BlockingError, PayloadError},
error::PayloadError,
header::{ContentEncoding, HeaderMap, CONTENT_ENCODING},
};
@ -47,14 +47,17 @@ where
ContentEncoding::Brotli => Some(ContentDecoder::Brotli(Box::new(
brotli::DecompressorWriter::new(Writer::new(), 8_096),
))),
#[cfg(feature = "compress-gzip")]
ContentEncoding::Deflate => Some(ContentDecoder::Deflate(Box::new(
ZlibDecoder::new(Writer::new()),
))),
#[cfg(feature = "compress-gzip")]
ContentEncoding::Gzip => Some(ContentDecoder::Gzip(Box::new(GzDecoder::new(
Writer::new(),
)))),
#[cfg(feature = "compress-zstd")]
ContentEncoding::Zstd => Some(ContentDecoder::Zstd(Box::new(
ZstdDecoder::new(Writer::new()).expect(
@ -98,8 +101,12 @@ where
loop {
if let Some(ref mut fut) = this.fut {
let (chunk, decoder) =
ready!(Pin::new(fut).poll(cx)).map_err(|_| BlockingError)??;
let (chunk, decoder) = ready!(Pin::new(fut).poll(cx)).map_err(|_| {
PayloadError::Io(io::Error::new(
io::ErrorKind::Other,
"Blocking task was cancelled unexpectedly",
))
})??;
*this.decoder = Some(decoder);
this.fut.take();
@ -159,10 +166,13 @@ where
enum ContentDecoder {
#[cfg(feature = "compress-gzip")]
Deflate(Box<ZlibDecoder<Writer>>),
#[cfg(feature = "compress-gzip")]
Gzip(Box<GzDecoder<Writer>>),
#[cfg(feature = "compress-brotli")]
Brotli(Box<brotli::DecompressorWriter<Writer>>),
// We need explicit 'static lifetime here because ZstdDecoder needs a lifetime
// argument, and we use `spawn_blocking` in `Decoder::poll_next` which requires `FnOnce() -> R + Send + 'static`
#[cfg(feature = "compress-zstd")]

View File

@ -17,13 +17,13 @@ use pin_project_lite::pin_project;
#[cfg(feature = "compress-gzip")]
use flate2::write::{GzEncoder, ZlibEncoder};
use tracing::trace;
#[cfg(feature = "compress-zstd")]
use zstd::stream::write::Encoder as ZstdEncoder;
use super::Writer;
use crate::{
body::{self, BodySize, MessageBody},
error::BlockingError,
header::{self, ContentEncoding, HeaderValue, CONTENT_ENCODING},
ResponseHead, StatusCode,
};
@ -173,7 +173,12 @@ where
if let Some(ref mut fut) = this.fut {
let mut encoder = ready!(Pin::new(fut).poll(cx))
.map_err(|_| EncoderError::Blocking(BlockingError))?
.map_err(|_| {
EncoderError::Io(io::Error::new(
io::ErrorKind::Other,
"Blocking task was cancelled unexpectedly",
))
})?
.map_err(EncoderError::Io)?;
let chunk = encoder.take();
@ -252,7 +257,7 @@ fn update_head(encoding: ContentEncoding, head: &mut ResponseHead) {
head.headers_mut()
.insert(header::CONTENT_ENCODING, encoding.to_header_value());
head.headers_mut()
.insert(header::VARY, HeaderValue::from_static("accept-encoding"));
.append(header::VARY, HeaderValue::from_static("accept-encoding"));
head.no_chunking(false);
}
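
A small sketch (not part of the diff) of the `insert` vs `append` difference that motivates the change above:

```rust
use actix_http::header::{HeaderMap, HeaderValue, VARY};

fn main() {
    let mut headers = HeaderMap::new();

    // a handler or earlier layer already set a Vary value
    headers.insert(VARY, HeaderValue::from_static("origin"));

    // `append`, as used in `update_head` above, keeps the existing value and adds another
    headers.append(VARY, HeaderValue::from_static("accept-encoding"));

    assert_eq!(headers.get_all(&VARY).count(), 2);
}
```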
@ -352,7 +357,7 @@ impl ContentEncoder {
ContentEncoder::Brotli(ref mut encoder) => match encoder.write_all(data) {
Ok(_) => Ok(()),
Err(err) => {
log::trace!("Error decoding br encoding: {}", err);
trace!("Error decoding br encoding: {}", err);
Err(err)
}
},
@ -361,7 +366,7 @@ impl ContentEncoder {
ContentEncoder::Gzip(ref mut encoder) => match encoder.write_all(data) {
Ok(_) => Ok(()),
Err(err) => {
log::trace!("Error decoding gzip encoding: {}", err);
trace!("Error decoding gzip encoding: {}", err);
Err(err)
}
},
@ -370,7 +375,7 @@ impl ContentEncoder {
ContentEncoder::Deflate(ref mut encoder) => match encoder.write_all(data) {
Ok(_) => Ok(()),
Err(err) => {
log::trace!("Error decoding deflate encoding: {}", err);
trace!("Error decoding deflate encoding: {}", err);
Err(err)
}
},
@ -379,7 +384,7 @@ impl ContentEncoder {
ContentEncoder::Zstd(ref mut encoder) => match encoder.write_all(data) {
Ok(_) => Ok(()),
Err(err) => {
log::trace!("Error decoding ztsd encoding: {}", err);
trace!("Error decoding ztsd encoding: {}", err);
Err(err)
}
},
@ -400,12 +405,11 @@ fn new_brotli_compressor() -> Box<brotli::CompressorWriter<Writer>> {
#[derive(Debug, Display)]
#[non_exhaustive]
pub enum EncoderError {
/// Wrapped body stream error.
#[display(fmt = "body")]
Body(Box<dyn StdError>),
#[display(fmt = "blocking")]
Blocking(BlockingError),
/// Generic I/O error.
#[display(fmt = "io")]
Io(io::Error),
}
@ -414,7 +418,6 @@ impl StdError for EncoderError {
fn source(&self) -> Option<&(dyn StdError + 'static)> {
match self {
EncoderError::Body(err) => Some(&**err),
EncoderError::Blocking(err) => Some(err),
EncoderError::Io(err) => Some(err),
}
}

View File

@ -51,7 +51,7 @@ impl Error {
Self::new(Kind::SendResponse)
}
#[allow(unused)] // reserved for future use (TODO: remove allow when being used)
#[allow(unused)] // available for future use
pub(crate) fn new_io() -> Self {
Self::new(Kind::Io)
}
@ -108,8 +108,10 @@ pub(crate) enum Kind {
impl fmt::Debug for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// TODO: more detail
f.write_str("actix_http::Error")
f.debug_struct("actix_http::Error")
.field("kind", &self.inner.kind)
.field("cause", &self.inner.cause)
.finish()
}
}
@ -250,12 +252,6 @@ impl From<ParseError> for Response<BoxBody> {
}
}
/// A set of errors that can occur running blocking tasks in thread pool.
#[derive(Debug, Display, Error)]
#[display(fmt = "Blocking thread pool is gone")]
// TODO: non-exhaustive
pub struct BlockingError;
/// A set of errors that can occur during payload parsing.
#[derive(Debug, Display)]
#[non_exhaustive]
@ -293,13 +289,14 @@ impl std::error::Error for PayloadError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
PayloadError::Incomplete(None) => None,
PayloadError::Incomplete(Some(err)) => Some(err as &dyn std::error::Error),
PayloadError::Incomplete(Some(err)) => Some(err),
PayloadError::EncodingCorrupted => None,
PayloadError::Overflow => None,
PayloadError::UnknownLength => None,
#[cfg(feature = "http2")]
PayloadError::Http2Payload(err) => Some(err as &dyn std::error::Error),
PayloadError::Io(err) => Some(err as &dyn std::error::Error),
#[cfg_attr(docsrs, doc(cfg(feature = "http2")))]
PayloadError::Http2Payload(err) => Some(err),
PayloadError::Io(err) => Some(err),
}
}
}
@ -323,15 +320,6 @@ impl From<io::Error> for PayloadError {
}
}
impl From<BlockingError> for PayloadError {
fn from(_: BlockingError) -> Self {
PayloadError::Io(io::Error::new(
io::ErrorKind::Other,
"Operation is canceled",
))
}
}
impl From<PayloadError> for Error {
fn from(err: PayloadError) -> Self {
Self::new_payload().with_cause(err)
@ -340,6 +328,7 @@ impl From<PayloadError> for Error {
/// A set of errors that can occur during dispatching HTTP requests.
#[derive(Debug, Display, From)]
#[non_exhaustive]
pub enum DispatchError {
/// Service error.
#[display(fmt = "Service Error")]
@ -363,6 +352,7 @@ pub enum DispatchError {
/// HTTP/2 error.
#[display(fmt = "{}", _0)]
#[cfg(feature = "http2")]
#[cfg_attr(docsrs, doc(cfg(feature = "http2")))]
H2(h2::Error),
/// The first request did not complete within the specified timeout.
@ -373,6 +363,10 @@ pub enum DispatchError {
#[display(fmt = "Connection shutdown timeout")]
DisconnectTimeout,
/// Handler dropped payload before reading EOF.
#[display(fmt = "Handler dropped payload before reading EOF")]
HandlerDroppedPayload,
/// Internal error.
#[display(fmt = "Internal error")]
InternalError,
@ -381,7 +375,6 @@ pub enum DispatchError {
impl StdError for DispatchError {
fn source(&self) -> Option<&(dyn StdError + 'static)> {
match self {
// TODO: error source extraction?
DispatchError::Service(_res) => None,
DispatchError::Body(err) => Some(&**err),
DispatchError::Io(err) => Some(err),
@ -397,7 +390,7 @@ impl StdError for DispatchError {
/// A set of error that can occur during parsing content type.
#[derive(Debug, Display, Error)]
#[cfg_attr(test, derive(PartialEq))]
#[cfg_attr(test, derive(PartialEq, Eq))]
#[non_exhaustive]
pub enum ContentTypeError {
/// Can not parse content type

View File

@ -1,6 +1,7 @@
use std::{io, task::Poll};
use bytes::{Buf as _, Bytes, BytesMut};
use tracing::{debug, trace};
macro_rules! byte (
($rdr:ident) => ({
@ -14,7 +15,7 @@ macro_rules! byte (
})
);
#[derive(Debug, PartialEq, Clone)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub(super) enum ChunkedState {
Size,
SizeLws,
@ -76,7 +77,7 @@ impl ChunkedState {
Poll::Ready(Ok(ChunkedState::Size))
}
None => {
log::debug!("chunk size would overflow u64");
debug!("chunk size would overflow u64");
Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size line: Size is too big",
@ -124,7 +125,7 @@ impl ChunkedState {
rem: &mut u64,
buf: &mut Option<Bytes>,
) -> Poll<Result<ChunkedState, io::Error>> {
log::trace!("Chunked read, remaining={:?}", rem);
trace!("Chunked read, remaining={:?}", rem);
let len = rdr.len() as u64;
if len == 0 {

View File

@ -128,7 +128,10 @@ impl Decoder for ClientCodec {
type Error = ParseError;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
debug_assert!(!self.inner.payload.is_some(), "Payload decoder is set");
debug_assert!(
self.inner.payload.is_none(),
"Payload decoder should not be set"
);
if let Some((req, payload)) = self.inner.decoder.decode(src)? {
if let Some(conn_type) = req.conn_type() {

View File

@ -125,11 +125,13 @@ impl Decoder for Codec {
self.flags.set(Flags::HEAD, head.method == Method::HEAD);
self.version = head.version;
self.conn_type = head.connection_type();
if self.conn_type == ConnectionType::KeepAlive
&& !self.flags.contains(Flags::KEEP_ALIVE_ENABLED)
{
self.conn_type = ConnectionType::Close
}
match payload {
PayloadType::None => self.payload = None,
PayloadType::Payload(pl) => self.payload = Some(pl),

View File

@ -6,7 +6,7 @@ use http::{
header::{self, HeaderName, HeaderValue},
Method, StatusCode, Uri, Version,
};
use log::{debug, error, trace};
use tracing::{debug, error, trace};
use super::chunked::ChunkedState;
use crate::{error::ParseError, header::HeaderMap, ConnectionType, Request, ResponseHead};
@ -46,6 +46,23 @@ pub(crate) enum PayloadLength {
None,
}
impl PayloadLength {
/// Returns true if variant is `None`.
fn is_none(&self) -> bool {
matches!(self, Self::None)
}
/// Returns true if variant represents a zero-length (not none) payload.
fn is_zero(&self) -> bool {
matches!(
self,
PayloadLength::Payload(PayloadType::Payload(PayloadDecoder {
kind: Kind::Length(0)
}))
)
}
}
pub(crate) trait MessageType: Sized {
fn set_connection_type(&mut self, conn_type: Option<ConnectionType>);
@ -59,6 +76,7 @@ pub(crate) trait MessageType: Sized {
&mut self,
slice: &Bytes,
raw_headers: &[HeaderIndex],
version: Version,
) -> Result<PayloadLength, ParseError> {
let mut ka = None;
let mut has_upgrade_websocket = false;
@ -87,21 +105,23 @@ pub(crate) trait MessageType: Sized {
return Err(ParseError::Header);
}
header::CONTENT_LENGTH => match value.to_str() {
Ok(s) if s.trim().starts_with('+') => {
debug!("illegal Content-Length: {:?}", s);
header::CONTENT_LENGTH => match value.to_str().map(str::trim) {
Ok(val) if val.starts_with('+') => {
debug!("illegal Content-Length: {:?}", val);
return Err(ParseError::Header);
}
Ok(s) => {
if let Ok(len) = s.parse::<u64>() {
if len != 0 {
Ok(val) => {
if let Ok(len) = val.parse::<u64>() {
// accept 0 lengths here and remove them in `decode` after all
// headers have been processed to prevent request smuggling issues
content_length = Some(len);
}
} else {
debug!("illegal Content-Length: {:?}", s);
debug!("illegal Content-Length: {:?}", val);
return Err(ParseError::Header);
}
}
Err(_) => {
debug!("illegal Content-Length: {:?}", value);
return Err(ParseError::Header);
@ -114,22 +134,23 @@ pub(crate) trait MessageType: Sized {
return Err(ParseError::Header);
}
header::TRANSFER_ENCODING => {
header::TRANSFER_ENCODING if version == Version::HTTP_11 => {
seen_te = true;
if let Ok(s) = value.to_str().map(str::trim) {
if s.eq_ignore_ascii_case("chunked") {
if let Ok(val) = value.to_str().map(str::trim) {
if val.eq_ignore_ascii_case("chunked") {
chunked = true;
} else if s.eq_ignore_ascii_case("identity") {
} else if val.eq_ignore_ascii_case("identity") {
// allow silently since multiple TE headers are already checked
} else {
debug!("illegal Transfer-Encoding: {:?}", s);
debug!("illegal Transfer-Encoding: {:?}", val);
return Err(ParseError::Header);
}
} else {
return Err(ParseError::Header);
}
}
// connection keep-alive state
header::CONNECTION => {
ka = if let Ok(conn) = value.to_str().map(str::trim) {
@ -146,6 +167,7 @@ pub(crate) trait MessageType: Sized {
None
};
}
header::UPGRADE => {
if let Ok(val) = value.to_str().map(str::trim) {
if val.eq_ignore_ascii_case("websocket") {
@ -153,19 +175,23 @@ pub(crate) trait MessageType: Sized {
}
}
}
header::EXPECT => {
let bytes = value.as_bytes();
if bytes.len() >= 4 && &bytes[0..4] == b"100-" {
expect = true;
}
}
_ => {}
}
headers.append(name, value);
}
}
self.set_connection_type(ka);
if expect {
self.set_expect()
}
@ -209,15 +235,16 @@ impl MessageType for Request {
let (len, method, uri, ver, h_len) = {
// SAFETY:
// Create an uninitialized array of `MaybeUninit`. The `assume_init` is
// safe because the type we are claiming to have initialized here is a
// bunch of `MaybeUninit`s, which do not require initialization.
// Create an uninitialized array of `MaybeUninit`. The `assume_init` is safe because the
// type we are claiming to have initialized here is a bunch of `MaybeUninit`s, which
// do not require initialization.
let mut parsed = unsafe {
MaybeUninit::<[MaybeUninit<httparse::Header<'_>>; MAX_HEADERS]>::uninit()
.assume_init()
};
let mut req = httparse::Request::new(&mut []);
match req.parse_with_uninit_headers(src, &mut parsed)? {
httparse::Status::Complete(len) => {
let method = Method::from_bytes(req.method.unwrap().as_bytes())
@ -232,6 +259,7 @@ impl MessageType for Request {
(len, method, uri, version, req.headers.len())
}
httparse::Status::Partial => {
return if src.len() >= MAX_BUFFER_SIZE {
trace!("MAX_BUFFER_SIZE unprocessed data reached, closing");
@ -247,7 +275,22 @@ impl MessageType for Request {
let mut msg = Request::new();
// convert headers
let length = msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len])?;
let mut length =
msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len], ver)?;
// disallow HTTP/1.0 POST requests that do not contain a Content-Length headers
// see https://datatracker.ietf.org/doc/html/rfc1945#section-7.2.2
if ver == Version::HTTP_10 && method == Method::POST && length.is_none() {
debug!("no Content-Length specified for HTTP/1.0 POST request");
return Err(ParseError::Header);
}
// Remove CL value if 0 now that all headers and HTTP/1.0 special cases are processed.
// Protects against some request smuggling attacks.
// See https://github.com/actix/actix-web/issues/2767.
if length.is_zero() {
length = PayloadLength::None;
}
// payload decoder
let decoder = match length {
@ -291,22 +334,35 @@ impl MessageType for ResponseHead {
let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;
let (len, ver, status, h_len) = {
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;
// SAFETY:
// Create an uninitialized array of `MaybeUninit`. The `assume_init` is safe because the
// type we are claiming to have initialized here is a bunch of `MaybeUninit`s, which
// do not require initialization.
let mut parsed = unsafe {
MaybeUninit::<[MaybeUninit<httparse::Header<'_>>; MAX_HEADERS]>::uninit()
.assume_init()
};
let mut res = httparse::Response::new(&mut parsed);
match res.parse(src)? {
let mut res = httparse::Response::new(&mut []);
let mut config = httparse::ParserConfig::default();
config.allow_spaces_after_header_name_in_responses(true);
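// Editorial note: this tolerates responses from non-conforming servers that pad the header
// name before the colon (e.g. `Server : example`), which strict parsing would otherwise reject.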
match config.parse_response_with_uninit_headers(&mut res, src, &mut parsed)? {
httparse::Status::Complete(len) => {
let version = if res.version.unwrap() == 1 {
Version::HTTP_11
} else {
Version::HTTP_10
};
let status = StatusCode::from_u16(res.code.unwrap())
.map_err(|_| ParseError::Status)?;
HeaderIndex::record(src, res.headers, &mut headers);
(len, version, status, res.headers.len())
}
httparse::Status::Partial => {
return if src.len() >= MAX_BUFFER_SIZE {
error!("MAX_BUFFER_SIZE unprocessed data reached, closing");
@ -322,7 +378,15 @@ impl MessageType for ResponseHead {
msg.version = ver;
// convert headers
let length = msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len])?;
let mut length =
msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len], ver)?;
// Remove CL value if 0 now that all headers and HTTP/1.0 special cases are processed.
// Protects against some request smuggling attacks.
// See https://github.com/actix/actix-web/issues/2767.
if length.is_zero() {
length = PayloadLength::None;
}
// message payload
let decoder = if let PayloadLength::Payload(pl) = length {
@ -358,9 +422,6 @@ pub(crate) const EMPTY_HEADER_INDEX: HeaderIndex = HeaderIndex {
pub(crate) const EMPTY_HEADER_INDEX_ARRAY: [HeaderIndex; MAX_HEADERS] =
[EMPTY_HEADER_INDEX; MAX_HEADERS];
pub(crate) const EMPTY_HEADER_ARRAY: [httparse::Header<'static>; MAX_HEADERS] =
[httparse::EMPTY_HEADER; MAX_HEADERS];
impl HeaderIndex {
pub(crate) fn record(
bytes: &[u8],
@ -379,7 +440,7 @@ impl HeaderIndex {
}
}
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, Eq)]
/// Chunk type yielded while decoding a payload.
pub enum PayloadItem {
Chunk(Bytes),
@ -389,7 +450,7 @@ pub enum PayloadItem {
/// Decoder that can handle different payload types.
///
/// If a message body does not use `Transfer-Encoding`, it should include a `Content-Length`.
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PayloadDecoder {
kind: Kind,
}
@ -415,7 +476,7 @@ impl PayloadDecoder {
}
}
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, Eq)]
enum Kind {
/// A reader used when a `Content-Length` header is passed with a positive integer.
Length(u64),
@ -594,14 +655,100 @@ mod tests {
}
#[test]
fn test_parse_post() {
let mut buf = BytesMut::from("POST /test2 HTTP/1.0\r\n\r\n");
fn parse_h09_reject() {
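// HTTP/0.9 has no header section and is not supported by this decoder, so a request line
// advertising version 0.9 is rejected outright.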
let mut buf = BytesMut::from(
"GET /test1 HTTP/0.9\r\n\
\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
reader.decode(&mut buf).unwrap_err();
let mut buf = BytesMut::from(
"POST /test2 HTTP/0.9\r\n\
Content-Length: 3\r\n\
\r\n
abc",
);
let mut reader = MessageDecoder::<Request>::default();
reader.decode(&mut buf).unwrap_err();
}
#[test]
fn parse_h10_get() {
let mut buf = BytesMut::from(
"GET /test1 HTTP/1.0\r\n\
\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
assert_eq!(req.version(), Version::HTTP_10);
assert_eq!(*req.method(), Method::GET);
assert_eq!(req.path(), "/test1");
let mut buf = BytesMut::from(
"GET /test2 HTTP/1.0\r\n\
Content-Length: 0\r\n\
\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
assert_eq!(req.version(), Version::HTTP_10);
assert_eq!(*req.method(), Method::GET);
assert_eq!(req.path(), "/test2");
let mut buf = BytesMut::from(
"GET /test3 HTTP/1.0\r\n\
Content-Length: 3\r\n\
\r\n
abc",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
assert_eq!(req.version(), Version::HTTP_10);
assert_eq!(*req.method(), Method::GET);
assert_eq!(req.path(), "/test3");
}
#[test]
fn parse_h10_post() {
let mut buf = BytesMut::from(
"POST /test1 HTTP/1.0\r\n\
Content-Length: 3\r\n\
\r\n\
abc",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
assert_eq!(req.version(), Version::HTTP_10);
assert_eq!(*req.method(), Method::POST);
assert_eq!(req.path(), "/test1");
let mut buf = BytesMut::from(
"POST /test2 HTTP/1.0\r\n\
Content-Length: 0\r\n\
\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
assert_eq!(req.version(), Version::HTTP_10);
assert_eq!(*req.method(), Method::POST);
assert_eq!(req.path(), "/test2");
let mut buf = BytesMut::from(
"POST /test3 HTTP/1.0\r\n\
\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let err = reader.decode(&mut buf).unwrap_err();
assert!(err.to_string().contains("Header"))
}
#[test]
@ -697,121 +844,98 @@ mod tests {
#[test]
fn test_conn_default_1_0() {
let mut buf = BytesMut::from("GET /test HTTP/1.0\r\n\r\n");
let req = parse_ready!(&mut buf);
let req = parse_ready!(&mut BytesMut::from("GET /test HTTP/1.0\r\n\r\n"));
assert_eq!(req.head().connection_type(), ConnectionType::Close);
}
#[test]
fn test_conn_default_1_1() {
let mut buf = BytesMut::from("GET /test HTTP/1.1\r\n\r\n");
let req = parse_ready!(&mut buf);
let req = parse_ready!(&mut BytesMut::from("GET /test HTTP/1.1\r\n\r\n"));
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
}
#[test]
fn test_conn_close() {
let mut buf = BytesMut::from(
let req = parse_ready!(&mut BytesMut::from(
"GET /test HTTP/1.1\r\n\
connection: close\r\n\r\n",
);
let req = parse_ready!(&mut buf);
));
assert_eq!(req.head().connection_type(), ConnectionType::Close);
let mut buf = BytesMut::from(
let req = parse_ready!(&mut BytesMut::from(
"GET /test HTTP/1.1\r\n\
connection: Close\r\n\r\n",
);
let req = parse_ready!(&mut buf);
));
assert_eq!(req.head().connection_type(), ConnectionType::Close);
}
#[test]
fn test_conn_close_1_0() {
let mut buf = BytesMut::from(
let req = parse_ready!(&mut BytesMut::from(
"GET /test HTTP/1.0\r\n\
connection: close\r\n\r\n",
);
let req = parse_ready!(&mut buf);
));
assert_eq!(req.head().connection_type(), ConnectionType::Close);
}
#[test]
fn test_conn_keep_alive_1_0() {
let mut buf = BytesMut::from(
let req = parse_ready!(&mut BytesMut::from(
"GET /test HTTP/1.0\r\n\
connection: keep-alive\r\n\r\n",
);
let req = parse_ready!(&mut buf);
));
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
let mut buf = BytesMut::from(
let req = parse_ready!(&mut BytesMut::from(
"GET /test HTTP/1.0\r\n\
connection: Keep-Alive\r\n\r\n",
);
let req = parse_ready!(&mut buf);
));
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
}
#[test]
fn test_conn_keep_alive_1_1() {
let mut buf = BytesMut::from(
let req = parse_ready!(&mut BytesMut::from(
"GET /test HTTP/1.1\r\n\
connection: keep-alive\r\n\r\n",
);
let req = parse_ready!(&mut buf);
));
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
}
#[test]
fn test_conn_other_1_0() {
let mut buf = BytesMut::from(
let req = parse_ready!(&mut BytesMut::from(
"GET /test HTTP/1.0\r\n\
connection: other\r\n\r\n",
);
let req = parse_ready!(&mut buf);
));
assert_eq!(req.head().connection_type(), ConnectionType::Close);
}
#[test]
fn test_conn_other_1_1() {
let mut buf = BytesMut::from(
let req = parse_ready!(&mut BytesMut::from(
"GET /test HTTP/1.1\r\n\
connection: other\r\n\r\n",
);
let req = parse_ready!(&mut buf);
));
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
}
#[test]
fn test_conn_upgrade() {
let mut buf = BytesMut::from(
let req = parse_ready!(&mut BytesMut::from(
"GET /test HTTP/1.1\r\n\
upgrade: websockets\r\n\
connection: upgrade\r\n\r\n",
);
let req = parse_ready!(&mut buf);
));
assert!(req.upgrade());
assert_eq!(req.head().connection_type(), ConnectionType::Upgrade);
let mut buf = BytesMut::from(
let req = parse_ready!(&mut BytesMut::from(
"GET /test HTTP/1.1\r\n\
upgrade: Websockets\r\n\
connection: Upgrade\r\n\r\n",
);
let req = parse_ready!(&mut buf);
));
assert!(req.upgrade());
assert_eq!(req.head().connection_type(), ConnectionType::Upgrade);
@ -819,59 +943,62 @@ mod tests {
#[test]
fn test_conn_upgrade_connect_method() {
let mut buf = BytesMut::from(
let req = parse_ready!(&mut BytesMut::from(
"CONNECT /test HTTP/1.1\r\n\
content-type: text/plain\r\n\r\n",
);
let req = parse_ready!(&mut buf);
));
assert!(req.upgrade());
}
#[test]
fn test_headers_content_length_err_1() {
let mut buf = BytesMut::from(
fn test_headers_bad_content_length() {
// string CL
expect_parse_err!(&mut BytesMut::from(
"GET /test HTTP/1.1\r\n\
content-length: line\r\n\r\n",
);
));
expect_parse_err!(&mut buf)
// negative CL
expect_parse_err!(&mut BytesMut::from(
"GET /test HTTP/1.1\r\n\
content-length: -1\r\n\r\n",
));
}
#[test]
fn test_headers_content_length_err_2() {
fn octal_ish_cl_parsed_as_decimal() {
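// A Content-Length of "011" is parsed as decimal 11 (leading zeros are not an octal prefix),
// which the expected payload decoder length below asserts.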
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
content-length: -1\r\n\r\n",
"POST /test HTTP/1.1\r\n\
content-length: 011\r\n\r\n",
);
expect_parse_err!(&mut buf);
let mut reader = MessageDecoder::<Request>::default();
let (_req, pl) = reader.decode(&mut buf).unwrap().unwrap();
assert!(matches!(
pl,
PayloadType::Payload(pl) if pl == PayloadDecoder::length(11)
));
}
#[test]
fn test_invalid_header() {
let mut buf = BytesMut::from(
expect_parse_err!(&mut BytesMut::from(
"GET /test HTTP/1.1\r\n\
test line\r\n\r\n",
);
expect_parse_err!(&mut buf);
));
}
#[test]
fn test_invalid_name() {
let mut buf = BytesMut::from(
expect_parse_err!(&mut BytesMut::from(
"GET /test HTTP/1.1\r\n\
test[]: line\r\n\r\n",
);
expect_parse_err!(&mut buf);
));
}
#[test]
fn test_http_request_bad_status_line() {
let mut buf = BytesMut::from("getpath \r\n\r\n");
expect_parse_err!(&mut buf);
expect_parse_err!(&mut BytesMut::from("getpath \r\n\r\n"));
}
#[test]
@ -911,11 +1038,10 @@ mod tests {
#[test]
fn test_http_request_parser_utf8() {
let mut buf = BytesMut::from(
let req = parse_ready!(&mut BytesMut::from(
"GET /test HTTP/1.1\r\n\
x-test: тест\r\n\r\n",
);
let req = parse_ready!(&mut buf);
));
assert_eq!(
req.headers().get("x-test").unwrap().as_bytes(),
@ -925,24 +1051,18 @@ mod tests {
#[test]
fn test_http_request_parser_two_slashes() {
let mut buf = BytesMut::from("GET //path HTTP/1.1\r\n\r\n");
let req = parse_ready!(&mut buf);
let req = parse_ready!(&mut BytesMut::from("GET //path HTTP/1.1\r\n\r\n"));
assert_eq!(req.path(), "//path");
}
#[test]
fn test_http_request_parser_bad_method() {
let mut buf = BytesMut::from("!12%()+=~$ /get HTTP/1.1\r\n\r\n");
expect_parse_err!(&mut buf);
expect_parse_err!(&mut BytesMut::from("!12%()+=~$ /get HTTP/1.1\r\n\r\n"));
}
#[test]
fn test_http_request_parser_bad_version() {
let mut buf = BytesMut::from("GET //get HT/11\r\n\r\n");
expect_parse_err!(&mut buf);
expect_parse_err!(&mut BytesMut::from("GET //get HT/11\r\n\r\n"));
}
#[test]
@ -959,29 +1079,66 @@ mod tests {
#[test]
fn hrs_multiple_content_length() {
let mut buf = BytesMut::from(
expect_parse_err!(&mut BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Content-Length: 4\r\n\
Content-Length: 2\r\n\
\r\n\
abcd",
);
));
expect_parse_err!(&mut buf);
expect_parse_err!(&mut BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Content-Length: 0\r\n\
Content-Length: 2\r\n\
\r\n\
ab",
));
}
#[test]
fn hrs_content_length_plus() {
let mut buf = BytesMut::from(
expect_parse_err!(&mut BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Content-Length: +3\r\n\
\r\n\
000",
));
}
#[test]
fn hrs_te_http10() {
// in HTTP/1.0, Transfer-Encoding is ignored, so the request must contain a Content-Length header
expect_parse_err!(&mut BytesMut::from(
"POST / HTTP/1.0\r\n\
Host: example.com\r\n\
Transfer-Encoding: chunked\r\n\
\r\n\
3\r\n\
aaa\r\n\
0\r\n\
",
));
}
#[test]
fn hrs_cl_and_te_http10() {
// in HTTP/1.0, Transfer-Encoding is simply ignored, so it's fine to have both
let mut buf = BytesMut::from(
"GET / HTTP/1.0\r\n\
Host: example.com\r\n\
Content-Length: 3\r\n\
Transfer-Encoding: chunked\r\n\
\r\n\
000",
);
expect_parse_err!(&mut buf);
parse_ready!(&mut buf);
}
#[test]


@ -15,6 +15,7 @@ use bitflags::bitflags;
use bytes::{Buf, BytesMut};
use futures_core::ready;
use pin_project_lite::pin_project;
use tracing::{error, trace};
use crate::{
body::{BodySize, BoxBody, MessageBody},
@ -151,7 +152,8 @@ pin_project! {
error: Option<DispatchError>,
#[pin]
state: State<S, B, X>,
pub(super) state: State<S, B, X>,
// when Some(_) dispatcher is in state of receiving request payload
payload: Option<PayloadSender>,
messages: VecDeque<DispatcherMessage>,
@ -174,7 +176,7 @@ enum DispatcherMessage {
pin_project! {
#[project = StateProj]
enum State<S, B, X>
pub(super) enum State<S, B, X>
where
S: Service<Request>,
X: Service<Request, Response = Request>,
@ -194,7 +196,7 @@ where
X: Service<Request, Response = Request>,
B: MessageBody,
{
fn is_none(&self) -> bool {
pub(super) fn is_none(&self) -> bool {
matches!(self, State::None)
}
}
@ -335,7 +337,7 @@ where
while written < len {
match io.as_mut().poll_write(cx, &write_buf[written..])? {
Poll::Ready(0) => {
log::error!("write zero; closing");
error!("write zero; closing");
return Poll::Ready(Err(io::Error::new(io::ErrorKind::WriteZero, "")));
}
@ -374,8 +376,6 @@ where
DispatchError::Io(err)
})?;
this.flags.set(Flags::KEEP_ALIVE, this.codec.keep_alive());
Ok(size)
}
@ -458,7 +458,12 @@ where
}
// all messages are dealt with
None => return Ok(PollResponse::DoNothing),
None => {
// start keep-alive if last request allowed it
this.flags.set(Flags::KEEP_ALIVE, this.codec.keep_alive());
return Ok(PollResponse::DoNothing);
}
},
StateProj::ServiceCall { fut } => {
@ -564,7 +569,7 @@ where
}
StateProj::ExpectCall { fut } => {
log::trace!(" calling expect service");
trace!(" calling expect service");
match fut.poll(cx) {
// expect resolved. write continue to buffer and set InnerDispatcher state
@ -694,6 +699,7 @@ where
let mut updated = false;
// decode from read buf as many full requests as possible
loop {
match this.codec.decode(this.read_buf) {
Ok(Some(msg)) => {
@ -746,7 +752,7 @@ where
if let Some(ref mut payload) = this.payload {
payload.feed_data(chunk);
} else {
log::error!("Internal server error: unexpected payload chunk");
error!("Internal server error: unexpected payload chunk");
this.flags.insert(Flags::READ_DISCONNECT);
this.messages.push_back(DispatcherMessage::Error(
Response::internal_server_error().drop_body(),
@ -760,7 +766,7 @@ where
if let Some(mut payload) = this.payload.take() {
payload.feed_eof();
} else {
log::error!("Internal server error: unexpected eof");
error!("Internal server error: unexpected eof");
this.flags.insert(Flags::READ_DISCONNECT);
this.messages.push_back(DispatcherMessage::Error(
Response::internal_server_error().drop_body(),
@ -777,7 +783,7 @@ where
Ok(None) => break,
Err(ParseError::Io(err)) => {
log::trace!("I/O error: {}", &err);
trace!("I/O error: {}", &err);
self.as_mut().client_disconnected();
this = self.as_mut().project();
*this.error = Some(DispatchError::Io(err));
@ -785,7 +791,7 @@ where
}
Err(ParseError::TooLarge) => {
log::trace!("request head was too big; returning 431 response");
trace!("request head was too big; returning 431 response");
if let Some(mut payload) = this.payload.take() {
payload.set_error(PayloadError::Overflow);
@ -805,7 +811,7 @@ where
}
Err(err) => {
log::trace!("parse error {}", &err);
trace!("parse error {}", &err);
if let Some(mut payload) = this.payload.take() {
payload.set_error(PayloadError::EncodingCorrupted);
@ -836,10 +842,7 @@ where
if timer.as_mut().poll(cx).is_ready() {
// timeout on first request (slow request) return 408
log::trace!(
"timed out on slow request; \
replying with 408 and closing connection"
);
trace!("timed out on slow request; replying with 408 and closing connection");
let _ = self.as_mut().send_error_response(
Response::with_body(StatusCode::REQUEST_TIMEOUT, ()),
@ -868,15 +871,21 @@ where
"dispatcher should not be in keep-alive phase if state is not none: {:?}",
this.state,
);
debug_assert!(
this.write_buf.is_empty(),
"dispatcher should not be in keep-alive phase if write_buf is not empty",
);
// Assert removed by @robjtede on account of issue #2655. There are cases where an I/O
// flush can be pending after entering the keep-alive state causing the subsequent flush
// wake up to panic here. This appears to be a Linux-only problem. Leaving original code
// below for posterity because a simple and reliable test could not be found to trigger
// the behavior.
// debug_assert!(
// this.write_buf.is_empty(),
// "dispatcher should not be in keep-alive phase if write_buf is not empty",
// );
// keep-alive timer has timed out
if timer.as_mut().poll(cx).is_ready() {
// no tasks at hand
log::trace!("timer timed out; closing connection");
trace!("timer timed out; closing connection");
this.flags.insert(Flags::SHUTDOWN);
if let Some(deadline) = this.config.client_disconnect_deadline() {
@ -906,7 +915,7 @@ where
// timed-out during shutdown; drop connection
if timer.as_mut().poll(cx).is_ready() {
log::trace!("timed-out during shutdown");
trace!("timed-out during shutdown");
return Err(DispatchError::DisconnectTimeout);
}
}
@ -967,9 +976,11 @@ where
//
// A Request head too large to parse is only checked on `httparse::Status::Partial`.
if this.payload.is_none() {
// When dispatcher has a payload the responsibility of wake up it would be shift
// to h1::payload::Payload.
match this.payload {
// When the dispatcher has a payload, the responsibility for wake-ups is shifted to
// `h1::payload::Payload` unless the payload needs a read, in which case it
// might not have access to the waker and could result in the dispatcher
// getting stuck until timeout.
//
// Reason:
// Self wake up when there is payload would waste poll and/or result in
@ -980,7 +991,8 @@ where
// read anymore. At this case read_buf could always remain beyond
// MAX_BUFFER_SIZE and self wake up would be busy poll dispatcher and
// waste resources.
cx.waker().wake_by_ref();
Some(ref p) if p.need_read(cx) != PayloadStatus::Read => {}
_ => cx.waker().wake_by_ref(),
}
return Ok(false);
@ -1065,12 +1077,12 @@ where
match this.inner.project() {
DispatcherStateProj::Upgrade { fut: upgrade } => upgrade.poll(cx).map_err(|err| {
log::error!("Upgrade handler error: {}", err);
error!("Upgrade handler error: {}", err);
DispatchError::Upgrade
}),
DispatcherStateProj::Normal { mut inner } => {
log::trace!("start flags: {:?}", &inner.flags);
trace!("start flags: {:?}", &inner.flags);
trace_timer_states(
"start",
@ -1177,7 +1189,7 @@ where
// client is gone
if inner.flags.contains(Flags::WRITE_DISCONNECT) {
log::trace!("client is gone; disconnecting");
trace!("client is gone; disconnecting");
return Poll::Ready(Ok(()));
}
@ -1186,14 +1198,14 @@ where
// read half is closed; we do not process any responses
if inner_p.flags.contains(Flags::READ_DISCONNECT) && state_is_none {
log::trace!("read half closed; start shutdown");
trace!("read half closed; start shutdown");
inner_p.flags.insert(Flags::SHUTDOWN);
}
// keep-alive and stream errors
if state_is_none && inner_p.write_buf.is_empty() {
if let Some(err) = inner_p.error.take() {
log::error!("stream error: {}", &err);
error!("stream error: {}", &err);
return Poll::Ready(Err(err));
}
@ -1222,7 +1234,7 @@ where
Poll::Pending
};
log::trace!("end flags: {:?}", &inner.flags);
trace!("end flags: {:?}", &inner.flags);
poll
}
@ -1237,17 +1249,17 @@ fn trace_timer_states(
ka_timer: &TimerState,
shutdown_timer: &TimerState,
) {
log::trace!("{} timers:", label);
trace!("{} timers:", label);
if head_timer.is_enabled() {
log::trace!(" head {}", &head_timer);
trace!(" head {}", &head_timer);
}
if ka_timer.is_enabled() {
log::trace!(" keep-alive {}", &ka_timer);
trace!(" keep-alive {}", &ka_timer);
}
if shutdown_timer.is_enabled() {
log::trace!(" shutdown {}", &shutdown_timer);
trace!(" shutdown {}", &shutdown_timer);
}
}


@ -1,6 +1,6 @@
use std::{future::Future, str, task::Poll, time::Duration};
use actix_rt::time::sleep;
use actix_rt::{pin, time::sleep};
use actix_service::fn_service;
use actix_utils::future::{ready, Ready};
use bytes::Bytes;
@ -53,6 +53,14 @@ fn echo_path_service(
})
}
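// Service that takes the request payload and immediately drops it without reading, exercising
// the dispatcher clean-up path covered by the `handler_drop_payload` test below.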
fn drop_payload_service(
) -> impl Service<Request, Response = Response<&'static str>, Error = Error> {
fn_service(|mut req: Request| async move {
let _ = req.take_payload();
Ok::<_, Error>(Response::with_body(StatusCode::OK, "payload dropped"))
})
}
fn echo_payload_service() -> impl Service<Request, Response = Response<Bytes>, Error = Error> {
fn_service(|mut req: Request| {
Box::pin(async move {
@ -89,7 +97,7 @@ async fn late_request() {
None,
OnConnectData::default(),
);
actix_rt::pin!(h1);
pin!(h1);
lazy(|cx| {
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
@ -156,7 +164,7 @@ async fn oneshot_connection() {
None,
OnConnectData::default(),
);
actix_rt::pin!(h1);
pin!(h1);
lazy(|cx| {
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
@ -173,13 +181,16 @@ async fn oneshot_connection() {
stabilize_date_header(&mut res);
let res = &res[..];
let exp = b"\
HTTP/1.1 200 OK\r\n\
content-length: 5\r\n\
connection: close\r\n\
date: Thu, 01 Jan 1970 12:34:56 UTC\r\n\r\n\
/abcd\
";
let exp = http_msg(
r"
HTTP/1.1 200 OK
content-length: 5
connection: close
date: Thu, 01 Jan 1970 12:34:56 UTC
/abcd
",
);
assert_eq!(
res,
@ -188,7 +199,7 @@ async fn oneshot_connection() {
response: {:?}\n\
expected: {:?}",
String::from_utf8_lossy(res),
String::from_utf8_lossy(exp)
String::from_utf8_lossy(&exp)
);
})
.await;
@ -214,7 +225,7 @@ async fn keep_alive_timeout() {
None,
OnConnectData::default(),
);
actix_rt::pin!(h1);
pin!(h1);
lazy(|cx| {
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
@ -293,7 +304,7 @@ async fn keep_alive_follow_up_req() {
None,
OnConnectData::default(),
);
actix_rt::pin!(h1);
pin!(h1);
lazy(|cx| {
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
@ -413,7 +424,7 @@ async fn req_parse_err() {
OnConnectData::default(),
);
actix_rt::pin!(h1);
pin!(h1);
match h1.as_mut().poll(cx) {
Poll::Pending => panic!(),
@ -459,7 +470,7 @@ async fn pipelining_ok_then_ok() {
OnConnectData::default(),
);
actix_rt::pin!(h1);
pin!(h1);
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
@ -529,7 +540,7 @@ async fn pipelining_ok_then_bad() {
OnConnectData::default(),
);
actix_rt::pin!(h1);
pin!(h1);
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
@ -601,7 +612,7 @@ async fn expect_handling() {
",
);
actix_rt::pin!(h1);
pin!(h1);
assert!(h1.as_mut().poll(cx).is_pending());
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
@ -678,7 +689,7 @@ async fn expect_eager() {
",
);
actix_rt::pin!(h1);
pin!(h1);
assert!(h1.as_mut().poll(cx).is_ready());
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
@ -761,7 +772,7 @@ async fn upgrade_handling() {
",
);
actix_rt::pin!(h1);
pin!(h1);
assert!(h1.as_mut().poll(cx).is_ready());
assert!(matches!(&h1.inner, DispatcherState::Upgrade { .. }));
@ -771,3 +782,195 @@ async fn upgrade_handling() {
})
.await;
}
// fix in #2624 reverted temporarily
// complete fix tracked in #2745
#[ignore]
#[actix_rt::test]
async fn handler_drop_payload() {
let _ = env_logger::try_init();
let mut buf = TestBuffer::new(http_msg(
r"
POST /drop-payload HTTP/1.1
Content-Length: 3
abc
",
));
let services = HttpFlow::new(
drop_payload_service(),
ExpectHandler,
None::<UpgradeHandler>,
);
let h1 = Dispatcher::new(
buf.clone(),
services,
ServiceConfig::default(),
None,
OnConnectData::default(),
);
pin!(h1);
lazy(|cx| {
assert!(h1.as_mut().poll(cx).is_pending());
// polls: manual
assert_eq!(h1.poll_count, 1);
let mut res = BytesMut::from(buf.take_write_buf().as_ref());
stabilize_date_header(&mut res);
let res = &res[..];
let exp = http_msg(
r"
HTTP/1.1 200 OK
content-length: 15
date: Thu, 01 Jan 1970 12:34:56 UTC
payload dropped
",
);
assert_eq!(
res,
exp,
"\nexpected response not in write buffer:\n\
response: {:?}\n\
expected: {:?}",
String::from_utf8_lossy(res),
String::from_utf8_lossy(&exp)
);
if let DispatcherStateProj::Normal { inner } = h1.as_mut().project().inner.project() {
assert!(inner.state.is_none());
}
})
.await;
lazy(|cx| {
// add a message that claims to have a payload longer than it provides
buf.extend_read_buf(http_msg(
r"
POST /drop-payload HTTP/1.1
Content-Length: 200
abc
",
));
assert!(h1.as_mut().poll(cx).is_pending());
// polls: manual => manual
assert_eq!(h1.poll_count, 2);
let mut res = BytesMut::from(buf.take_write_buf().as_ref());
stabilize_date_header(&mut res);
let res = &res[..];
// expect response immediately even though request side has not finished reading payload
let exp = http_msg(
r"
HTTP/1.1 200 OK
content-length: 15
date: Thu, 01 Jan 1970 12:34:56 UTC
payload dropped
",
);
assert_eq!(
res,
exp,
"\nexpected response not in write buffer:\n\
response: {:?}\n\
expected: {:?}",
String::from_utf8_lossy(res),
String::from_utf8_lossy(&exp)
);
})
.await;
lazy(|cx| {
assert!(h1.as_mut().poll(cx).is_ready());
// polls: manual => manual => manual
assert_eq!(h1.poll_count, 3);
let mut res = BytesMut::from(buf.take_write_buf().as_ref());
stabilize_date_header(&mut res);
let res = &res[..];
// expect that an unrequested 500 error response is sent back since the connection could not be cleaned up
let exp = http_msg(
r"
HTTP/1.1 500 Internal Server Error
content-length: 0
connection: close
date: Thu, 01 Jan 1970 12:34:56 UTC
",
);
assert_eq!(
res,
exp,
"\nexpected response not in write buffer:\n\
response: {:?}\n\
expected: {:?}",
String::from_utf8_lossy(res),
String::from_utf8_lossy(&exp)
);
})
.await;
}
fn http_msg(msg: impl AsRef<str>) -> BytesMut {
let mut msg = msg
.as_ref()
.trim()
.split('\n')
.into_iter()
.map(|line| [line.trim_start(), "\r"].concat())
.collect::<Vec<_>>()
.join("\n");
// remove trailing \r
msg.pop();
if !msg.is_empty() && !msg.contains("\r\n\r\n") {
msg.push_str("\r\n\r\n");
}
BytesMut::from(msg.as_bytes())
}
#[test]
fn http_msg_creates_msg() {
assert_eq!(http_msg(r""), "");
assert_eq!(
http_msg(
r"
POST / HTTP/1.1
Content-Length: 3
abc
"
),
"POST / HTTP/1.1\r\nContent-Length: 3\r\n\r\nabc"
);
assert_eq!(
http_msg(
r"
GET / HTTP/1.1
Content-Length: 3
"
),
"GET / HTTP/1.1\r\nContent-Length: 3\r\n\r\n"
);
}


@ -210,14 +210,14 @@ pub(crate) trait MessageType: Sized {
dst.advance_mut(pos);
}
// optimized date header, set_date writes \r\n
if !has_date {
// optimized date header, write_date_header writes its own \r\n
config.write_date_header(dst, camel_case);
} else {
// msg eof
dst.extend_from_slice(b"\r\n");
}
// end-of-headers marker
dst.extend_from_slice(b"\r\n");
Ok(())
}
@ -517,6 +517,7 @@ unsafe fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
if let Some(c @ b'a'..=b'z') = iter.next() {
buffer[index] = c & 0b1101_1111;
}
index += 1;
}
index += 1;
@ -528,7 +529,7 @@ mod tests {
use std::rc::Rc;
use bytes::Bytes;
use http::header::AUTHORIZATION;
use http::header::{AUTHORIZATION, UPGRADE_INSECURE_REQUESTS};
use super::*;
use crate::{
@ -559,6 +560,9 @@ mod tests {
head.headers
.insert(CONTENT_TYPE, HeaderValue::from_static("plain/text"));
head.headers
.insert(UPGRADE_INSECURE_REQUESTS, HeaderValue::from_static("1"));
let mut head = RequestHeadType::Owned(head);
let _ = head.encode_headers(
@ -574,6 +578,7 @@ mod tests {
assert!(data.contains("Connection: close\r\n"));
assert!(data.contains("Content-Type: plain/text\r\n"));
assert!(data.contains("Date: date\r\n"));
assert!(data.contains("Upgrade-Insecure-Requests: 1\r\n"));
let _ = head.encode_headers(
&mut bytes,


@ -16,7 +16,7 @@ use crate::error::PayloadError;
/// max buffer size 32k
pub(crate) const MAX_BUFFER_SIZE: usize = 32_768;
#[derive(Debug, PartialEq)]
#[derive(Debug, PartialEq, Eq)]
pub enum PayloadStatus {
Read,
Pause,
@ -252,18 +252,15 @@ impl Inner {
#[cfg(test)]
mod tests {
use std::panic::{RefUnwindSafe, UnwindSafe};
use actix_utils::future::poll_fn;
use static_assertions::{assert_impl_all, assert_not_impl_any};
use super::*;
assert_impl_all!(Payload: Unpin);
assert_not_impl_any!(Payload: Send, Sync, UnwindSafe, RefUnwindSafe);
assert_not_impl_any!(Payload: Send, Sync);
assert_impl_all!(Inner: Unpin, Send, Sync);
assert_not_impl_any!(Inner: UnwindSafe, RefUnwindSafe);
#[actix_rt::test]
async fn test_unread_data() {


@ -13,6 +13,7 @@ use actix_service::{
};
use actix_utils::future::ready;
use futures_core::future::LocalBoxFuture;
use tracing::error;
use crate::{
body::{BoxBody, MessageBody},
@ -133,6 +134,7 @@ mod openssl {
U::InitError: fmt::Debug,
{
/// Create OpenSSL based service.
#[cfg_attr(docsrs, doc(cfg(feature = "openssl")))]
pub fn openssl(
self,
acceptor: SslAcceptor,
@ -195,6 +197,7 @@ mod rustls {
U::InitError: fmt::Debug,
{
/// Create Rustls based service.
#[cfg_attr(docsrs, doc(cfg(feature = "rustls")))]
pub fn rustls(
self,
config: ServerConfig,
@ -305,13 +308,13 @@ where
Box::pin(async move {
let expect = expect
.await
.map_err(|e| log::error!("Init http expect service error: {:?}", e))?;
.map_err(|e| error!("Init http expect service error: {:?}", e))?;
let upgrade = match upgrade {
Some(upgrade) => {
let upgrade = upgrade
.await
.map_err(|e| log::error!("Init http upgrade service error: {:?}", e))?;
.map_err(|e| error!("Init http upgrade service error: {:?}", e))?;
Some(upgrade)
}
None => None,
@ -319,7 +322,7 @@ where
let service = service
.await
.map_err(|e| log::error!("Init http service error: {:?}", e))?;
.map_err(|e| error!("Init http service error: {:?}", e))?;
Ok(H1ServiceHandler::new(
cfg,
@ -357,7 +360,7 @@ where
fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
self._poll_ready(cx).map_err(|err| {
log::error!("HTTP/1 service readiness error: {:?}", err);
error!("HTTP/1 service readiness error: {:?}", err);
DispatchError::Service(err)
})
}


@ -1,6 +1,7 @@
use std::{fmt, future::Future, pin::Pin, task::Context};
use actix_rt::time::{Instant, Sleep};
use tracing::trace;
#[derive(Debug)]
pub(super) enum TimerState {
@ -24,7 +25,7 @@ impl TimerState {
pub(super) fn set(&mut self, timer: Sleep, line: u32) {
if matches!(self, Self::Disabled) {
log::trace!("setting disabled timer from line {}", line);
trace!("setting disabled timer from line {}", line);
}
*self = Self::Active {
@ -39,11 +40,11 @@ impl TimerState {
pub(super) fn clear(&mut self, line: u32) {
if matches!(self, Self::Disabled) {
log::trace!("trying to clear a disabled timer from line {}", line);
trace!("trying to clear a disabled timer from line {}", line);
}
if matches!(self, Self::Inactive) {
log::trace!("trying to clear an inactive timer from line {}", line);
trace!("trying to clear an inactive timer from line {}", line);
}
*self = Self::Inactive;


@ -19,13 +19,15 @@ use h2::{
server::{Connection, SendResponse},
Ping, PingPong,
};
use log::{error, trace};
use pin_project_lite::pin_project;
use tracing::{error, trace, warn};
use crate::{
body::{BodySize, BoxBody, MessageBody},
config::ServiceConfig,
header::{HeaderValue, CONNECTION, CONTENT_LENGTH, DATE, TRANSFER_ENCODING},
header::{
HeaderName, HeaderValue, CONNECTION, CONTENT_LENGTH, DATE, TRANSFER_ENCODING, UPGRADE,
},
service::HttpFlow,
Extensions, OnConnectData, Payload, Request, Response, ResponseHead,
};
@ -65,7 +67,7 @@ where
timer
})
.unwrap_or_else(|| Box::pin(sleep(dur))),
on_flight: false,
in_flight: false,
ping_pong: conn.ping_pong().unwrap(),
});
@ -82,9 +84,14 @@ where
}
struct H2PingPong {
timer: Pin<Box<Sleep>>,
on_flight: bool,
/// Handle for sending ping frames to the peer and receiving the corresponding pongs.
ping_pong: PingPong,
/// True when a ping has been sent and is waiting for a reply.
in_flight: bool,
/// Timeout for pong response.
timer: Pin<Box<Sleep>>,
}
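// Keep-alive cycle: while no ping is in flight the timer acts as an interval; when it fires,
// a ping is sent and `in_flight` is set. While a ping is in flight the same timer doubles as
// the pong timeout and is reset to the keep-alive deadline once the pong arrives.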
impl<T, S, B, X, U> Future for Dispatcher<T, S, B, X, U>
@ -141,7 +148,7 @@ where
DispatchError::SendResponse(err) => {
trace!("Error sending HTTP/2 response: {:?}", err)
}
DispatchError::SendData(err) => log::warn!("{:?}", err),
DispatchError::SendData(err) => warn!("{:?}", err),
DispatchError::ResponseBody(err) => {
error!("Response payload stream error: {:?}", err)
}
@ -150,26 +157,28 @@ where
});
}
Poll::Ready(None) => return Poll::Ready(Ok(())),
Poll::Pending => match this.ping_pong.as_mut() {
Some(ping_pong) => loop {
if ping_pong.on_flight {
// When have on flight ping pong. poll pong and and keep alive timer.
// on success pong received update keep alive timer to determine the next timing of
// ping pong.
if ping_pong.in_flight {
// When there is an in-flight ping-pong, poll the pong and the keep-alive
// timer. Once the pong is received, reset the keep-alive timer to
// determine the timing of the next ping-pong.
match ping_pong.ping_pong.poll_pong(cx)? {
Poll::Ready(_) => {
ping_pong.on_flight = false;
ping_pong.in_flight = false;
let dead_line = this.config.keep_alive_deadline().unwrap();
ping_pong.timer.as_mut().reset(dead_line.into());
}
Poll::Pending => {
return ping_pong.timer.as_mut().poll(cx).map(|_| Ok(()))
return ping_pong.timer.as_mut().poll(cx).map(|_| Ok(()));
}
}
} else {
// When there is no on flight ping pong. keep alive timer is used to wait for next
// timing of ping pong. Therefore at this point it serves as an interval instead.
// When there is no in-flight ping-pong, the keep-alive timer is used to
// wait for the next ping-pong cycle; at this point it therefore serves
// as an interval timer instead.
ready!(ping_pong.timer.as_mut().poll(cx));
ping_pong.ping_pong.send_ping(Ping::opaque())?;
@ -177,7 +186,7 @@ where
let dead_line = this.config.keep_alive_deadline().unwrap();
ping_pong.timer.as_mut().reset(dead_line.into());
ping_pong.on_flight = true;
ping_pong.in_flight = true;
}
},
None => return Poll::Pending,
@ -285,13 +294,13 @@ fn prepare_response(
_ => {}
}
let _ = match size {
BodySize::None | BodySize::Stream => None,
match size {
BodySize::None | BodySize::Stream => {}
BodySize::Sized(0) => {
#[allow(clippy::declare_interior_mutable_const)]
const HV_ZERO: HeaderValue = HeaderValue::from_static("0");
res.headers_mut().insert(CONTENT_LENGTH, HV_ZERO)
res.headers_mut().insert(CONTENT_LENGTH, HV_ZERO);
}
BodySize::Sized(len) => {
@ -300,19 +309,28 @@ fn prepare_response(
res.headers_mut().insert(
CONTENT_LENGTH,
HeaderValue::from_str(buf.format(*len)).unwrap(),
)
);
}
};
// copy headers
for (key, value) in head.headers.iter() {
match *key {
// TODO: consider skipping other headers according to:
match key {
// omit HTTP/1.x only headers according to:
// https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.2
// omit HTTP/1.x only headers
CONNECTION | TRANSFER_ENCODING => continue,
CONTENT_LENGTH if skip_len => continue,
DATE => has_date = true,
&CONNECTION | &TRANSFER_ENCODING | &UPGRADE => continue,
&CONTENT_LENGTH if skip_len => continue,
&DATE => has_date = true,
// omit HTTP/1.x only headers according to:
// https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.2
hdr if hdr == HeaderName::from_static("keep-alive")
|| hdr == HeaderName::from_static("proxy-connection") =>
{
continue
}
_ => {}
}


@ -103,11 +103,9 @@ where
#[cfg(test)]
mod tests {
use std::panic::{RefUnwindSafe, UnwindSafe};
use static_assertions::assert_impl_all;
use super::*;
assert_impl_all!(Payload: Unpin, Send, Sync, UnwindSafe, RefUnwindSafe);
assert_impl_all!(Payload: Unpin, Send, Sync);
}


@ -14,7 +14,7 @@ use actix_service::{
};
use actix_utils::future::ready;
use futures_core::{future::LocalBoxFuture, ready};
use log::error;
use tracing::{error, trace};
use crate::{
body::{BoxBody, MessageBody},
@ -117,6 +117,7 @@ mod openssl {
B: MessageBody + 'static,
{
/// Create OpenSSL based service.
#[cfg_attr(docsrs, doc(cfg(feature = "openssl")))]
pub fn openssl(
self,
acceptor: SslAcceptor,
@ -164,6 +165,7 @@ mod rustls {
B: MessageBody + 'static,
{
/// Create Rustls based service.
#[cfg_attr(docsrs, doc(cfg(feature = "rustls")))]
pub fn rustls(
self,
mut config: ServerConfig,
@ -355,7 +357,7 @@ where
}
Err(err) => {
log::trace!("H2 handshake error: {}", err);
trace!("H2 handshake error: {}", err);
Poll::Ready(Err(err))
}
},


@ -12,7 +12,7 @@ use crate::header::{Charset, HTTP_VALUE};
/// - A character sequence representing the actual value (`value`), separated by single quotes.
///
/// It is defined in [RFC 5987 §3.2](https://datatracker.ietf.org/doc/html/rfc5987#section-3.2).
#[derive(Clone, Debug, PartialEq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExtendedValue {
/// The character set that is used to encode the `value` to a string.
pub charset: Charset,


@ -147,7 +147,7 @@ mod tests {
// copy of encoding from actix-web headers
#[allow(clippy::enum_variant_names)] // allow Encoding prefix on EncodingExt
#[derive(Clone, PartialEq, Debug)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Encoding {
Chunked,
Brotli,


@ -3,6 +3,7 @@
//! ## Crate Features
//! | Feature | Functionality |
//! | ------------------- | ------------------------------------------- |
//! | `http2` | HTTP/2 support via [h2]. |
//! | `openssl` | TLS support via [OpenSSL]. |
//! | `rustls` | TLS support via [rustls]. |
//! | `compress-brotli` | Payload compression support: Brotli. |
@ -10,6 +11,7 @@
//! | `compress-zstd` | Payload compression support: Zstd. |
//! | `trust-dns` | Use [trust-dns] as the client DNS resolver. |
//!
//! [h2]: https://crates.io/crates/h2
//! [OpenSSL]: https://crates.io/crates/openssl
//! [rustls]: https://crates.io/crates/rustls
//! [trust-dns]: https://crates.io/crates/trust-dns
@ -23,6 +25,7 @@
)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_cfg))]
pub use ::http::{uri, uri::Uri};
pub use ::http::{Method, StatusCode, Version};
@ -37,6 +40,7 @@ pub mod error;
mod extensions;
pub mod h1;
#[cfg(feature = "http2")]
#[cfg_attr(docsrs, doc(cfg(feature = "http2")))]
pub mod h2;
pub mod header;
mod helpers;
@ -51,6 +55,7 @@ mod responses;
mod service;
pub mod test;
#[cfg(feature = "ws")]
#[cfg_attr(docsrs, doc(cfg(feature = "ws")))]
pub mod ws;
pub use self::builder::HttpServiceBuilder;
@ -67,6 +72,9 @@ pub use self::payload::{BoxedPayloadStream, Payload, PayloadStream};
pub use self::requests::{Request, RequestHead, RequestHeadType};
pub use self::responses::{Response, ResponseBuilder, ResponseHead};
pub use self::service::HttpService;
#[cfg(any(feature = "openssl", feature = "rustls"))]
#[cfg_attr(docsrs, doc(cfg(any(feature = "openssl", feature = "rustls"))))]
pub use self::service::TlsAcceptorConfig;
/// A major HTTP protocol version.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]


@ -3,7 +3,7 @@ use std::{cell::RefCell, ops, rc::Rc};
use bitflags::bitflags;
/// Represents various types of connection
#[derive(Copy, Clone, PartialEq, Debug)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ConnectionType {
/// Close connection after response.
Close,


@ -13,7 +13,8 @@ use crate::error::PayloadError;
/// A boxed payload stream.
pub type BoxedPayloadStream = Pin<Box<dyn Stream<Item = Result<Bytes, PayloadError>>>>;
#[deprecated(since = "4.0.0", note = "Renamed to `BoxedPayloadStream`.")]
#[doc(hidden)]
#[deprecated(since = "3.0.0", note = "Renamed to `BoxedPayloadStream`.")]
pub type PayloadStream = BoxedPayloadStream;
#[cfg(not(feature = "http2"))]
@ -96,12 +97,10 @@ where
#[cfg(test)]
mod tests {
use std::panic::{RefUnwindSafe, UnwindSafe};
use static_assertions::{assert_impl_all, assert_not_impl_any};
use super::*;
assert_impl_all!(Payload: Unpin);
assert_not_impl_any!(Payload: Send, Sync, UnwindSafe, RefUnwindSafe);
assert_not_impl_any!(Payload: Send, Sync);
}


@ -144,7 +144,7 @@ impl ResponseBuilder {
self
}
/// Set connection type to Upgrade
/// Set connection type to `Upgrade`.
#[inline]
pub fn upgrade<V>(&mut self, value: V) -> &mut Self
where
@ -161,7 +161,7 @@ impl ResponseBuilder {
self
}
/// Force close connection, even if it is marked as keep-alive
/// Force-close connection, even if it is marked as keep-alive.
#[inline]
pub fn force_close(&mut self) -> &mut Self {
if let Some(parts) = self.inner() {


@ -237,7 +237,7 @@ mod tests {
.await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream
stream
.write_all(b"GET /camel HTTP/1.1\r\nConnection: Close\r\n\r\n")
.unwrap();
let mut data = vec![];
@ -251,7 +251,7 @@ mod tests {
assert!(memmem::find(&data, b"content-length").is_none());
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream
stream
.write_all(b"GET /lower HTTP/1.1\r\nConnection: Close\r\n\r\n")
.unwrap();
let mut data = vec![];


@ -285,6 +285,24 @@ impl From<&'static [u8]> for Response<&'static [u8]> {
}
}
impl From<Vec<u8>> for Response<Vec<u8>> {
fn from(val: Vec<u8>) -> Self {
let mut res = Response::with_body(StatusCode::OK, val);
let mime = mime::APPLICATION_OCTET_STREAM.try_into_value().unwrap();
res.headers_mut().insert(header::CONTENT_TYPE, mime);
res
}
}
impl From<&Vec<u8>> for Response<Vec<u8>> {
fn from(val: &Vec<u8>) -> Self {
let mut res = Response::with_body(StatusCode::OK, val.clone());
let mime = mime::APPLICATION_OCTET_STREAM.try_into_value().unwrap();
res.headers_mut().insert(header::CONTENT_TYPE, mime);
res
}
}
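// Usage sketch: `let res: Response<Vec<u8>> = vec![0x01, 0x02].into();` yields a 200 OK
// response with an `application/octet-stream` content type, mirroring the `String`
// conversions below.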
impl From<String> for Response<String> {
fn from(val: String) -> Self {
let mut res = Response::with_body(StatusCode::OK, val);


@ -15,6 +15,7 @@ use actix_service::{
};
use futures_core::{future::LocalBoxFuture, ready};
use pin_project_lite::pin_project;
use tracing::error;
use crate::{
body::{BoxBody, MessageBody},
@ -180,6 +181,25 @@ where
}
}
/// Configuration options used when accepting TLS connection.
#[cfg(any(feature = "openssl", feature = "rustls"))]
#[cfg_attr(docsrs, doc(cfg(any(feature = "openssl", feature = "rustls"))))]
#[derive(Debug, Default)]
pub struct TlsAcceptorConfig {
pub(crate) handshake_timeout: Option<std::time::Duration>,
}
#[cfg(any(feature = "openssl", feature = "rustls"))]
impl TlsAcceptorConfig {
/// Set TLS handshake timeout duration.
pub fn handshake_timeout(self, dur: std::time::Duration) -> Self {
Self {
handshake_timeout: Some(dur),
// ..self
}
}
}
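// Editor's usage sketch (not part of this diff; `svc` and `server_config` are hypothetical,
// and the `rustls` feature is assumed):
//
// HttpService::build()
//     .finish(svc)
//     .rustls_with_config(
//         server_config,
//         TlsAcceptorConfig::default().handshake_timeout(std::time::Duration::from_secs(5)),
//     );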
#[cfg(feature = "openssl")]
mod openssl {
use actix_service::ServiceFactoryExt as _;
@ -219,6 +239,7 @@ mod openssl {
U::InitError: fmt::Debug,
{
/// Create OpenSSL based service.
#[cfg_attr(docsrs, doc(cfg(feature = "openssl")))]
pub fn openssl(
self,
acceptor: SslAcceptor,
@ -229,7 +250,29 @@ mod openssl {
Error = TlsError<SslError, DispatchError>,
InitError = (),
> {
Acceptor::new(acceptor)
self.openssl_with_config(acceptor, TlsAcceptorConfig::default())
}
/// Create OpenSSL based service with custom TLS acceptor configuration.
#[cfg_attr(docsrs, doc(cfg(feature = "openssl")))]
pub fn openssl_with_config(
self,
acceptor: SslAcceptor,
tls_acceptor_config: TlsAcceptorConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<SslError, DispatchError>,
InitError = (),
> {
let mut acceptor = Acceptor::new(acceptor);
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
acceptor.set_handshake_timeout(handshake_timeout);
}
acceptor
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
@ -291,9 +334,26 @@ mod rustls {
U::InitError: fmt::Debug,
{
/// Create Rustls based service.
#[cfg_attr(docsrs, doc(cfg(feature = "rustls")))]
pub fn rustls(
self,
config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
self.rustls_with_config(config, TlsAcceptorConfig::default())
}
/// Create Rustls based service with custom TLS acceptor configuration.
#[cfg_attr(docsrs, doc(cfg(feature = "rustls")))]
pub fn rustls_with_config(
self,
mut config: ServerConfig,
tls_acceptor_config: TlsAcceptorConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
@ -305,7 +365,13 @@ mod rustls {
protos.extend_from_slice(&config.alpn_protocols);
config.alpn_protocols = protos;
Acceptor::new(config)
let mut acceptor = Acceptor::new(config);
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
acceptor.set_handshake_timeout(handshake_timeout);
}
acceptor
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
@ -369,13 +435,13 @@ where
Box::pin(async move {
let expect = expect
.await
.map_err(|e| log::error!("Init http expect service error: {:?}", e))?;
.map_err(|e| error!("Init http expect service error: {:?}", e))?;
let upgrade = match upgrade {
Some(upgrade) => {
let upgrade = upgrade
.await
.map_err(|e| log::error!("Init http upgrade service error: {:?}", e))?;
.map_err(|e| error!("Init http upgrade service error: {:?}", e))?;
Some(upgrade)
}
None => None,
@ -383,7 +449,7 @@ where
let service = service
.await
.map_err(|e| log::error!("Init http service error: {:?}", e))?;
.map_err(|e| error!("Init http service error: {:?}", e))?;
Ok(HttpServiceHandler::new(
cfg,
@ -490,7 +556,7 @@ where
fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
self._poll_ready(cx).map_err(|err| {
log::error!("HTTP service readiness error: {:?}", err);
error!("HTTP service readiness error: {:?}", err);
DispatchError::Service(err)
})
}
@ -666,7 +732,7 @@ where
self.poll(cx)
}
Err(err) => {
log::trace!("H2 handshake error: {}", err);
tracing::trace!("H2 handshake error: {}", err);
Poll::Ready(Err(err))
}
}


@ -19,29 +19,7 @@ use crate::{
Request,
};
/// Test `Request` builder
///
/// ```ignore
/// # use http::{header, StatusCode};
/// # use actix_web::*;
/// use actix_web::test::TestRequest;
///
/// fn index(req: &HttpRequest) -> Response {
/// if let Some(hdr) = req.headers().get(header::CONTENT_TYPE) {
/// Response::Ok().into()
/// } else {
/// Response::BadRequest().into()
/// }
/// }
///
/// let resp = TestRequest::default().insert_header("content-type", "text/plain")
/// .run(&index)
/// .unwrap();
/// assert_eq!(resp.status(), StatusCode::OK);
///
/// let resp = TestRequest::default().run(&index).unwrap();
/// assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
/// ```
/// Test `Request` builder.
pub struct TestRequest(Option<Inner>);
struct Inner {


@ -2,6 +2,7 @@ use actix_codec::{Decoder, Encoder};
use bitflags::bitflags;
use bytes::{Bytes, BytesMut};
use bytestring::ByteString;
use tracing::error;
use super::{
frame::Parser,
@ -10,7 +11,7 @@ use super::{
};
/// A WebSocket message.
#[derive(Debug, PartialEq)]
#[derive(Debug, PartialEq, Eq)]
pub enum Message {
/// Text message.
Text(ByteString),
@ -35,7 +36,7 @@ pub enum Message {
}
/// A WebSocket frame.
#[derive(Debug, PartialEq)]
#[derive(Debug, PartialEq, Eq)]
pub enum Frame {
/// Text frame. Note that the codec does not validate UTF-8 encoding.
Text(Bytes),
@ -57,7 +58,7 @@ pub enum Frame {
}
/// A WebSocket continuation item.
#[derive(Debug, PartialEq)]
#[derive(Debug, PartialEq, Eq)]
pub enum Item {
FirstText(Bytes),
FirstBinary(Bytes),
@ -253,7 +254,7 @@ impl Decoder for Codec {
}
}
_ => {
log::error!("Unfinished fragment {:?}", opcode);
error!("Unfinished fragment {:?}", opcode);
Err(ProtocolError::ContinuationFragment(opcode))
}
};


@ -73,8 +73,8 @@ mod inner {
use actix_service::{IntoService, Service};
use futures_core::stream::Stream;
use local_channel::mpsc;
use log::debug;
use pin_project_lite::pin_project;
use tracing::debug;
use actix_codec::{AsyncRead, AsyncWrite, Decoder, Encoder, Framed};


@ -1,7 +1,7 @@
use std::convert::TryFrom;
use bytes::{Buf, BufMut, BytesMut};
use log::debug;
use tracing::debug;
use super::{
mask::apply_mask,
@ -17,7 +17,6 @@ impl Parser {
fn parse_metadata(
src: &[u8],
server: bool,
max_size: usize,
) -> Result<Option<(usize, bool, OpCode, usize, Option<[u8; 4]>)>, ProtocolError> {
let chunk_len = src.len();
@ -60,20 +59,12 @@ impl Parser {
return Ok(None);
}
let len = u64::from_be_bytes(TryFrom::try_from(&src[idx..idx + 8]).unwrap());
if len > max_size as u64 {
return Err(ProtocolError::Overflow);
}
idx += 8;
len as usize
} else {
len as usize
};
// check for max allowed size
if length > max_size {
return Err(ProtocolError::Overflow);
}
let mask = if server {
if chunk_len < idx + 4 {
return Ok(None);
@ -98,8 +89,7 @@ impl Parser {
max_size: usize,
) -> Result<Option<(bool, OpCode, Option<BytesMut>)>, ProtocolError> {
// try to parse ws frame metadata
let (idx, finished, opcode, length, mask) =
match Parser::parse_metadata(src, server, max_size)? {
let (idx, finished, opcode, length, mask) = match Parser::parse_metadata(src, server)? {
None => return Ok(None),
Some(res) => res,
};
@ -112,6 +102,13 @@ impl Parser {
// remove prefix
src.advance(idx);
// check for max allowed size
if length > max_size {
// drop the payload
src.advance(length);
return Err(ProtocolError::Overflow);
}
// no need for body
if length == 0 {
return Ok(Some((finished, opcode, None)));
@ -339,6 +336,30 @@ mod tests {
}
}
#[test]
fn test_parse_frame_max_size_recoverability() {
let mut buf = BytesMut::new();
// The first text frame with length == 2, payload doesn't matter.
buf.extend(&[0b0000_0001u8, 0b0000_0010u8, 0b0000_0000u8, 0b0000_0000u8]);
// Next binary frame with length == 2 and payload == `[0x1111_1111u8, 0x1111_1111u8]`.
buf.extend(&[0b0000_0010u8, 0b0000_0010u8, 0b1111_1111u8, 0b1111_1111u8]);
assert_eq!(buf.len(), 8);
assert!(matches!(
Parser::parse(&mut buf, false, 1),
Err(ProtocolError::Overflow)
));
assert_eq!(buf.len(), 4);
let frame = extract(Parser::parse(&mut buf, false, 2));
assert!(!frame.finished);
assert_eq!(frame.opcode, OpCode::Binary);
assert_eq!(
frame.payload,
Bytes::from(vec![0b1111_1111u8, 0b1111_1111u8])
);
assert_eq!(buf.len(), 0);
}
#[test]
fn test_ping_frame() {
let mut buf = BytesMut::new();


@ -47,40 +47,6 @@ pub fn apply_mask_fast32(buf: &mut [u8], mask: [u8; 4]) {
mod tests {
use super::*;
// legacy test from old apply mask test. kept for now for back compat test.
// TODO: remove it and favor the other test.
#[test]
fn test_apply_mask_legacy() {
let mask = [0x6d, 0xb6, 0xb2, 0x80];
let unmasked = vec![
0xf3, 0x00, 0x01, 0x02, 0x03, 0x80, 0x81, 0x82, 0xff, 0xfe, 0x00, 0x17, 0x74, 0xf9,
0x12, 0x03,
];
// Check masking with proper alignment.
{
let mut masked = unmasked.clone();
apply_mask_fallback(&mut masked, mask);
let mut masked_fast = unmasked.clone();
apply_mask(&mut masked_fast, mask);
assert_eq!(masked, masked_fast);
}
// Check masking without alignment.
{
let mut masked = unmasked.clone();
apply_mask_fallback(&mut masked[1..], mask);
let mut masked_fast = unmasked;
apply_mask(&mut masked_fast[1..], mask);
assert_eq!(masked, masked_fast);
}
}
#[test]
fn test_apply_mask() {
let mask = [0x6d, 0xb6, 0xb2, 0x80];


@ -67,7 +67,7 @@ pub enum ProtocolError {
}
/// WebSocket handshake errors
#[derive(Debug, Clone, Copy, PartialEq, Display, Error)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Display, Error)]
pub enum HandshakeError {
/// Only get method is allowed.
#[display(fmt = "Method not allowed.")]


@ -3,6 +3,8 @@ use std::{
fmt,
};
use tracing::error;
/// Operation codes defined in [RFC 6455 §11.8].
///
/// [RFC 6455 §11.8]: https://datatracker.ietf.org/doc/html/rfc6455#section-11.8
@ -58,7 +60,7 @@ impl From<OpCode> for u8 {
Ping => 9,
Pong => 10,
Bad => {
log::error!("Attempted to convert invalid opcode to u8. This is a bug.");
error!("Attempted to convert invalid opcode to u8. This is a bug.");
8 // if this somehow happens, a close frame will help us tear down quickly
}
}


@ -2,13 +2,13 @@
extern crate tls_openssl as openssl;
use std::{convert::Infallible, io};
use std::{convert::Infallible, io, time::Duration};
use actix_http::{
body::{BodyStream, BoxBody, SizedStream},
error::PayloadError,
header::{self, HeaderValue},
Error, HttpService, Method, Request, Response, StatusCode, Version,
Error, HttpService, Method, Request, Response, StatusCode, TlsAcceptorConfig, Version,
};
use actix_http_test::test_server;
use actix_service::{fn_service, ServiceFactoryExt};
@ -66,7 +66,7 @@ fn tls_config() -> SslAcceptor {
}
#[actix_rt::test]
async fn test_h2() -> io::Result<()> {
async fn h2() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Error>(Response::ok()))
@ -81,7 +81,7 @@ async fn test_h2() -> io::Result<()> {
}
#[actix_rt::test]
async fn test_h2_1() -> io::Result<()> {
async fn h2_1() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.finish(|req: Request| {
@ -89,7 +89,10 @@ async fn test_h2_1() -> io::Result<()> {
assert_eq!(req.version(), Version::HTTP_2);
ok::<_, Error>(Response::ok())
})
.openssl(tls_config())
.openssl_with_config(
tls_config(),
TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)),
)
.map_err(|_| ())
})
.await;
@ -100,7 +103,7 @@ async fn test_h2_1() -> io::Result<()> {
}
#[actix_rt::test]
async fn test_h2_body() -> io::Result<()> {
async fn h2_body() -> io::Result<()> {
let data = "HELLOWORLD".to_owned().repeat(64 * 1024); // 640 KiB
let mut srv = test_server(move || {
HttpService::build()
@ -122,7 +125,7 @@ async fn test_h2_body() -> io::Result<()> {
}
#[actix_rt::test]
async fn test_h2_content_length() {
async fn h2_content_length() {
let srv = test_server(move || {
HttpService::build()
.h2(|req: Request| {
@ -164,7 +167,7 @@ async fn test_h2_content_length() {
}
#[actix_rt::test]
async fn test_h2_headers() {
async fn h2_headers() {
let data = STR.repeat(10);
let data2 = data.clone();
@ -229,7 +232,7 @@ const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World";
#[actix_rt::test]
async fn test_h2_body2() {
async fn h2_body2() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
@ -247,7 +250,7 @@ async fn test_h2_body2() {
}
#[actix_rt::test]
async fn test_h2_head_empty() {
async fn h2_head_empty() {
let mut srv = test_server(move || {
HttpService::build()
.finish(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
@ -271,7 +274,7 @@ async fn test_h2_head_empty() {
}
#[actix_rt::test]
async fn test_h2_head_binary() {
async fn h2_head_binary() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
@ -294,7 +297,7 @@ async fn test_h2_head_binary() {
}
#[actix_rt::test]
async fn test_h2_head_binary2() {
async fn h2_head_binary2() {
let srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
@ -313,7 +316,7 @@ async fn test_h2_head_binary2() {
}
#[actix_rt::test]
async fn test_h2_body_length() {
async fn h2_body_length() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| async {
@ -338,7 +341,7 @@ async fn test_h2_body_length() {
}
#[actix_rt::test]
async fn test_h2_body_chunked_explicit() {
async fn h2_body_chunked_explicit() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| {
@ -366,7 +369,7 @@ async fn test_h2_body_chunked_explicit() {
}
#[actix_rt::test]
async fn test_h2_response_http_error_handling() {
async fn h2_response_http_error_handling() {
let mut srv = test_server(move || {
HttpService::build()
.h2(fn_service(|_| {
@ -406,7 +409,7 @@ impl From<BadRequest> for Response<BoxBody> {
}
#[actix_rt::test]
async fn test_h2_service_error() {
async fn h2_service_error() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| err::<Response<BoxBody>, _>(BadRequest))
@ -424,7 +427,7 @@ async fn test_h2_service_error() {
}
#[actix_rt::test]
async fn test_h2_on_connect() {
async fn h2_on_connect() {
let srv = test_server(move || {
HttpService::build()
.on_connect_ext(|_, data| {


@ -8,13 +8,14 @@ use std::{
net::{SocketAddr, TcpStream as StdTcpStream},
sync::Arc,
task::Poll,
time::Duration,
};
use actix_http::{
body::{BodyStream, BoxBody, SizedStream},
error::PayloadError,
header::{self, HeaderName, HeaderValue},
Error, HttpService, Method, Request, Response, StatusCode, Version,
Error, HttpService, Method, Request, Response, StatusCode, TlsAcceptorConfig, Version,
};
use actix_http_test::test_server;
use actix_rt::pin;
@ -106,7 +107,7 @@ pub fn get_negotiated_alpn_protocol(
}
#[actix_rt::test]
async fn test_h1() -> io::Result<()> {
async fn h1() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.h1(|_| ok::<_, Error>(Response::ok()))
@ -120,7 +121,7 @@ async fn test_h1() -> io::Result<()> {
}
#[actix_rt::test]
async fn test_h2() -> io::Result<()> {
async fn h2() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Error>(Response::ok()))
@ -134,7 +135,7 @@ async fn test_h2() -> io::Result<()> {
}
#[actix_rt::test]
async fn test_h1_1() -> io::Result<()> {
async fn h1_1() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.h1(|req: Request| {
@ -152,7 +153,7 @@ async fn test_h1_1() -> io::Result<()> {
}
#[actix_rt::test]
async fn test_h2_1() -> io::Result<()> {
async fn h2_1() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.finish(|req: Request| {
@ -160,7 +161,10 @@ async fn test_h2_1() -> io::Result<()> {
assert_eq!(req.version(), Version::HTTP_2);
ok::<_, Error>(Response::ok())
})
.rustls(tls_config())
.rustls_with_config(
tls_config(),
TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)),
)
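// `TlsAcceptorConfig::handshake_timeout` (added in actix-http 3.2) bounds how long the TLS
// handshake may take; connections that have not completed the handshake within 5 seconds
// are dropped by this test server.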
})
.await;
@ -170,7 +174,7 @@ async fn test_h2_1() -> io::Result<()> {
}
#[actix_rt::test]
async fn test_h2_body1() -> io::Result<()> {
async fn h2_body1() -> io::Result<()> {
let data = "HELLOWORLD".to_owned().repeat(64 * 1024);
let mut srv = test_server(move || {
HttpService::build()
@ -191,7 +195,7 @@ async fn test_h2_body1() -> io::Result<()> {
}
#[actix_rt::test]
async fn test_h2_content_length() {
async fn h2_content_length() {
let srv = test_server(move || {
HttpService::build()
.h2(|req: Request| {
@ -212,6 +216,7 @@ async fn test_h2_content_length() {
let value = HeaderValue::from_static("0");
{
#[allow(clippy::single_element_loop)]
for &i in &[0] {
let req = srv
.request(Method::HEAD, srv.surl(&format!("/{}", i)))
@ -226,6 +231,7 @@ async fn test_h2_content_length() {
// assert_eq!(response.headers().get(&header), None);
}
#[allow(clippy::single_element_loop)]
for &i in &[1] {
let req = srv
.request(Method::GET, srv.surl(&format!("/{}", i)))
@ -245,7 +251,7 @@ async fn test_h2_content_length() {
}
#[actix_rt::test]
async fn test_h2_headers() {
async fn h2_headers() {
let data = STR.repeat(10);
let data2 = data.clone();
@ -309,7 +315,7 @@ const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World";
#[actix_rt::test]
async fn test_h2_body2() {
async fn h2_body2() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
@ -326,7 +332,7 @@ async fn test_h2_body2() {
}
#[actix_rt::test]
async fn test_h2_head_empty() {
async fn h2_head_empty() {
let mut srv = test_server(move || {
HttpService::build()
.finish(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
@ -352,7 +358,7 @@ async fn test_h2_head_empty() {
}
#[actix_rt::test]
async fn test_h2_head_binary() {
async fn h2_head_binary() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
@ -377,7 +383,7 @@ async fn test_h2_head_binary() {
}
#[actix_rt::test]
async fn test_h2_head_binary2() {
async fn h2_head_binary2() {
let srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
@ -398,7 +404,7 @@ async fn test_h2_head_binary2() {
}
#[actix_rt::test]
async fn test_h2_body_length() {
async fn h2_body_length() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| {
@ -420,7 +426,7 @@ async fn test_h2_body_length() {
}
#[actix_rt::test]
async fn test_h2_body_chunked_explicit() {
async fn h2_body_chunked_explicit() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| {
@ -447,7 +453,7 @@ async fn test_h2_body_chunked_explicit() {
}
#[actix_rt::test]
async fn test_h2_response_http_error_handling() {
async fn h2_response_http_error_handling() {
let mut srv = test_server(move || {
HttpService::build()
.h2(fn_factory_with_config(|_: ()| {
@ -486,7 +492,7 @@ impl From<BadRequest> for Response<BoxBody> {
}
#[actix_rt::test]
async fn test_h2_service_error() {
async fn h2_service_error() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| err::<Response<BoxBody>, _>(BadRequest))
@ -503,7 +509,7 @@ async fn test_h2_service_error() {
}
#[actix_rt::test]
async fn test_h1_service_error() {
async fn h1_service_error() {
let mut srv = test_server(move || {
HttpService::build()
.h1(|_| err::<Response<BoxBody>, _>(BadRequest))
@ -524,7 +530,7 @@ const HTTP1_1_ALPN_PROTOCOL: &[u8] = b"http/1.1";
const CUSTOM_ALPN_PROTOCOL: &[u8] = b"custom";
#[actix_rt::test]
async fn test_alpn_h1() -> io::Result<()> {
async fn alpn_h1() -> io::Result<()> {
let srv = test_server(move || {
let mut config = tls_config();
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
@ -546,7 +552,7 @@ async fn test_alpn_h1() -> io::Result<()> {
}
#[actix_rt::test]
async fn test_alpn_h2() -> io::Result<()> {
async fn alpn_h2() -> io::Result<()> {
let srv = test_server(move || {
let mut config = tls_config();
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
@ -572,7 +578,7 @@ async fn test_alpn_h2() -> io::Result<()> {
}
#[actix_rt::test]
async fn test_alpn_h2_1() -> io::Result<()> {
async fn alpn_h2_1() -> io::Result<()> {
let srv = test_server(move || {
let mut config = tls_config();
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());

View File

@ -850,7 +850,8 @@ async fn not_modified_spec_h1() {
Some(&header::HeaderValue::from_static("4")),
);
// server does not prevent payload from being sent but clients may choose not to read it
// TODO: this is probably a bug, especially since CL header can differ in length from the body
// TODO: this is probably a bug in the client, especially since CL header can differ in length
// from the body
assert!(!srv.load_body(res).await.unwrap().is_empty());
// TODO: add stream response tests

View File

@ -1,6 +1,11 @@
# Changes
## Unreleased - 2021-xx-xx
## Unreleased - 2022-xx-xx
- Minimum supported Rust version (MSRV) is now 1.59 due to transitive `time` dependency.
## 0.4.0 - 2022-02-25
- No significant changes since `0.4.0-beta.13`.
## 0.4.0-beta.13 - 2022-01-31

View File

@ -1,6 +1,6 @@
[package]
name = "actix-multipart"
version = "0.4.0-beta.13"
version = "0.4.0"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Multipart form support for Actix Web"
keywords = ["http", "web", "framework", "async", "futures"]
@ -14,8 +14,8 @@ name = "actix_multipart"
path = "src/lib.rs"
[dependencies]
actix-utils = "3.0.0"
actix-web = { version = "4.0.0-rc.1", default-features = false }
actix-utils = "3"
actix-web = { version = "4", default-features = false }
bytes = "1"
derive_more = "0.99.5"
@ -28,7 +28,7 @@ twoway = "0.2"
[dev-dependencies]
actix-rt = "2.2"
actix-http = "3.0.0-rc.1"
actix-http = "3"
futures-util = { version = "0.3.7", default-features = false, features = ["alloc"] }
tokio = { version = "1.8.4", features = ["sync"] }
tokio-stream = "0.1"

View File

@ -3,11 +3,11 @@
> Multipart form support for Actix Web.
[![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart)
[![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.4.0-beta.13)](https://docs.rs/actix-multipart/0.4.0-beta.13)
[![Version](https://img.shields.io/badge/rustc-1.54+-ab6000.svg)](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
[![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.4.0)](https://docs.rs/actix-multipart/0.4.0)
![Version](https://img.shields.io/badge/rustc-1.59+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-multipart/0.4.0-beta.13/status.svg)](https://deps.rs/crate/actix-multipart/0.4.0-beta.13)
[![dependency status](https://deps.rs/crate/actix-multipart/0.4.0/status.svg)](https://deps.rs/crate/actix-multipart/0.4.0)
[![Download](https://img.shields.io/crates/d/actix-multipart.svg)](https://crates.io/crates/actix-multipart)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)

View File

@ -1,8 +1,80 @@
# Changes
## Unreleased - 2021-xx-xx
## Unreleased - 2022-xx-xx
- Minimum supported Rust version (MSRV) is now 1.59 due to transitive `time` dependency.
## 0.5.0 - 2022-02-22
### Added
- Add `Path::as_str`. [#2590]
- Add `ResourceDef::set_name`. [#373][net#373]
- Add `RouterBuilder::push`. [#2612]
- Implement `IntoPatterns` for `bytestring::ByteString`. [#372][net#372]
- Introduce `ResourceDef::join`. [#380][net#380]
- Introduce `ResourceDef::pattern_iter` to get an iterator over all patterns in a multi-pattern resource. [#373][net#373]
- `Resource` is now implemented for `&mut Path<_>` and `RefMut<Path<_>>`. [#2568]
- Support `build_resource_path` on multi-pattern resources. [#2356]
- Support multi-pattern prefixes and joins. [#2356]
### Changed
- Change signature of `ResourceDef::capture_match_info_fn` to remove `user_data` parameter. [#2612]
- Deprecate `Path::path`. [#2590]
- Disallow prefix routes with tail segments. [#379][net#379]
- Enforce path separators on dynamic prefixes. [#378][net#378]
- Minimum supported Rust version (MSRV) is now 1.54.
- Prefix segments now always end with a segment delimiter or end-of-input. [#2355]
- Prefix segments with trailing slashes define a trailing empty segment. [#2355]
- `Quoter::requote` now returns `Option<Vec<u8>>`. [#2613]
- Re-work `IntoPatterns` trait, adding a `Patterns` enum. [#372][net#372]
- Rename `Path::{len => segment_count}` to be more descriptive of its purpose. [#370][net#370]
- Rename `ResourceDef::{is_prefix_match => find_match}`. [#373][net#373]
- Rename `ResourceDef::{match_path => capture_match_info}`. [#373][net#373]
- Rename `ResourceDef::{match_path_checked => capture_match_info_fn}`. [#373][net#373]
- Rename `ResourceDef::{resource_path => resource_path_from_iter}`. [#371][net#371]
- Rename `ResourceDef::{resource_path_named => resource_path_from_map}`. [#371][net#371]
- Rename `Router::{*_checked => *_fn}`. [#373][net#373]
- Replace `Option<U>` with `U` in `Router` API. [#2612]
- `Resource` trait now uses an associated type, `Path`, instead of a generic parameter. [#2568]
- `ResourceDef::pattern` now returns the first pattern in multi-pattern resources. [#2356]
- `ResourceDef::resource_path_from_iter` now takes an `IntoIterator`. [#373][net#373]
- Return type of `ResourceDef::name` is now `Option<&str>`. [#373][net#373]
- Return type of `ResourceDef::pattern` is now `Option<&str>`. [#373][net#373]
### Fixed
- Fix `ResourceDef`'s `PartialEq` implementation. [#373][net#373]
- Fix segment interpolation leaving `Path` in unintended state after matching. [#368][net#368]
- Improve malformed path error message. [#384][net#384]
- `PathDeserializer` now decodes all percent encoded characters in dynamic segments. [#2566]
- Relax bounds on `Router::recognize*` and `ResourceDef::capture_match_info`. [#2612]
- Static patterns in multi-patterns are no longer interpreted as regex. [#366][net#366]
### Removed
- `ResourceDef::name_mut`. [#373][net#373]
- Unused `ResourceInfo`. [#2612]
[#2355]: https://github.com/actix/actix-web/pull/2355
[#2356]: https://github.com/actix/actix-web/pull/2356
[#2566]: https://github.com/actix/actix-net/pull/2566
[#2568]: https://github.com/actix/actix-web/pull/2568
[#2590]: https://github.com/actix/actix-web/pull/2590
[#2612]: https://github.com/actix/actix-web/pull/2612
[#2613]: https://github.com/actix/actix-web/pull/2613
[net#366]: https://github.com/actix/actix-net/pull/366
[net#368]: https://github.com/actix/actix-net/pull/368
[net#370]: https://github.com/actix/actix-net/pull/370
[net#371]: https://github.com/actix/actix-net/pull/371
[net#372]: https://github.com/actix/actix-net/pull/372
[net#373]: https://github.com/actix/actix-net/pull/373
[net#378]: https://github.com/actix/actix-net/pull/378
[net#379]: https://github.com/actix/actix-net/pull/379
[net#380]: https://github.com/actix/actix-net/pull/380
[net#384]: https://github.com/actix/actix-net/pull/384
<details>
<summary>0.5.0 Pre-Releases</summary>
## 0.5.0-rc.3 - 2022-01-31
- Remove unused `ResourceInfo`. [#2612]
- Add `RouterBuilder::push`. [#2612]
@ -41,10 +113,10 @@
## 0.5.0-beta.2 - 2021-09-09
- Introduce `ResourceDef::join`. [#380]
- Disallow prefix routes with tail segments. [#379]
- Enforce path separators on dynamic prefixes. [#378]
- Improve malformed path error message. [#384]
- Introduce `ResourceDef::join`. [#380][net#380]
- Disallow prefix routes with tail segments. [#379][net#379]
- Enforce path separators on dynamic prefixes. [#378][net#378]
- Improve malformed path error message. [#384][net#384]
- Prefix segments now always end with a segment delimiter or end-of-input. [#2355]
- Prefix segments with trailing slashes define a trailing empty segment. [#2355]
- Support multi-pattern prefixes and joins. [#2356]
@ -52,52 +124,54 @@
- Support `build_resource_path` on multi-pattern resources. [#2356]
- Minimum supported Rust version (MSRV) is now 1.51.
[#378]: https://github.com/actix/actix-net/pull/378
[#379]: https://github.com/actix/actix-net/pull/379
[#380]: https://github.com/actix/actix-net/pull/380
[#384]: https://github.com/actix/actix-net/pull/384
[net#378]: https://github.com/actix/actix-net/pull/378
[net#379]: https://github.com/actix/actix-net/pull/379
[net#380]: https://github.com/actix/actix-net/pull/380
[net#384]: https://github.com/actix/actix-net/pull/384
[#2355]: https://github.com/actix/actix-web/pull/2355
[#2356]: https://github.com/actix/actix-web/pull/2356
## 0.5.0-beta.1 - 2021-07-20
- Fix a bug in multi-patterns where static patterns are interpreted as regex. [#366]
- Introduce `ResourceDef::pattern_iter` to get an iterator over all patterns in a multi-pattern resource. [#373]
- Fix segment interpolation leaving `Path` in unintended state after matching. [#368]
- Fix `ResourceDef` `PartialEq` implementation. [#373]
- Re-work `IntoPatterns` trait, adding a `Patterns` enum. [#372]
- Implement `IntoPatterns` for `bytestring::ByteString`. [#372]
- Rename `Path::{len => segment_count}` to be more descriptive of its purpose. [#370]
- Rename `ResourceDef::{resource_path => resource_path_from_iter}`. [#371]
- `ResourceDef::resource_path_from_iter` now takes an `IntoIterator`. [#373]
- Rename `ResourceDef::{resource_path_named => resource_path_from_map}`. [#371]
- Rename `ResourceDef::{is_prefix_match => find_match}`. [#373]
- Rename `ResourceDef::{match_path => capture_match_info}`. [#373]
- Rename `ResourceDef::{match_path_checked => capture_match_info_fn}`. [#373]
- Remove `ResourceDef::name_mut` and introduce `ResourceDef::set_name`. [#373]
- Rename `Router::{*_checked => *_fn}`. [#373]
- Return type of `ResourceDef::name` is now `Option<&str>`. [#373]
- Return type of `ResourceDef::pattern` is now `Option<&str>`. [#373]
- Fix a bug in multi-patterns where static patterns are interpreted as regex. [#366][net#366]
- Introduce `ResourceDef::pattern_iter` to get an iterator over all patterns in a multi-pattern resource. [#373][net#373]
- Fix segment interpolation leaving `Path` in unintended state after matching. [#368][net#368]
- Fix `ResourceDef` `PartialEq` implementation. [#373][net#373]
- Re-work `IntoPatterns` trait, adding a `Patterns` enum. [#372][net#372]
- Implement `IntoPatterns` for `bytestring::ByteString`. [#372][net#372]
- Rename `Path::{len => segment_count}` to be more descriptive of its purpose. [#370][net#370]
- Rename `ResourceDef::{resource_path => resource_path_from_iter}`. [#371][net#371]
- `ResourceDef::resource_path_from_iter` now takes an `IntoIterator`. [#373][net#373]
- Rename `ResourceDef::{resource_path_named => resource_path_from_map}`. [#371][net#371]
- Rename `ResourceDef::{is_prefix_match => find_match}`. [#373][net#373]
- Rename `ResourceDef::{match_path => capture_match_info}`. [#373][net#373]
- Rename `ResourceDef::{match_path_checked => capture_match_info_fn}`. [#373][net#373]
- Remove `ResourceDef::name_mut` and introduce `ResourceDef::set_name`. [#373][net#373]
- Rename `Router::{*_checked => *_fn}`. [#373][net#373]
- Return type of `ResourceDef::name` is now `Option<&str>`. [#373][net#373]
- Return type of `ResourceDef::pattern` is now `Option<&str>`. [#373][net#373]
[#368]: https://github.com/actix/actix-net/pull/368
[#366]: https://github.com/actix/actix-net/pull/366
[#370]: https://github.com/actix/actix-net/pull/370
[#371]: https://github.com/actix/actix-net/pull/371
[#372]: https://github.com/actix/actix-net/pull/372
[#373]: https://github.com/actix/actix-net/pull/373
[net#368]: https://github.com/actix/actix-net/pull/368
[net#366]: https://github.com/actix/actix-net/pull/366
[net#370]: https://github.com/actix/actix-net/pull/370
[net#371]: https://github.com/actix/actix-net/pull/371
[net#372]: https://github.com/actix/actix-net/pull/372
[net#373]: https://github.com/actix/actix-net/pull/373
</details>
## 0.4.0 - 2021-06-06
- When matching path parameters, `%25` is now kept in the percent-encoded form; no longer decoded to `%`. [#357]
- Path tail patterns now match new lines (`\n`) in request URL. [#360]
- Fixed a safety bug where `Path` could return a malformed string after percent decoding. [#359]
- Methods `Path::{add, add_static}` now take `impl Into<Cow<'static, str>>`. [#345]
- When matching path parameters, `%25` is now kept in the percent-encoded form; no longer decoded to `%`. [#357][net#357]
- Path tail patterns now match new lines (`\n`) in request URL. [#360][net#360]
- Fixed a safety bug where `Path` could return a malformed string after percent decoding. [#359][net#359]
- Methods `Path::{add, add_static}` now take `impl Into<Cow<'static, str>>`. [#345][net#345]
[#345]: https://github.com/actix/actix-net/pull/345
[#357]: https://github.com/actix/actix-net/pull/357
[#359]: https://github.com/actix/actix-net/pull/359
[#360]: https://github.com/actix/actix-net/pull/360
[net#345]: https://github.com/actix/actix-net/pull/345
[net#357]: https://github.com/actix/actix-net/pull/357
[net#359]: https://github.com/actix/actix-net/pull/359
[net#360]: https://github.com/actix/actix-net/pull/360
## 0.3.0 - 2019-12-31
@ -105,15 +179,15 @@
## 0.2.7 - 2021-02-06
- Add `Router::recognize_checked` [#247]
- Add `Router::recognize_checked` [#247][net#247]
[#247]: https://github.com/actix/actix-net/pull/247
[net#247]: https://github.com/actix/actix-net/pull/247
## 0.2.6 - 2021-01-09
- Use `bytestring` version range compatible with Bytes v1.0. [#246]
- Use `bytestring` version range compatible with Bytes v1.0. [#246][net#246]
[#246]: https://github.com/actix/actix-net/pull/246
[net#246]: https://github.com/actix/actix-net/pull/246
## 0.2.5 - 2020-09-20

View File

@ -1,6 +1,6 @@
[package]
name = "actix-router"
version = "0.5.0-rc.3"
version = "0.5.0"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>",
@ -21,18 +21,21 @@ default = ["http"]
[dependencies]
bytestring = ">=0.1.5, <2"
firestorm = "0.5"
http = { version = "0.2.3", optional = true }
log = "0.4"
http = { version = "0.2.5", optional = true }
regex = "1.5"
serde = "1"
tracing = { version = "0.1.30", default-features = false, features = ["log"] }
[dev-dependencies]
criterion = { version = "0.3", features = ["html_reports"] }
firestorm = { version = "0.5", features = ["enable_system_time"] }
http = "0.2.5"
serde = { version = "1", features = ["derive"] }
percent-encoding = "2.1"
[[bench]]
name = "router"
harness = false
[[bench]]
name = "quoter"
harness = false

View File

@ -0,0 +1,52 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use std::borrow::Cow;
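// Benchmark added in this changeset: it compares `Quoter::requote` against
// `percent_encoding::percent_decode`, once on a fully percent-encoded ASCII path
// ("quoted") and once on the same characters left unencoded ("unquoted").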
fn compare_quoters(c: &mut Criterion) {
let mut group = c.benchmark_group("Compare Quoters");
let quoter = actix_router::Quoter::new(b"", b"");
let path_quoted = (0..=0x7f)
.map(|c| format!("%{:02X}", c))
.collect::<String>();
let path_unquoted = ('\u{00}'..='\u{7f}').collect::<String>();
group.bench_function("quoter_unquoted", |b| {
b.iter(|| {
for _ in 0..10 {
black_box(quoter.requote(path_unquoted.as_bytes()));
}
});
});
group.bench_function("percent_encode_unquoted", |b| {
b.iter(|| {
for _ in 0..10 {
let decode = percent_encoding::percent_decode(path_unquoted.as_bytes());
black_box(Into::<Cow<'_, [u8]>>::into(decode));
}
});
});
group.bench_function("quoter_quoted", |b| {
b.iter(|| {
for _ in 0..10 {
black_box(quoter.requote(path_quoted.as_bytes()));
}
});
});
group.bench_function("percent_encode_quoted", |b| {
b.iter(|| {
for _ in 0..10 {
let decode = percent_encoding::percent_decode(path_quoted.as_bytes());
black_box(Into::<Cow<'_, [u8]>>::into(decode));
}
});
});
group.finish();
}
criterion_group!(benches, compare_quoters);
criterion_main!(benches);

View File

@ -145,7 +145,8 @@ macro_rules! register {
concat!("/user/keys"),
concat!("/user/keys/", $p1),
];
std::array::IntoIter::new(arr)
IntoIterator::into_iter(arr)
}};
}
@ -158,7 +159,7 @@ fn call() -> impl Iterator<Item = &'static str> {
"/repos/rust-lang/rust/releases/1.51.0",
];
std::array::IntoIter::new(arr)
IntoIterator::into_iter(arr)
}
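// Arrays implement by-value `IntoIterator` since Rust 1.53, so the deprecated
// `std::array::IntoIter::new(arr)` constructor can be replaced with
// `IntoIterator::into_iter(arr)`; the fully qualified form avoids the 2015/2018-edition
// method-call fallback that would iterate over references instead of values.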
fn compare_routers(c: &mut Criterion) {

View File

@ -1,169 +0,0 @@
macro_rules! register {
(brackets) => {{
register!(finish => "{p1}", "{p2}", "{p3}", "{p4}")
}};
(finish => $p1:literal, $p2:literal, $p3:literal, $p4:literal) => {{
let arr = [
concat!("/authorizations"),
concat!("/authorizations/", $p1),
concat!("/applications/", $p1, "/tokens/", $p2),
concat!("/events"),
concat!("/repos/", $p1, "/", $p2, "/events"),
concat!("/networks/", $p1, "/", $p2, "/events"),
concat!("/orgs/", $p1, "/events"),
concat!("/users/", $p1, "/received_events"),
concat!("/users/", $p1, "/received_events/public"),
concat!("/users/", $p1, "/events"),
concat!("/users/", $p1, "/events/public"),
concat!("/users/", $p1, "/events/orgs/", $p2),
concat!("/feeds"),
concat!("/notifications"),
concat!("/repos/", $p1, "/", $p2, "/notifications"),
concat!("/notifications/threads/", $p1),
concat!("/notifications/threads/", $p1, "/subscription"),
concat!("/repos/", $p1, "/", $p2, "/stargazers"),
concat!("/users/", $p1, "/starred"),
concat!("/user/starred"),
concat!("/user/starred/", $p1, "/", $p2),
concat!("/repos/", $p1, "/", $p2, "/subscribers"),
concat!("/users/", $p1, "/subscriptions"),
concat!("/user/subscriptions"),
concat!("/repos/", $p1, "/", $p2, "/subscription"),
concat!("/user/subscriptions/", $p1, "/", $p2),
concat!("/users/", $p1, "/gists"),
concat!("/gists"),
concat!("/gists/", $p1),
concat!("/gists/", $p1, "/star"),
concat!("/repos/", $p1, "/", $p2, "/git/blobs/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/commits/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/refs"),
concat!("/repos/", $p1, "/", $p2, "/git/tags/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/trees/", $p3),
concat!("/issues"),
concat!("/user/issues"),
concat!("/orgs/", $p1, "/issues"),
concat!("/repos/", $p1, "/", $p2, "/issues"),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3),
concat!("/repos/", $p1, "/", $p2, "/assignees"),
concat!("/repos/", $p1, "/", $p2, "/assignees/", $p3),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/events"),
concat!("/repos/", $p1, "/", $p2, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/labels/", $p3),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/milestones/"),
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3),
concat!("/emojis"),
concat!("/gitignore/templates"),
concat!("/gitignore/templates/", $p1),
concat!("/meta"),
concat!("/rate_limit"),
concat!("/users/", $p1, "/orgs"),
concat!("/user/orgs"),
concat!("/orgs/", $p1),
concat!("/orgs/", $p1, "/members"),
concat!("/orgs/", $p1, "/members", $p2),
concat!("/orgs/", $p1, "/public_members"),
concat!("/orgs/", $p1, "/public_members/", $p2),
concat!("/orgs/", $p1, "/teams"),
concat!("/teams/", $p1),
concat!("/teams/", $p1, "/members"),
concat!("/teams/", $p1, "/members", $p2),
concat!("/teams/", $p1, "/repos"),
concat!("/teams/", $p1, "/repos/", $p2, "/", $p3),
concat!("/user/teams"),
concat!("/repos/", $p1, "/", $p2, "/pulls"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/commits"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/files"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/merge"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/comments"),
concat!("/user/repos"),
concat!("/users/", $p1, "/repos"),
concat!("/orgs/", $p1, "/repos"),
concat!("/repositories"),
concat!("/repos/", $p1, "/", $p2),
concat!("/repos/", $p1, "/", $p2, "/contributors"),
concat!("/repos/", $p1, "/", $p2, "/languages"),
concat!("/repos/", $p1, "/", $p2, "/teams"),
concat!("/repos/", $p1, "/", $p2, "/tags"),
concat!("/repos/", $p1, "/", $p2, "/branches"),
concat!("/repos/", $p1, "/", $p2, "/branches/", $p3),
concat!("/repos/", $p1, "/", $p2, "/collaborators"),
concat!("/repos/", $p1, "/", $p2, "/collaborators/", $p3),
concat!("/repos/", $p1, "/", $p2, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/commits"),
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3),
concat!("/repos/", $p1, "/", $p2, "/readme"),
concat!("/repos/", $p1, "/", $p2, "/keys"),
concat!("/repos/", $p1, "/", $p2, "/keys", $p3),
concat!("/repos/", $p1, "/", $p2, "/downloads"),
concat!("/repos/", $p1, "/", $p2, "/downloads", $p3),
concat!("/repos/", $p1, "/", $p2, "/forks"),
concat!("/repos/", $p1, "/", $p2, "/hooks"),
concat!("/repos/", $p1, "/", $p2, "/hooks", $p3),
concat!("/repos/", $p1, "/", $p2, "/releases"),
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3),
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3, "/assets"),
concat!("/repos/", $p1, "/", $p2, "/stats/contributors"),
concat!("/repos/", $p1, "/", $p2, "/stats/commit_activity"),
concat!("/repos/", $p1, "/", $p2, "/stats/code_frequency"),
concat!("/repos/", $p1, "/", $p2, "/stats/participation"),
concat!("/repos/", $p1, "/", $p2, "/stats/punch_card"),
concat!("/repos/", $p1, "/", $p2, "/statuses/", $p3),
concat!("/search/repositories"),
concat!("/search/code"),
concat!("/search/issues"),
concat!("/search/users"),
concat!("/legacy/issues/search/", $p1, "/", $p2, "/", $p3, "/", $p4),
concat!("/legacy/repos/search/", $p1),
concat!("/legacy/user/search/", $p1),
concat!("/legacy/user/email/", $p1),
concat!("/users/", $p1),
concat!("/user"),
concat!("/users"),
concat!("/user/emails"),
concat!("/users/", $p1, "/followers"),
concat!("/user/followers"),
concat!("/users/", $p1, "/following"),
concat!("/user/following"),
concat!("/user/following/", $p1),
concat!("/users/", $p1, "/following", $p2),
concat!("/users/", $p1, "/keys"),
concat!("/user/keys"),
concat!("/user/keys/", $p1),
];
arr.to_vec()
}};
}
static PATHS: [&str; 5] = [
"/authorizations",
"/user/repos",
"/repos/rust-lang/rust/stargazers",
"/orgs/rust-lang/public_members/nikomatsakis",
"/repos/rust-lang/rust/releases/1.51.0",
];
fn main() {
let mut router = actix_router::Router::<bool>::build();
for route in register!(brackets) {
router.path(route, true);
}
let actix = router.finish();
if firestorm::enabled() {
firestorm::bench("target", || {
for &route in &PATHS {
let mut path = actix_router::Path::new(route);
actix.recognize(&mut path).unwrap();
}
})
.unwrap();
}
}

View File

@ -7,7 +7,7 @@ use crate::path::{Path, PathIter};
use crate::{Quoter, ResourcePath};
thread_local! {
static FULL_QUOTER: Quoter = Quoter::new(b"+/%", b"");
static FULL_QUOTER: Quoter = Quoter::new(b"", b"");
}
macro_rules! unsupported_type {

View File

@ -1,7 +1,6 @@
use std::borrow::Cow;
use std::ops::{DerefMut, Index};
use firestorm::profile_method;
use serde::de;
use crate::{de::PathDeserializer, Resource, ResourcePath};
@ -52,7 +51,6 @@ impl<T: ResourcePath> Path<T> {
/// Returns full path as a string.
#[inline]
pub fn as_str(&self) -> &str {
profile_method!(as_str);
self.path.path()
}
@ -61,7 +59,6 @@ impl<T: ResourcePath> Path<T> {
/// Returns empty string if no more is to be processed.
#[inline]
pub fn unprocessed(&self) -> &str {
profile_method!(unprocessed);
// clamp skip to path length
let skip = (self.skip as usize).min(self.as_str().len());
&self.path.path()[skip..]
@ -72,8 +69,6 @@ impl<T: ResourcePath> Path<T> {
#[deprecated(since = "0.6.0", note = "Use `.as_str()` or `.unprocessed()`.")]
#[inline]
pub fn path(&self) -> &str {
profile_method!(path);
let skip = self.skip as usize;
let path = self.path.path();
if skip <= path.len() {
@ -86,8 +81,6 @@ impl<T: ResourcePath> Path<T> {
/// Set new path.
#[inline]
pub fn set(&mut self, path: T) {
profile_method!(set);
self.skip = 0;
self.path = path;
self.segments.clear();
@ -96,8 +89,6 @@ impl<T: ResourcePath> Path<T> {
/// Reset state.
#[inline]
pub fn reset(&mut self) {
profile_method!(reset);
self.skip = 0;
self.segments.clear();
}
@ -105,13 +96,10 @@ impl<T: ResourcePath> Path<T> {
/// Skip first `n` chars in path.
#[inline]
pub fn skip(&mut self, n: u16) {
profile_method!(skip);
self.skip += n;
}
pub(crate) fn add(&mut self, name: impl Into<Cow<'static, str>>, value: PathItem) {
profile_method!(add);
match value {
PathItem::Static(s) => self.segments.push((name.into(), PathItem::Static(s))),
PathItem::Segment(begin, end) => self.segments.push((
@ -127,8 +115,6 @@ impl<T: ResourcePath> Path<T> {
name: impl Into<Cow<'static, str>>,
value: impl Into<Cow<'static, str>>,
) {
profile_method!(add_static);
self.segments
.push((name.into(), PathItem::Static(value.into())));
}
@ -147,8 +133,6 @@ impl<T: ResourcePath> Path<T> {
/// Get matched parameter by name without type conversion
pub fn get(&self, name: &str) -> Option<&str> {
profile_method!(get);
for (seg_name, val) in self.segments.iter() {
if name == seg_name {
return match val {
@ -167,8 +151,6 @@ impl<T: ResourcePath> Path<T> {
///
/// If keyed parameter is not available empty string is used as default value.
pub fn query(&self, key: &str) -> &str {
profile_method!(query);
if let Some(s) = self.get(key) {
s
} else {
@ -186,7 +168,6 @@ impl<T: ResourcePath> Path<T> {
/// Try to deserialize matching parameters to a specified type `U`
pub fn load<'de, U: serde::Deserialize<'de>>(&'de self) -> Result<U, de::value::Error> {
profile_method!(load);
de::Deserialize::deserialize(PathDeserializer::new(self))
}
}

View File

@ -1,132 +1,89 @@
#[allow(dead_code)]
const GEN_DELIMS: &[u8] = b":/?#[]@";
#[allow(dead_code)]
const SUB_DELIMS_WITHOUT_QS: &[u8] = b"!$'()*,";
#[allow(dead_code)]
const SUB_DELIMS: &[u8] = b"!$'()*,+?=;";
#[allow(dead_code)]
const RESERVED: &[u8] = b":/?#[]@!$'()*,+?=;";
#[allow(dead_code)]
const UNRESERVED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
ABCDEFGHIJKLMNOPQRSTUVWXYZ
1234567890
-._~";
const ALLOWED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
ABCDEFGHIJKLMNOPQRSTUVWXYZ
1234567890
-._~
!$'()*,";
const QS: &[u8] = b"+&=;b";
/// A quoter
/// Partial percent-decoding.
///
/// Performs percent-decoding on a slice but can selectively skip decoding certain sequences.
///
/// # Examples
/// ```
/// # use actix_router::Quoter;
/// // + is set as a protected character and will not be decoded...
/// let q = Quoter::new(&[], b"+");
///
/// // ...but the other encoded characters (like the hyphen below) will.
/// assert_eq!(q.requote(b"/a%2Db%2Bc").unwrap(), b"/a-b%2Bc");
/// ```
pub struct Quoter {
/// Simple bit-map of safe values in the 0-127 ASCII range.
safe_table: [u8; 16],
/// Simple bit-map of protected values in the 0-127 ASCII range.
protected_table: [u8; 16],
protected_table: AsciiBitmap,
}
impl Quoter {
pub fn new(safe: &[u8], protected: &[u8]) -> Quoter {
let mut quoter = Quoter {
safe_table: [0; 16],
protected_table: [0; 16],
};
// prepare safe table
for ch in 0..128 {
if ALLOWED.contains(&ch) {
set_bit(&mut quoter.safe_table, ch);
}
if QS.contains(&ch) {
set_bit(&mut quoter.safe_table, ch);
}
}
for &ch in safe {
set_bit(&mut quoter.safe_table, ch)
}
/// Constructs a new `Quoter` instance given a set of protected ASCII bytes.
///
/// The first argument is ignored but is kept for backward compatibility.
///
/// # Panics
/// Panics if any of the `protected` bytes are not in the 0-127 ASCII range.
pub fn new(_: &[u8], protected: &[u8]) -> Quoter {
let mut protected_table = AsciiBitmap::default();
// prepare protected table
for &ch in protected {
set_bit(&mut quoter.safe_table, ch);
set_bit(&mut quoter.protected_table, ch);
protected_table.set_bit(ch);
}
quoter
Quoter { protected_table }
}
/// Decodes safe percent-encoded sequences from `val`.
/// Decodes the next escape sequence, if any, and advances `val`.
#[inline(always)]
fn decode_next<'a>(&self, val: &mut &'a [u8]) -> Option<(&'a [u8], u8)> {
for i in 0..val.len() {
if let (prev, [b'%', p1, p2, rem @ ..]) = val.split_at(i) {
if let Some(ch) = hex_pair_to_char(*p1, *p2)
// ignore protected ascii bytes
.filter(|&ch| !(ch < 128 && self.protected_table.bit_at(ch)))
{
*val = rem;
return Some((prev, ch));
}
}
}
None
}
/// Partially percent-decodes the given bytes.
///
/// Returns `None` when no modification to the original byte string was required.
/// Escape sequences of the protected set are *not* decoded.
///
/// Non-ASCII bytes are accepted as valid input.
/// Returns `None` when no modification to the original bytes was required.
///
/// Behavior for invalid/incomplete percent-encoding sequences is unspecified and may include
/// removing the invalid sequence from the output or passing it as-is.
/// Invalid/incomplete percent-encoding sequences are passed unmodified.
pub fn requote(&self, val: &[u8]) -> Option<Vec<u8>> {
let mut has_pct = 0;
let mut pct = [b'%', 0, 0];
let mut idx = 0;
let mut cloned: Option<Vec<u8>> = None;
let mut remaining = val;
let len = val.len();
// early return indicates that no percent-encoded sequences exist and we can skip allocation
let (pre, decoded_char) = self.decode_next(&mut remaining)?;
while idx < len {
let ch = val[idx];
// decoded output will always be shorter than the input
let mut decoded = Vec::<u8>::with_capacity(val.len());
if has_pct != 0 {
pct[has_pct] = val[idx];
has_pct += 1;
// push first segment and decoded char
decoded.extend_from_slice(pre);
decoded.push(decoded_char);
if has_pct == 3 {
has_pct = 0;
let buf = cloned.as_mut().unwrap();
if let Some(ch) = hex_pair_to_char(pct[1], pct[2]) {
if ch < 128 {
if bit_at(&self.protected_table, ch) {
buf.extend_from_slice(&pct);
idx += 1;
continue;
// decode and push rest of segments and decoded chars
while let Some((prev, ch)) = self.decode_next(&mut remaining) {
// this ugly conditional achieves +50% perf in cases where this is a tight loop.
if !prev.is_empty() {
decoded.extend_from_slice(prev);
}
decoded.push(ch);
}
if bit_at(&self.safe_table, ch) {
buf.push(ch);
idx += 1;
continue;
}
}
decoded.extend_from_slice(remaining);
buf.push(ch);
} else {
buf.extend_from_slice(&pct[..]);
}
}
} else if ch == b'%' {
has_pct = 1;
if cloned.is_none() {
let mut c = Vec::with_capacity(len);
c.extend_from_slice(&val[..idx]);
cloned = Some(c);
}
} else if let Some(ref mut cloned) = cloned {
cloned.push(ch)
}
idx += 1;
}
cloned
Some(decoded)
}
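// A short usage sketch of the contract above, consistent with the doc example and the
// unit tests at the bottom of this file: with `+` protected, a path whose only escape
// sequence is `%2B` requires no changes and yields `None`, while an unprotected escape
// such as `%2D` ('-') is decoded.
//
//     let q = Quoter::new(b"", b"+");
//     assert_eq!(q.requote(b"/a%2Bc"), None);
//     assert_eq!(q.requote(b"/a%2Dc").unwrap(), b"/a-c");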
pub(crate) fn requote_str_lossy(&self, val: &str) -> Option<String> {
@ -135,24 +92,6 @@ impl Quoter {
}
}
/// Converts an ASCII character in the hex-encoded set (`0-9`, `A-F`, `a-f`) to its integer
/// representation from `0x0` to `0xF`.
///
/// - `0x30 ('0') => 0x0`
/// - `0x39 ('9') => 0x9`
/// - `0x41 ('A') => 0xA`
/// - `0x61 ('a') => 0xA`
/// - `0x46 ('F') => 0xF`
/// - `0x66 ('f') => 0xF`
fn from_ascii_hex(v: u8) -> Option<u8> {
match v {
b'0'..=b'9' => Some(v - 0x30), // ord('0') == 0x30
b'A'..=b'F' => Some(v - 0x41 + 10), // ord('A') == 0x41
b'a'..=b'f' => Some(v - 0x61 + 10), // ord('a') == 0x61
_ => None,
}
}
/// Decodes an ASCII hex-encoded pair to an integer.
///
/// Returns `None` if either portion of the decoded pair does not evaluate to a valid hex value.
@ -160,64 +99,52 @@ fn from_ascii_hex(v: u8) -> Option<u8> {
/// - `0x33 ('3'), 0x30 ('0') => 0x30 ('0')`
/// - `0x34 ('4'), 0x31 ('1') => 0x41 ('A')`
/// - `0x36 ('6'), 0x31 ('1') => 0x61 ('a')`
#[inline(always)]
fn hex_pair_to_char(d1: u8, d2: u8) -> Option<u8> {
let (d_high, d_low) = (from_ascii_hex(d1)?, from_ascii_hex(d2)?);
let d_high = char::from(d1).to_digit(16)?;
let d_low = char::from(d2).to_digit(16)?;
// left shift high nibble by 4 bits
Some(d_high << 4 | d_low)
Some((d_high as u8) << 4 | (d_low as u8))
}
/// Sets the bit for `ch` in the given bit-map to 1 (true).
///
/// # Panics
/// Panics if `ch` index is out of bounds.
fn set_bit(array: &mut [u8], ch: u8) {
array[(ch >> 3) as usize] |= 0b1 << (ch & 0b111)
#[derive(Debug, Default, Clone)]
struct AsciiBitmap {
array: [u8; 16],
}
/// Returns true if the bit for `ch` is set in the given bit-map.
///
/// # Panics
/// Panics if `ch` index is out of bounds.
fn bit_at(array: &[u8], ch: u8) -> bool {
array[(ch >> 3) as usize] & (0b1 << (ch & 0b111)) != 0
impl AsciiBitmap {
/// Sets the bit for `ch` in this bit-map to 1 (true).
///
/// # Panics
/// Panics if `ch` index is out of bounds.
fn set_bit(&mut self, ch: u8) {
self.array[(ch >> 3) as usize] |= 0b1 << (ch & 0b111)
}
/// Returns true if the bit for `ch` is set in this bit-map.
///
/// # Panics
/// Panics if `ch` index is out of bounds.
fn bit_at(&self, ch: u8) -> bool {
self.array[(ch >> 3) as usize] & (0b1 << (ch & 0b111)) != 0
}
}
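// Indexing sketch for the bit-map above: the 128 ASCII values are packed into 16 bytes,
// where `ch >> 3` selects the byte and `ch & 0b111` selects the bit within it.
// For example, b'/' (47) lands in byte 47 >> 3 = 5 at bit 47 & 0b111 = 7, i.e. mask 0b1000_0000.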
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn hex_encoding() {
let hex = b"0123456789abcdefABCDEF";
for i in 0..256 {
let c = i as u8;
if hex.contains(&c) {
assert!(from_ascii_hex(c).is_some())
} else {
assert!(from_ascii_hex(c).is_none())
}
}
let expected = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 10, 11, 12, 13, 14, 15,
];
for i in 0..hex.len() {
assert_eq!(from_ascii_hex(hex[i]).unwrap(), expected[i]);
}
}
#[test]
fn custom_quoter() {
let q = Quoter::new(b"", b"+");
assert_eq!(q.requote(b"/a%25c").unwrap(), b"/a%c");
assert_eq!(q.requote(b"/a%2Bc").unwrap(), b"/a%2Bc");
assert_eq!(q.requote(b"/a%2Bc"), None);
let q = Quoter::new(b"%+", b"/");
assert_eq!(q.requote(b"/a%25b%2Bc").unwrap(), b"/a%b+c");
assert_eq!(q.requote(b"/a%2fb").unwrap(), b"/a%2fb");
assert_eq!(q.requote(b"/a%2Fb").unwrap(), b"/a%2Fb");
assert_eq!(q.requote(b"/a%2fb"), None);
assert_eq!(q.requote(b"/a%2Fb"), None);
assert_eq!(q.requote(b"/a%0Ab").unwrap(), b"/a\nb");
assert_eq!(q.requote(b"/a%FE\xffb").unwrap(), b"/a\xfe\xffb");
assert_eq!(q.requote(b"/a\xfe\xffb"), None);
@ -233,7 +160,8 @@ mod tests {
#[test]
fn invalid_sequences() {
let q = Quoter::new(b"%+", b"/");
assert_eq!(q.requote(b"/a%2x%2X%%").unwrap(), b"/a%2x%2X");
assert_eq!(q.requote(b"/a%2x%2X%%"), None);
assert_eq!(q.requote(b"/a%20%2X%%").unwrap(), b"/a %2X%%");
}
#[test]

View File

@ -5,8 +5,8 @@ use std::{
mem,
};
use firestorm::{profile_fn, profile_method, profile_section};
use regex::{escape, Regex, RegexSet};
use tracing::error;
use crate::{path::PathItem, IntoPatterns, Patterns, Resource, ResourcePath};
@ -271,7 +271,6 @@ impl ResourceDef {
/// assert!(!resource.is_match("/foo"));
/// ```
pub fn new<T: IntoPatterns>(paths: T) -> Self {
profile_method!(new);
Self::construct(paths, false)
}
@ -299,7 +298,6 @@ impl ResourceDef {
/// assert!(!resource.is_match("/foo"));
/// ```
pub fn prefix<T: IntoPatterns>(paths: T) -> Self {
profile_method!(prefix);
ResourceDef::construct(paths, true)
}
@ -324,7 +322,6 @@ impl ResourceDef {
/// assert!(!resource.is_match("user/123"));
/// ```
pub fn root_prefix(path: &str) -> Self {
profile_method!(root_prefix);
ResourceDef::prefix(insert_slash(path).into_owned())
}
@ -548,8 +545,6 @@ impl ResourceDef {
/// ```
#[inline]
pub fn is_match(&self, path: &str) -> bool {
profile_method!(is_match);
// this function could be expressed as:
// `self.find_match(path).is_some()`
// but this skips some checks and uses potentially faster regex methods
@ -597,8 +592,6 @@ impl ResourceDef {
/// assert_eq!(resource.find_match("/profile/1234"), Some(13));
/// ```
pub fn find_match(&self, path: &str) -> Option<usize> {
profile_method!(find_match);
match &self.pat_type {
PatternType::Static(pattern) => self.static_match(pattern, path),
@ -633,7 +626,6 @@ impl ResourceDef {
/// assert_eq!(path.unprocessed(), "");
/// ```
pub fn capture_match_info<R: Resource>(&self, resource: &mut R) -> bool {
profile_method!(capture_match_info);
self.capture_match_info_fn(resource, |_| true)
}
@ -657,7 +649,7 @@ impl ResourceDef {
/// resource.capture_match_info_fn(
/// path,
/// // when env var is not set, reject when path contains "admin"
/// |res| !(!admin_allowed && res.path().contains("admin")),
/// |path| !(!admin_allowed && path.as_str().contains("admin")),
/// )
/// }
///
@ -679,56 +671,35 @@ impl ResourceDef {
R: Resource,
F: FnOnce(&R) -> bool,
{
profile_method!(capture_match_info_fn);
let mut segments = <[PathItem; MAX_DYNAMIC_SEGMENTS]>::default();
let path = resource.resource_path();
let path_str = path.unprocessed();
let (matched_len, matched_vars) = match &self.pat_type {
PatternType::Static(pattern) => {
profile_section!(pattern_static_or_prefix);
match self.static_match(pattern, path_str) {
PatternType::Static(pattern) => match self.static_match(pattern, path_str) {
Some(len) => (len, None),
None => return false,
}
}
},
PatternType::Dynamic(re, names) => {
profile_section!(pattern_dynamic);
let captures = {
profile_section!(pattern_dynamic_regex_exec);
match re.captures(path.unprocessed()) {
let captures = match re.captures(path.unprocessed()) {
Some(captures) => captures,
_ => return false,
}
};
{
profile_section!(pattern_dynamic_extract_captures);
for (no, name) in names.iter().enumerate() {
if let Some(m) = captures.name(name) {
segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16);
} else {
log::error!(
"Dynamic path match but not all segments found: {}",
name
);
error!("Dynamic path match but not all segments found: {}", name);
return false;
}
}
};
(captures[1].len(), Some(names))
}
PatternType::DynamicSet(re, params) => {
profile_section!(pattern_dynamic_set);
let path = path.unprocessed();
let (pattern, names) = match re.matches(path).into_iter().next() {
Some(idx) => &params[idx],
@ -744,7 +715,7 @@ impl ResourceDef {
if let Some(m) = captures.name(name) {
segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16);
} else {
log::error!("Dynamic path match but not all segments found: {}", name);
error!("Dynamic path match but not all segments found: {}", name);
return false;
}
}
@ -811,7 +782,6 @@ impl ResourceDef {
I: IntoIterator,
I::Item: AsRef<str>,
{
profile_method!(resource_path_from_iter);
let mut iter = values.into_iter();
self.build_resource_path(path, |_| iter.next())
}
@ -847,7 +817,6 @@ impl ResourceDef {
V: AsRef<str>,
S: BuildHasher,
{
profile_method!(resource_path_from_map);
self.build_resource_path(path, |name| values.get(name))
}
@ -868,8 +837,6 @@ impl ResourceDef {
}
fn construct<T: IntoPatterns>(paths: T, is_prefix: bool) -> Self {
profile_method!(construct);
let patterns = paths.patterns();
let (pat_type, segments) = match &patterns {
Patterns::Single(pattern) => ResourceDef::parse(pattern, is_prefix, false),
@ -898,7 +865,7 @@ impl ResourceDef {
}
let pattern_re_set = RegexSet::new(re_set).unwrap();
let segments = segments.unwrap_or_else(Vec::new);
let segments = segments.unwrap_or_default();
(
PatternType::DynamicSet(pattern_re_set, pattern_data),
@ -928,8 +895,6 @@ impl ResourceDef {
/// # Panics
/// Panics if given patterns does not contain a dynamic segment.
fn parse_param(pattern: &str) -> (PatternSegment, String, &str, bool) {
profile_method!(parse_param);
const DEFAULT_PATTERN: &str = "[^/]+";
const DEFAULT_PATTERN_TAIL: &str = ".*";
@ -999,8 +964,6 @@ impl ResourceDef {
is_prefix: bool,
force_dynamic: bool,
) -> (PatternType, Vec<PatternSegment>) {
profile_method!(parse);
if !force_dynamic && pattern.find('{').is_none() && !pattern.ends_with('*') {
// pattern is static
return (
@ -1038,7 +1001,7 @@ impl ResourceDef {
// tail segments in prefixes have no defined semantics
#[cfg(not(test))]
log::warn!(
tracing::warn!(
"Prefix resources should not have tail segments. \
Use `ResourceDef::new` constructor. \
This may become a panic in the future."
@ -1053,7 +1016,7 @@ impl ResourceDef {
// unnamed tail segment
#[cfg(not(test))]
log::warn!(
tracing::warn!(
"Tail segments must have names. \
Consider `.../{{tail}}*`. \
This may become a panic in the future."
@ -1133,8 +1096,6 @@ impl From<String> for ResourceDef {
}
pub(crate) fn insert_slash(path: &str) -> Cow<'_, str> {
profile_fn!(insert_slash);
if !path.is_empty() && !path.starts_with('/') {
let mut new_path = String::with_capacity(path.len() + 1);
new_path.push('/');

View File

@ -27,7 +27,7 @@ impl<'a> ResourcePath for &'a str {
impl ResourcePath for bytestring::ByteString {
fn path(&self) -> &str {
&*self
self
}
}

View File

@ -1,8 +1,6 @@
use firestorm::profile_method;
use crate::{IntoPatterns, Resource, ResourceDef};
#[derive(Debug, Copy, Clone, PartialEq)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct ResourceId(pub u16);
/// Resource router.
@ -30,7 +28,6 @@ impl<T, U> Router<T, U> {
where
R: Resource,
{
profile_method!(recognize);
self.recognize_fn(resource, |_, _| true)
}
@ -39,7 +36,6 @@ impl<T, U> Router<T, U> {
where
R: Resource,
{
profile_method!(recognize_mut);
self.recognize_mut_fn(resource, |_, _| true)
}
@ -55,8 +51,6 @@ impl<T, U> Router<T, U> {
R: Resource,
F: FnMut(&R, &U) -> bool,
{
profile_method!(recognize_checked);
for (rdef, val, ctx) in self.routes.iter() {
if rdef.capture_match_info_fn(resource, |res| check(res, ctx)) {
return Some((val, ResourceId(rdef.id())));
@ -77,8 +71,6 @@ impl<T, U> Router<T, U> {
R: Resource,
F: FnMut(&R, &U) -> bool,
{
profile_method!(recognize_mut_checked);
for (rdef, val, ctx) in self.routes.iter_mut() {
if rdef.capture_match_info_fn(resource, |res| check(res, ctx)) {
return Some((val, ResourceId(rdef.id())));
@ -104,7 +96,6 @@ impl<T, U> RouterBuilder<T, U> {
val: T,
ctx: U,
) -> (&mut ResourceDef, &mut T, &mut U) {
profile_method!(push);
self.routes.push((rdef, val, ctx));
self.routes
.last_mut()
@ -131,7 +122,6 @@ where
path: impl IntoPatterns,
val: T,
) -> (&mut ResourceDef, &mut T, &mut U) {
profile_method!(path);
self.push(ResourceDef::new(path), val, U::default())
}
@ -141,13 +131,11 @@ where
prefix: impl IntoPatterns,
val: T,
) -> (&mut ResourceDef, &mut T, &mut U) {
profile_method!(prefix);
self.push(ResourceDef::prefix(prefix), val, U::default())
}
/// Registers resource for [`ResourceDef`].
pub fn rdef(&mut self, rdef: ResourceDef, val: T) -> (&mut ResourceDef, &mut T, &mut U) {
profile_method!(rdef);
self.push(rdef, val, U::default())
}
}

View File

@ -3,7 +3,7 @@ use crate::ResourcePath;
use crate::Quoter;
thread_local! {
static DEFAULT_QUOTER: Quoter = Quoter::new(b"@:", b"%/+");
static DEFAULT_QUOTER: Quoter = Quoter::new(b"", b"%/+");
}
#[derive(Debug, Clone, Default)]

View File

@ -1,6 +1,15 @@
# Changes
## Unreleased - 2021-xx-xx
## Unreleased - 2022-xx-xx
- Minimum supported Rust version (MSRV) is now 1.59 due to transitive `time` dependency.
## 0.1.0 - 2022-07-24
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
## 0.1.0-beta.13 - 2022-02-16
- No significant changes since `0.1.0-beta.12`.
## 0.1.0-beta.12 - 2022-01-31

View File

@ -1,6 +1,6 @@
[package]
name = "actix-test"
version = "0.1.0-beta.12"
version = "0.1.0"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
@ -28,14 +28,14 @@ rustls = ["tls-rustls", "actix-http/rustls", "awc/rustls"]
openssl = ["tls-openssl", "actix-http/openssl", "awc/openssl"]
[dependencies]
actix-codec = "0.4.1"
actix-http = "3.0.0-rc.1"
actix-http-test = "3.0.0-beta.12"
actix-codec = "0.5"
actix-http = "3"
actix-http-test = "3"
actix-rt = "2.1"
actix-service = "2.0.0"
actix-utils = "3.0.0"
actix-web = { version = "4.0.0-rc.1", default-features = false, features = ["cookies"] }
awc = { version = "3.0.0-beta.20", default-features = false, features = ["cookies"] }
actix-service = "2"
actix-utils = "3"
actix-web = { version = "4", default-features = false, features = ["cookies"] }
awc = { version = "3", default-features = false, features = ["cookies"] }
futures-core = { version = "0.3.7", default-features = false, features = ["std"] }
futures-util = { version = "0.3.7", default-features = false, features = [] }

View File

@ -43,7 +43,7 @@ pub use actix_http_test::unused_addr;
use actix_service::{map_config, IntoServiceFactory, ServiceFactory, ServiceFactoryExt as _};
pub use actix_web::test::{
call_and_read_body, call_and_read_body_json, call_service, init_service, ok_service,
read_body, read_body_json, simple_service, TestRequest,
read_body, read_body_json, status_service, TestRequest,
};
use actix_web::{
body::MessageBody,

View File

@ -1,6 +1,21 @@
# Changes
## Unreleased - 2021-xx-xx
## Unreleased - 2022-xx-xx
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.
## 4.1.0 - 2022-03-02
- Add support for `actix` version `0.13`. [#2675]
[#2675]: https://github.com/actix/actix-web/pull/2675
## 4.0.0 - 2022-02-25
- No significant changes since `4.0.0-beta.12`.
## 4.0.0-beta.12 - 2022-02-16
- No significant changes since `4.0.0-beta.11`.
## 4.0.0-beta.11 - 2022-01-31

View File

@ -1,6 +1,6 @@
[package]
name = "actix-web-actors"
version = "4.0.0-beta.11"
version = "4.1.0"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Actix actors support for Actix Web"
keywords = ["actix", "http", "web", "framework", "async"]
@ -14,21 +14,24 @@ name = "actix_web_actors"
path = "src/lib.rs"
[dependencies]
actix = { version = "0.12.0", default-features = false }
actix-codec = "0.4.1"
actix-http = "3.0.0-rc.1"
actix-web = { version = "4.0.0-rc.1", default-features = false }
actix = { version = ">=0.12, <0.14", default-features = false }
actix-codec = "0.5"
actix-http = "3"
actix-web = { version = "4", default-features = false }
bytes = "1"
bytestring = "1"
futures-core = { version = "0.3.7", default-features = false }
pin-project-lite = "0.2"
tokio = { version = "1.8.4", features = ["sync"] }
tokio = { version = "1.13.1", features = ["sync"] }
[dev-dependencies]
actix-rt = "2.2"
actix-test = "0.1.0-beta.12"
awc = { version = "3.0.0-beta.20", default-features = false }
actix-test = "0.1"
awc = { version = "3", default-features = false }
actix-web = { version = "4", features = ["macros"] }
mime = "0.3"
env_logger = "0.9"
futures-util = { version = "0.3.7", default-features = false }

View File

@ -3,11 +3,11 @@
> Actix actors support for Actix Web.
[![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors)
[![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.0.0-beta.11)](https://docs.rs/actix-web-actors/4.0.0-beta.11)
[![Version](https://img.shields.io/badge/rustc-1.54+-ab6000.svg)](https://blog.rust-lang.org/2021/05/06/Rust-1.54.0.html)
[![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.1.0)](https://docs.rs/actix-web-actors/4.1.0)
![Version](https://img.shields.io/badge/rustc-1.59+-ab6000.svg)
![License](https://img.shields.io/crates/l/actix-web-actors.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-web-actors/4.0.0-beta.11/status.svg)](https://deps.rs/crate/actix-web-actors/4.0.0-beta.11)
[![dependency status](https://deps.rs/crate/actix-web-actors/4.1.0/status.svg)](https://deps.rs/crate/actix-web-actors/4.1.0)
[![Download](https://img.shields.io/crates/d/actix-web-actors.svg)](https://crates.io/crates/actix-web-actors)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)

View File

@ -14,6 +14,58 @@ use futures_core::Stream;
use tokio::sync::oneshot::Sender;
/// Execution context for HTTP actors
///
/// # Example
///
/// A demonstration of [server-sent events](https://developer.mozilla.org/docs/Web/API/Server-sent_events) using actors:
///
/// ```no_run
/// use std::time::Duration;
///
/// use actix::{Actor, AsyncContext};
/// use actix_web::{get, http::header, App, HttpResponse, HttpServer};
/// use actix_web_actors::HttpContext;
/// use bytes::Bytes;
///
/// struct MyActor {
/// count: usize,
/// }
///
/// impl Actor for MyActor {
/// type Context = HttpContext<Self>;
///
/// fn started(&mut self, ctx: &mut Self::Context) {
/// ctx.run_later(Duration::from_millis(100), Self::write);
/// }
/// }
///
/// impl MyActor {
/// fn write(&mut self, ctx: &mut HttpContext<Self>) {
/// self.count += 1;
/// if self.count > 3 {
/// ctx.write_eof()
/// } else {
/// ctx.write(Bytes::from(format!("event: count\ndata: {}\n\n", self.count)));
/// ctx.run_later(Duration::from_millis(100), Self::write);
/// }
/// }
/// }
///
/// #[get("/")]
/// async fn index() -> HttpResponse {
/// HttpResponse::Ok()
/// .insert_header(header::ContentType(mime::TEXT_EVENT_STREAM))
/// .streaming(HttpContext::create(MyActor { count: 0 }))
/// }
///
/// #[actix_web::main]
/// async fn main() -> std::io::Result<()> {
/// HttpServer::new(|| App::new().service(index))
/// .bind(("127.0.0.1", 8080))?
/// .run()
/// .await
/// }
/// ```
pub struct HttpContext<A>
where
A: Actor<Context = HttpContext<A>>,
@ -210,7 +262,7 @@ mod tests {
type Context = HttpContext<Self>;
fn started(&mut self, ctx: &mut Self::Context) {
ctx.run_later(Duration::from_millis(100), |slf, ctx| slf.write(ctx));
ctx.run_later(Duration::from_millis(100), Self::write);
}
}
@ -221,7 +273,7 @@ mod tests {
ctx.write_eof()
} else {
ctx.write(Bytes::from(format!("LINE-{}", self.count)));
ctx.run_later(Duration::from_millis(100), |slf, ctx| slf.write(ctx));
ctx.run_later(Duration::from_millis(100), Self::write);
}
}
}

View File

@ -1,4 +1,59 @@
//! Actix actors support for Actix Web.
//!
//! # Examples
//!
//! ```no_run
//! use actix::{Actor, StreamHandler};
//! use actix_web::{get, web, App, Error, HttpRequest, HttpResponse, HttpServer};
//! use actix_web_actors::ws;
//!
//! /// Define Websocket actor
//! struct MyWs;
//!
//! impl Actor for MyWs {
//! type Context = ws::WebsocketContext<Self>;
//! }
//!
//! /// Handler for ws::Message message
//! impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for MyWs {
//! fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
//! match msg {
//! Ok(ws::Message::Ping(msg)) => ctx.pong(&msg),
//! Ok(ws::Message::Text(text)) => ctx.text(text),
//! Ok(ws::Message::Binary(bin)) => ctx.binary(bin),
//! _ => (),
//! }
//! }
//! }
//!
//! #[get("/ws")]
//! async fn index(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
//! ws::start(MyWs, &req, stream)
//! }
//!
//! #[actix_web::main]
//! async fn main() -> std::io::Result<()> {
//! HttpServer::new(|| App::new().service(index))
//! .bind(("127.0.0.1", 8080))?
//! .run()
//! .await
//! }
//! ```
//!
//! # Documentation & Community Resources
//! In addition to this API documentation, several other resources are available:
//!
//! * [Website & User Guide](https://actix.rs/)
//! * [Documentation for `actix_web`](actix_web)
//! * [Examples Repository](https://github.com/actix/examples)
//! * [Community Chat on Discord](https://discord.gg/NWpN5mmg3x)
//!
//! To get started navigating the API docs, you may consider looking at the following pages first:
//!
//! * [`ws`]: This module provides actor support for WebSockets.
//!
//! * [`HttpContext`]: This struct provides actor support for streaming HTTP responses.
//!
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]

View File

@ -1,4 +1,60 @@
//! Websocket integration.
//!
//! # Examples
//!
//! ```no_run
//! use actix::{Actor, StreamHandler};
//! use actix_web::{get, web, App, Error, HttpRequest, HttpResponse, HttpServer};
//! use actix_web_actors::ws;
//!
//! /// Define Websocket actor
//! struct MyWs;
//!
//! impl Actor for MyWs {
//! type Context = ws::WebsocketContext<Self>;
//! }
//!
//! /// Handler for ws::Message message
//! impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for MyWs {
//! fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
//! match msg {
//! Ok(ws::Message::Ping(msg)) => ctx.pong(&msg),
//! Ok(ws::Message::Text(text)) => ctx.text(text),
//! Ok(ws::Message::Binary(bin)) => ctx.binary(bin),
//! _ => (),
//! }
//! }
//! }
//!
//! #[get("/ws")]
//! async fn websocket(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
//! ws::start(MyWs, &req, stream)
//! }
//!
//! const MAX_FRAME_SIZE: usize = 16_384; // 16KiB
//!
//! #[get("/custom-ws")]
//! async fn custom_websocket(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
//! // Create a Websocket session with a specific max frame size, and protocols.
//! ws::WsResponseBuilder::new(MyWs, &req, stream)
//! .frame_size(MAX_FRAME_SIZE)
//! .protocols(&["A", "B"])
//! .start()
//! }
//!
//! #[actix_web::main]
//! async fn main() -> std::io::Result<()> {
//! HttpServer::new(|| {
//! App::new()
//! .service(websocket)
//! .service(custom_websocket)
//! })
//! .bind(("127.0.0.1", 8080))?
//! .run()
//! .await
//! }
//! ```
//!
use std::{
collections::VecDeque,
@@ -41,20 +97,51 @@ use tokio::sync::oneshot;
///
/// # Examples
///
/// Create a Websocket session response with default configuration.
/// ```ignore
/// WsResponseBuilder::new(WsActor, &req, stream).start()
/// ```
/// ```no_run
/// # use actix::{Actor, StreamHandler};
/// # use actix_web::{get, web, App, Error, HttpRequest, HttpResponse, HttpServer};
/// # use actix_web_actors::ws;
/// #
/// # struct MyWs;
/// #
/// # impl Actor for MyWs {
/// # type Context = ws::WebsocketContext<Self>;
/// # }
/// #
/// # /// Handler for ws::Message message
/// # impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for MyWs {
/// # fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {}
/// # }
/// #
/// #[get("/ws")]
/// async fn websocket(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
/// ws::WsResponseBuilder::new(MyWs, &req, stream).start()
/// }
///
/// Create a Websocket session with a specific max frame size, [`Codec`], and protocols.
/// ```ignore
/// const MAX_FRAME_SIZE: usize = 16_384; // 16KiB
///
/// ws::WsResponseBuilder::new(WsActor, &req, stream)
/// .codec(Codec::new())
/// .protocols(&["A", "B"])
/// #[get("/custom-ws")]
/// async fn custom_websocket(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
/// // Create a Websocket session with a specific max frame size, codec, and protocols.
/// ws::WsResponseBuilder::new(MyWs, &req, stream)
/// .codec(actix_http::ws::Codec::new())
/// // This will overwrite the codec's max frame-size
/// .frame_size(MAX_FRAME_SIZE)
/// .protocols(&["A", "B"])
/// .start()
/// }
/// #
/// # #[actix_web::main]
/// # async fn main() -> std::io::Result<()> {
/// # HttpServer::new(|| {
/// # App::new()
/// # .service(websocket)
/// # .service(custom_websocket)
/// # })
/// # .bind(("127.0.0.1", 8080))?
/// # .run()
/// # .await
/// # }
/// ```
pub struct WsResponseBuilder<'a, A, T>
where

View File

@@ -1,6 +1,29 @@
# Changes
## Unreleased - 2021-xx-xx
## Unreleased - 2022-xx-xx
## 4.1.0 - 2022-09-11
- Add `#[routes]` macro to support multiple paths for one handler. [#2718]
- Minimum supported Rust version (MSRV) is now 1.59 due to transitive `time` dependency.
[#2718]: https://github.com/actix/actix-web/pull/2718
## 4.0.1 - 2022-06-11
- Fix support for guard paths in route handler macros. [#2771]
- Minimum supported Rust version (MSRV) is now 1.56 due to transitive `hashbrown` dependency.
[#2771]: https://github.com/actix/actix-web/pull/2771
## 4.0.0 - 2022-02-24
- Version aligned with `actix-web` and will remain in sync going forward.
- No significant changes since `0.5.0`.
## 0.5.0 - 2022-02-24
- No significant changes since `0.5.0-rc.2`.
## 0.5.0-rc.2 - 2022-02-01
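The `#[routes]` macro noted in the 4.1.0 entry above (from #2718) lets one handler serve several method/path combinations. A rough usage sketch follows; the handler name and paths are illustrative, not taken from the changelog.

```rust
use actix_web::{routes, App, HttpResponse, HttpServer, Responder};

// One handler body registered under several method/path combinations.
#[routes]
#[get("/")]
#[get("/index")]
#[head("/")]
async fn index() -> impl Responder {
    HttpResponse::Ok().body("hello")
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // The generated service is registered like any single-route handler.
    HttpServer::new(|| App::new().service(index))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```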

View File

@@ -1,6 +1,6 @@
[package]
name = "actix-web-codegen"
version = "0.5.0-rc.2"
version = "4.1.0"
description = "Routing and runtime macros for Actix Web"
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git"
@ -15,17 +15,17 @@ edition = "2018"
proc-macro = true
[dependencies]
actix-router = "0.5.0-rc.3"
actix-router = "0.5.0"
proc-macro2 = "1"
quote = "1"
syn = { version = "1", features = ["full", "parsing"] }
syn = { version = "1", features = ["full", "extra-traits"] }
[dev-dependencies]
actix-macros = "0.2.3"
actix-rt = "2.2"
actix-test = "0.1.0-beta.12"
actix-utils = "3.0.0"
actix-web = "4.0.0-rc.1"
actix-test = "0.1"
actix-utils = "3"
actix-web = "4"
futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] }
trybuild = "1"

Some files were not shown because too many files have changed in this diff.