mirror of https://github.com/fafhrd91/actix-web synced 2025-07-03 01:34:32 +02:00

Compare commits

...

63 Commits

Author SHA1 Message Date
e0918fb179 chore(actix-web): prepare release 4.9.0 2024-08-10 03:21:55 +01:00
9ba326aed0 chore(actix-http): prepare release 3.9.0 2024-08-10 03:09:09 +01:00
882fb3d25b chore(actors): add version marker in changelog 2024-08-10 03:08:18 +01:00
be28a0bd6d feat: add from_fn middleware (#3447) 2024-08-10 01:41:27 +01:00
a431b7356c feat: add ThinData wrapper (#3446) 2024-08-10 00:42:34 +01:00
5be53820f0 docs(actors): add maintenance badge 2024-08-07 04:32:16 +01:00
d7d9000b19 chore: address clippy warnings 2024-08-07 04:06:18 +01:00
e4e4bb799c chore(actix-web-actors): prepare release 4.3.1 2024-08-07 04:02:30 +01:00
323d1fa64f build(deps): bump taiki-e/install-action from 2.42.9 to 2.42.17 (#3442)
Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.42.9 to 2.42.17.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/taiki-e/install-action/compare/v2.42.9...v2.42.17)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-07 01:05:32 +00:00
9aa62112aa build(deps): bump taiki-e/install-action from 2.42.4 to 2.42.9 (#3441)
Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.42.4 to 2.42.9.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/taiki-e/install-action/compare/v2.42.4...v2.42.9)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-07-29 00:25:30 +00:00
270a6a3b70 build(deps): bump taiki-e/install-action from 2.41.17 to 2.42.4 (#3440)
Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.41.17 to 2.42.4.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/taiki-e/install-action/compare/v2.41.17...v2.42.4)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-07-22 00:49:25 +00:00
07f720f716 docs: fix typo (#3439) 2024-07-21 17:34:42 +00:00
f71f9ca66b build(deps): bump taiki-e/install-action from 2.41.10 to 2.41.17 (#3431)
Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.41.10 to 2.41.17.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/taiki-e/install-action/compare/v2.41.10...v2.41.17)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-07-20 00:13:57 +00:00
b6bee346f7 build(deps): bump taiki-e/install-action from 2.41.7 to 2.41.10 (#3423)
Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.41.7 to 2.41.10.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/taiki-e/install-action/compare/v2.41.7...v2.41.10)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-07-08 09:31:50 +00:00
5c6e0e17d3 feat(http): impl FromIter for HeaderMap 2024-07-07 21:16:25 +01:00
e97e28db4f docs(multipart): improve crate root docs 2024-07-07 20:32:56 +01:00
16125bd3be docs(multipart): doc PayloadBuffer::readline 2024-07-07 20:19:56 +01:00
e9ccfbc866 refactor(multipart): clean up InnerField::poll 2024-07-07 20:19:35 +01:00
e0e4d1e661 chore: move deny lints to manifests 2024-07-07 03:54:00 +01:00
b01fbddba4 chore(actix-multipart): prepare release 0.7.2 2024-07-07 00:34:18 +01:00
215a294584 chore(actix-multipart-derive): prepare release 0.7.0 2024-07-07 00:30:27 +01:00
ffee672909 chore(actix-multipart): prepare release 0.7.1 2024-07-07 00:19:22 +01:00
01d60f3315 chore(actix-multipart): prepare release 0.7.0 2024-07-07 00:05:53 +01:00
6ae131ce29 test(multipart): replace SlowStream helper 2024-07-06 23:38:37 +01:00
5c9e6e7c1d feat(multipart): add field bytes method 2024-07-06 22:58:54 +01:00
611154beb2 refactor: rename multipart module 2024-07-04 05:03:42 +01:00
210c9a5eb3 refactor: multipart tweaks 2024-07-04 04:53:10 +01:00
00c185f617 refactor(multipart): move lints to manifest 2024-07-04 01:12:17 +01:00
7326707599 refactor(multipart): move Field to module 2024-07-04 00:40:25 +01:00
befb9c8196 refactor(multipart): move Payload* to module 2024-07-04 00:37:25 +01:00
2136e07bdd refactor(multipart): move Safety to module 2024-07-04 00:26:10 +01:00
e189e4a3bf chore(awc): fix the issue where the code in the awc example cannot run (#3421) 2024-07-01 09:39:54 +00:00
71cd3a31f9 fix(multipart): optional content-disposition for non-form-data requests (#3416) 2024-07-01 03:55:08 +01:00
668b8e5745 build(deps): bump taiki-e/install-action from 2.41.2 to 2.41.7 (#3419)
Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.41.2 to 2.41.7.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/taiki-e/install-action/compare/v2.41.2...v2.41.7)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-07-01 00:55:49 +00:00
763c58445a test: fix tests based on mime-guess inference
relates to https://github.com/abonander/mime_guess/pull/86
2024-06-30 20:28:11 +01:00
0b193c7106 build: fix doc-watch recipe 2024-06-30 18:55:59 +01:00
4db4251b8f chore: cargo update after version bumps 2024-06-30 18:55:58 +01:00
9f45be03e1 build(deps): bump taiki-e/install-action from 2.39.1 to 2.41.2 (#3412)
Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.39.1 to 2.41.2.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/taiki-e/install-action/compare/v2.39.1...v2.41.2)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-06-24 09:22:54 +00:00
4222f92bd3 chore(actix-web): prepare release 4.8.0 2024-06-20 00:23:11 +01:00
d92a73eacd chore(actix-http): prepare release 3.8.0 2024-06-20 00:18:22 +01:00
c612b5ce94 ci: fix checks 2024-06-20 00:13:42 +01:00
cbb55ba27d ci: use just for feature combos check 2024-06-20 00:04:35 +01:00
643d64581a Fix Rustls 0.22 & 0.23 are limited to 256 handshakes per second. (#3408) 2024-06-19 22:34:49 +00:00
66905efd7b build(deps): bump taiki-e/install-action from 2.38.0 to 2.39.1 (#3404)
Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.38.0 to 2.39.1.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/taiki-e/install-action/compare/v2.38.0...v2.39.1)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-06-17 02:28:03 +00:00
c076e34b5d build(deps): bump codecov/codecov-action from 4.4.1 to 4.5.0 (#3405)
Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 4.4.1 to 4.5.0.
- [Release notes](https://github.com/codecov/codecov-action/releases)
- [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/codecov/codecov-action/compare/v4.4.1...v4.5.0)

---
updated-dependencies:
- dependency-name: codecov/codecov-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-06-17 02:27:23 +00:00
3ecaff5f5b build(deps): bump taiki-e/cache-cargo-install-action from 1.2.2 to 2.0.1 (#3406)
Bumps [taiki-e/cache-cargo-install-action](https://github.com/taiki-e/cache-cargo-install-action) from 1.2.2 to 2.0.1.
- [Release notes](https://github.com/taiki-e/cache-cargo-install-action/releases)
- [Changelog](https://github.com/taiki-e/cache-cargo-install-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/taiki-e/cache-cargo-install-action/compare/v1.2.2...v2.0.1)

---
updated-dependencies:
- dependency-name: taiki-e/cache-cargo-install-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-06-17 00:28:10 +00:00
fa74ab3dfb remove references to StaticFiles (#3400) 2024-06-14 01:51:29 +00:00
188206a903 feat: Html responder (#3399) 2024-06-11 00:36:46 +01:00
0ce488e57a docs: fix build 2024-06-10 23:54:16 +01:00
132b84d3b1 docs(multipart): use cargo-rdme 2024-06-10 23:35:26 +01:00
cc5030c542 docs(http-test): use cargo-rdme 2024-06-10 23:31:45 +01:00
cd301a6932 docs: local docs doc everything but only list workspace crates 2024-06-10 23:30:51 +01:00
4c4c279938 docs(test): integrate cargo-rdme 2024-06-10 23:23:38 +01:00
0fd85bae2a test: demonstrate panic in multipart forms (#3397) 2024-06-10 21:51:53 +01:00
9b3de1f1fe ci: fix doctest coverage 2024-06-10 04:15:58 +01:00
9553e7afff ci: fix coverage 2024-06-10 04:08:10 +01:00
d9579cf58a test: coverage for doctests 2024-06-10 04:05:21 +01:00
7a2313cc4b web: add HttpRequest::full_url() (#3096)
* implemented function which returns full uri

* changes added into the changelog

* added test function for full_uri method

* refactor: rename to full_url

---------

Co-authored-by: Rob Ede <robjtede@icloud.com>
2024-06-10 02:49:50 +00:00
2ee92d778e ci: external types checking (#3175) 2024-06-10 03:39:06 +01:00
59e42c1446 Return 415 rather than 400 on Urlencoded Content-Type mismatch (#3334)
Co-authored-by: Rob Ede <robjtede@icloud.com>
2024-06-10 01:19:35 +00:00
53086a90a6 build: add coverage recipes to justfile 2024-06-10 01:58:16 +01:00
7f529e35b2 build(deps): bump actions-rust-lang/setup-rust-toolchain from 1.8.0 to 1.9.0 (#3395)
build(deps): bump actions-rust-lang/setup-rust-toolchain

Bumps [actions-rust-lang/setup-rust-toolchain](https://github.com/actions-rust-lang/setup-rust-toolchain) from 1.8.0 to 1.9.0.
- [Release notes](https://github.com/actions-rust-lang/setup-rust-toolchain/releases)
- [Changelog](https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions-rust-lang/setup-rust-toolchain/compare/v1.8.0...v1.9.0)

---
updated-dependencies:
- dependency-name: actions-rust-lang/setup-rust-toolchain
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-06-10 00:45:11 +00:00
4908fd7dea build(deps): bump taiki-e/install-action from 2.34.0 to 2.38.0 (#3396)
Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.34.0 to 2.38.0.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/taiki-e/install-action/compare/v2.34.0...v2.38.0)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-06-10 00:44:58 +00:00
97 changed files with 3381 additions and 1828 deletions

View File

@ -1,10 +0,0 @@
[alias]
lint = "clippy --workspace --all-targets -- -Dclippy::todo"
lint-all = "clippy --workspace --all-features --all-targets -- -Dclippy::todo"
# lib checking
ci-check-min = "hack --workspace check --no-default-features"
ci-check-default = "hack --workspace check"
ci-check-default-tests = "check --workspace --tests"
ci-check-all-feature-powerset="hack --workspace --feature-powerset --depth=4 --skip=__compress,experimental-io-uring check"
ci-check-all-feature-powerset-linux="hack --workspace --feature-powerset --depth=4 --skip=__compress check"

View File

@ -44,20 +44,20 @@ jobs:
echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV
- name: Install Rust (${{ matrix.version.name }})
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with:
toolchain: ${{ matrix.version.version }}
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
uses: taiki-e/install-action@v2.34.0
uses: taiki-e/install-action@v2.42.17
with:
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
- name: check minimal
run: cargo ci-check-min
run: just check-min
- name: check default
run: cargo ci-check-default
run: just check-default
- name: tests
timeout-minutes: 60
@ -76,16 +76,16 @@ jobs:
- name: Free Disk Space
run: ./scripts/free-disk-space.sh
- name: Setup mold linker
uses: rui314/setup-mold@v1
- name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
- name: Install cargo-hack
uses: taiki-e/install-action@v2.34.0
- name: Install just, cargo-hack
uses: taiki-e/install-action@v2.42.17
with:
tool: cargo-hack
tool: just,cargo-hack
- name: check feature combinations
run: cargo ci-check-all-feature-powerset
- name: check feature combinations
run: cargo ci-check-all-feature-powerset-linux
- name: Check feature combinations
run: just check-feature-combinations

View File

@ -59,12 +59,12 @@ jobs:
uses: rui314/setup-mold@v1
- name: Install Rust (${{ matrix.version.name }})
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with:
toolchain: ${{ matrix.version.version }}
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
uses: taiki-e/install-action@v2.34.0
uses: taiki-e/install-action@v2.42.17
with:
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
@ -73,10 +73,10 @@ jobs:
run: just downgrade-for-msrv
- name: check minimal
run: cargo ci-check-min
run: just check-min
- name: check default
run: cargo ci-check-default
run: just check-default
- name: tests
timeout-minutes: 60
@ -92,7 +92,7 @@ jobs:
- uses: actions/checkout@v4
- name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with:
toolchain: nightly
@ -108,12 +108,12 @@ jobs:
- uses: actions/checkout@v4
- name: Install Rust (nightly)
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with:
toolchain: nightly
- name: Install just
uses: taiki-e/install-action@v2.34.0
uses: taiki-e/install-action@v2.42.17
with:
tool: just

View File

@ -17,21 +17,22 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
- name: Install Rust (nightly)
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with:
components: llvm-tools-preview
toolchain: nightly
components: llvm-tools
- name: Install just,cargo-llvm-cov
uses: taiki-e/install-action@v2.34.0
- name: Install just, cargo-llvm-cov, cargo-nextest
uses: taiki-e/install-action@v2.42.17
with:
tool: just,cargo-llvm-cov
tool: just,cargo-llvm-cov,cargo-nextest
- name: Generate code coverage
run: cargo llvm-cov --workspace --all-features --codecov --output-path codecov.json
run: just test-coverage-codecov
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4.4.1
uses: codecov/codecov-action@v4.5.0
with:
files: codecov.json
fail_ci_if_error: true

View File

@ -18,7 +18,7 @@ jobs:
- uses: actions/checkout@v4
- name: Install Rust (nightly)
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with:
toolchain: nightly
components: rustfmt
@ -36,7 +36,7 @@ jobs:
- uses: actions/checkout@v4
- name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with:
components: clippy
@ -55,7 +55,7 @@ jobs:
- uses: actions/checkout@v4
- name: Install Rust (nightly)
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with:
toolchain: nightly
components: rust-docs
@ -65,6 +65,29 @@ jobs:
RUSTDOCFLAGS: -D warnings
run: cargo +nightly doc --no-deps --workspace --all-features
check-external-types:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install Rust (nightly-2024-05-01)
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with:
toolchain: nightly-2024-05-01
- name: Install just
uses: taiki-e/install-action@v2.42.17
with:
tool: just
- name: Install cargo-check-external-types
uses: taiki-e/cache-cargo-install-action@v2.0.1
with:
tool: cargo-check-external-types
- name: check external types
run: just check-external-types-all +nightly-2024-05-01
public-api-diff:
runs-on: ubuntu-latest
steps:
@ -76,13 +99,13 @@ jobs:
- name: Checkout PR branch
uses: actions/checkout@v4
- name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
- name: Install Rust (nightly-2024-06-07)
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with:
toolchain: nightly-2024-06-07
- name: Install cargo-public-api
uses: taiki-e/install-action@v2.34.0
uses: taiki-e/install-action@v2.42.17
with:
tool: cargo-public-api

View File

@ -51,3 +51,11 @@ awc = { path = "awc" }
# actix-utils = { path = "../actix-net/actix-utils" }
# actix-tls = { path = "../actix-net/actix-tls" }
# actix-server = { path = "../actix-net/actix-server" }
[workspace.lints.rust]
rust_2018_idioms = { level = "deny" }
future_incompatible = { level = "deny" }
nonstandard_style = { level = "deny" }
[workspace.lints.clippy]
# clone_on_ref_ptr = { level = "deny" }

View File

@ -13,9 +13,14 @@ categories = ["asynchronous", "web-programming::http-server"]
license = "MIT OR Apache-2.0"
edition = "2021"
[lib]
name = "actix_files"
path = "src/lib.rs"
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_http::*",
"actix_service::*",
"actix_web::*",
"http::*",
"mime::*",
]
[features]
experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]
@ -49,3 +54,6 @@ actix-test = "0.1"
actix-web = "4"
env_logger = "0.11"
tempfile = "3.2"
[lints]
workspace = true

View File

@ -11,8 +11,7 @@
//! .service(Files::new("/static", ".").prefer_utf8(true));
//! ```
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible, missing_docs, missing_debug_implementations)]
#![warn(missing_docs, missing_debug_implementations)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
@ -307,11 +306,11 @@ mod tests {
let resp = file.respond_to(&req);
assert_eq!(
resp.headers().get(header::CONTENT_TYPE).unwrap(),
"application/javascript; charset=utf-8"
"text/javascript",
);
assert_eq!(
resp.headers().get(header::CONTENT_DISPOSITION).unwrap(),
"inline; filename=\"test.js\""
"inline; filename=\"test.js\"",
);
}

View File

@ -18,9 +18,17 @@ edition = "2021"
[package.metadata.docs.rs]
features = []
[lib]
name = "actix_http_test"
path = "src/lib.rs"
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_codec::*",
"actix_http::*",
"actix_server::*",
"awc::*",
"bytes::*",
"futures_core::*",
"http::*",
"tokio::*",
]
[features]
default = []
@ -51,3 +59,6 @@ tokio = { version = "1.24.2", features = ["sync"] }
[dev-dependencies]
actix-http = "3"
[lints]
workspace = true

View File

@ -1,7 +1,5 @@
# `actix-http-test`
> Various helpers for Actix applications to use during testing.
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test)
@ -14,3 +12,9 @@
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end -->
<!-- cargo-rdme start -->
Various helpers for Actix applications to use during testing.
<!-- cargo-rdme end -->

View File

@ -1,7 +1,5 @@
//! Various helpers for Actix applications to use during testing.
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]

View File

@ -2,6 +2,14 @@
## Unreleased
## 3.9.0
### Added
- Implement `FromIterator<(HeaderName, HeaderValue)>` for `HeaderMap`.
## 3.8.0
### Added
- Add `error::InvalidStatusCode` re-export.
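
For the 3.8.0 entry above, a minimal sketch (not part of this diff) of where the re-exported error type surfaces, assuming the crate-root `StatusCode` re-export behaves like `http::StatusCode`:

```rust
use actix_http::{error::InvalidStatusCode, StatusCode};

fn main() {
    // `StatusCode::from_u16` only accepts codes in 100..=999, so 1000 is rejected
    // with the re-exported `InvalidStatusCode` error.
    let err: InvalidStatusCode = StatusCode::from_u16(1000).unwrap_err();
    println!("rejected status code: {err}");
}
```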

View File

@ -1,6 +1,6 @@
[package]
name = "actix-http"
version = "3.7.0"
version = "3.9.0"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
@ -34,51 +34,72 @@ features = [
"compress-zstd",
]
[lib]
name = "actix_http"
path = "src/lib.rs"
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_codec::*",
"actix_service::*",
"actix_tls::*",
"actix_utils::*",
"bytes::*",
"bytestring::*",
"encoding_rs::*",
"futures_core::*",
"h2::*",
"http::*",
"httparse::*",
"language_tags::*",
"mime::*",
"openssl::*",
"rustls::*",
"tokio_util::*",
"tokio::*",
]
[features]
default = []
# HTTP/2 protocol support
http2 = ["h2"]
http2 = ["dep:h2"]
# WebSocket protocol implementation
ws = [
"local-channel",
"base64",
"rand",
"sha1",
"dep:local-channel",
"dep:base64",
"dep:rand",
"dep:sha1",
]
# TLS via OpenSSL
openssl = ["actix-tls/accept", "actix-tls/openssl"]
openssl = ["__tls", "actix-tls/accept", "actix-tls/openssl"]
# TLS via Rustls v0.20
rustls = ["rustls-0_20"]
rustls = ["__tls", "rustls-0_20"]
# TLS via Rustls v0.20
rustls-0_20 = ["actix-tls/accept", "actix-tls/rustls-0_20"]
rustls-0_20 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_20"]
# TLS via Rustls v0.21
rustls-0_21 = ["actix-tls/accept", "actix-tls/rustls-0_21"]
rustls-0_21 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_21"]
# TLS via Rustls v0.22
rustls-0_22 = ["actix-tls/accept", "actix-tls/rustls-0_22"]
rustls-0_22 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_22"]
# TLS via Rustls v0.23
rustls-0_23 = ["actix-tls/accept", "actix-tls/rustls-0_23"]
rustls-0_23 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_23"]
# Compression codecs
compress-brotli = ["__compress", "brotli"]
compress-gzip = ["__compress", "flate2"]
compress-zstd = ["__compress", "zstd"]
compress-brotli = ["__compress", "dep:brotli"]
compress-gzip = ["__compress", "dep:flate2"]
compress-zstd = ["__compress", "dep:zstd"]
# Internal (PRIVATE!) features used to aid testing and checking feature status.
# Don't rely on these whatsoever. They are semver-exempt and may disappear at anytime.
__compress = []
# Internal (PRIVATE!) features used to aid checking feature status.
# Don't rely on these whatsoever. They may disappear at anytime.
__tls = []
[dependencies]
actix-service = "2"
actix-codec = "0.5"
@ -146,6 +167,9 @@ tls-openssl = { package = "openssl", version = "0.10.55" }
tls-rustls_023 = { package = "rustls", version = "0.23" }
tokio = { version = "1.24.2", features = ["net", "rt", "macros"] }
[lints]
workspace = true
[[example]]
name = "ws"
required-features = ["ws", "rustls-0_23"]

View File

@ -5,11 +5,11 @@
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http)
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.7.0)](https://docs.rs/actix-http/3.7.0)
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.9.0)](https://docs.rs/actix-http/3.9.0)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-http/3.7.0/status.svg)](https://deps.rs/crate/actix-http/3.7.0)
[![dependency status](https://deps.rs/crate/actix-http/3.9.0/status.svg)](https://deps.rs/crate/actix-http/3.9.0)
[![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)

View File

@ -541,6 +541,6 @@ where
fn call(&self, (io, addr): (T, Option<net::SocketAddr>)) -> Self::Future {
let conn_data = OnConnectData::from_io(&io, self.on_connect_ext.as_deref());
Dispatcher::new(io, self.flow.clone(), self.cfg.clone(), addr, conn_data)
Dispatcher::new(io, Rc::clone(&self.flow), self.cfg.clone(), addr, conn_data)
}
}
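
Several hunks in this comparison replace method-call `.clone()` on `Rc`-typed fields with the fully-qualified `Rc::clone(&…)` form. A minimal standalone sketch of the idiom (illustrative only, not taken from the diff):

```rust
use std::rc::Rc;

fn main() {
    let flow = Rc::new("service flow");

    // Method-call syntax compiles, but reads as though the pointee might be deep-cloned.
    let a = flow.clone();

    // The fully-qualified form makes the cheap reference-count bump explicit; it is the
    // style encouraged by clippy's `clone_on_ref_ptr` lint, which appears (commented out)
    // in the new workspace lint table.
    let b = Rc::clone(&flow);

    assert!(Rc::ptr_eq(&a, &b));
}
```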

View File

@ -434,7 +434,7 @@ where
H2ServiceHandlerResponse {
state: State::Handshake(
Some(self.flow.clone()),
Some(Rc::clone(&self.flow)),
Some(self.cfg.clone()),
addr,
on_connect_data,

View File

@ -13,8 +13,9 @@ use super::AsHeaderName;
/// `HeaderMap` is a "multi-map" of [`HeaderName`] to one or more [`HeaderValue`]s.
///
/// # Examples
///
/// ```
/// use actix_http::header::{self, HeaderMap, HeaderValue};
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
///
/// let mut map = HeaderMap::new();
///
@ -29,6 +30,21 @@ use super::AsHeaderName;
///
/// assert!(!map.contains_key(header::ORIGIN));
/// ```
///
/// Construct a header map using the [`FromIterator`] implementation. Note that it uses the append
/// strategy, so duplicate header names are preserved.
///
/// ```
/// use actix_http::header::{self, HeaderMap, HeaderValue};
///
/// let headers = HeaderMap::from_iter([
/// (header::CONTENT_TYPE, HeaderValue::from_static("text/plain")),
/// (header::COOKIE, HeaderValue::from_static("foo=1")),
/// (header::COOKIE, HeaderValue::from_static("bar=1")),
/// ]);
///
/// assert_eq!(headers.len(), 3);
/// ```
#[derive(Debug, Clone, Default)]
pub struct HeaderMap {
pub(crate) inner: AHashMap<HeaderName, Value>,
@ -368,8 +384,8 @@ impl HeaderMap {
/// let removed = map.insert(header::ACCEPT, HeaderValue::from_static("text/html"));
/// assert!(!removed.is_empty());
/// ```
pub fn insert(&mut self, key: HeaderName, val: HeaderValue) -> Removed {
let value = self.inner.insert(key, Value::one(val));
pub fn insert(&mut self, name: HeaderName, val: HeaderValue) -> Removed {
let value = self.inner.insert(name, Value::one(val));
Removed::new(value)
}
@ -636,6 +652,16 @@ impl<'a> IntoIterator for &'a HeaderMap {
}
}
impl FromIterator<(HeaderName, HeaderValue)> for HeaderMap {
fn from_iter<T: IntoIterator<Item = (HeaderName, HeaderValue)>>(iter: T) -> Self {
iter.into_iter()
.fold(Self::new(), |mut map, (name, value)| {
map.append(name, value);
map
})
}
}
/// Convert a `http::HeaderMap` to our `HeaderMap`.
impl From<http::HeaderMap> for HeaderMap {
fn from(mut map: http::HeaderMap) -> Self {

View File

@ -6,10 +6,10 @@
//! | ------------------- | ------------------------------------------- |
//! | `http2` | HTTP/2 support via [h2]. |
//! | `openssl` | TLS support via [OpenSSL]. |
//! | `rustls` | TLS support via [rustls] 0.20. |
//! | `rustls-0_21` | TLS support via [rustls] 0.21. |
//! | `rustls-0_22` | TLS support via [rustls] 0.22. |
//! | `rustls-0_23` | TLS support via [rustls] 0.23. |
//! | `rustls-0_20` | TLS support via rustls 0.20. |
//! | `rustls-0_21` | TLS support via rustls 0.21. |
//! | `rustls-0_22` | TLS support via rustls 0.22. |
//! | `rustls-0_23` | TLS support via [rustls] 0.23. |
//! | `compress-brotli` | Payload compression support: Brotli. |
//! | `compress-gzip` | Payload compression support: Deflate, Gzip. |
//! | `compress-zstd` | Payload compression support: Zstd. |
@ -20,8 +20,6 @@
//! [rustls]: https://crates.io/crates/rustls
//! [trust-dns]: https://crates.io/crates/trust-dns
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![allow(
clippy::type_complexity,
clippy::too_many_arguments,
@ -61,13 +59,7 @@ pub mod ws;
#[allow(deprecated)]
pub use self::payload::PayloadStream;
#[cfg(any(
feature = "openssl",
feature = "rustls-0_20",
feature = "rustls-0_21",
feature = "rustls-0_22",
feature = "rustls-0_23",
))]
#[cfg(feature = "__tls")]
pub use self::service::TlsAcceptorConfig;
pub use self::{
builder::HttpServiceBuilder,

View File

@ -66,7 +66,7 @@ impl<T: Head> ops::DerefMut for Message<T> {
impl<T: Head> Drop for Message<T> {
fn drop(&mut self) {
T::with_pool(|p| p.release(self.head.clone()))
T::with_pool(|p| p.release(Rc::clone(&self.head)))
}
}

View File

@ -351,12 +351,9 @@ mod tests {
assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/plain");
let resp = Response::build(StatusCode::OK)
.content_type(mime::APPLICATION_JAVASCRIPT_UTF_8)
.content_type(mime::TEXT_JAVASCRIPT)
.body(Bytes::new());
assert_eq!(
resp.headers().get(CONTENT_TYPE).unwrap(),
"application/javascript; charset=utf-8"
);
assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/javascript");
}
#[test]

View File

@ -241,25 +241,13 @@ where
}
/// Configuration options used when accepting TLS connection.
#[cfg(any(
feature = "openssl",
feature = "rustls-0_20",
feature = "rustls-0_21",
feature = "rustls-0_22",
feature = "rustls-0_23",
))]
#[cfg(feature = "__tls")]
#[derive(Debug, Default)]
pub struct TlsAcceptorConfig {
pub(crate) handshake_timeout: Option<std::time::Duration>,
}
#[cfg(any(
feature = "openssl",
feature = "rustls-0_20",
feature = "rustls-0_21",
feature = "rustls-0_22",
feature = "rustls-0_23",
))]
#[cfg(feature = "__tls")]
impl TlsAcceptorConfig {
/// Set TLS handshake timeout duration.
pub fn handshake_timeout(self, dur: std::time::Duration) -> Self {
@ -922,7 +910,7 @@ where
handshake: Some((
crate::h2::handshake_with_timeout(io, &self.cfg),
self.cfg.clone(),
self.flow.clone(),
Rc::clone(&self.flow),
conn_data,
peer_addr,
)),
@ -938,7 +926,7 @@ where
state: State::H1 {
dispatcher: h1::Dispatcher::new(
io,
self.flow.clone(),
Rc::clone(&self.flow),
self.cfg.clone(),
peer_addr,
conn_data,

View File

@ -159,8 +159,8 @@ impl TestBuffer {
#[allow(dead_code)]
pub(crate) fn clone(&self) -> Self {
Self {
read_buf: self.read_buf.clone(),
write_buf: self.write_buf.clone(),
read_buf: Rc::clone(&self.read_buf),
write_buf: Rc::clone(&self.write_buf),
err: self.err.clone(),
}
}

View File

@ -2,6 +2,8 @@
## Unreleased
## 0.7.0
- Minimum supported Rust version (MSRV) is now 1.72.
## 0.6.1

View File

@ -1,6 +1,6 @@
[package]
name = "actix-multipart-derive"
version = "0.6.1"
version = "0.7.0"
authors = ["Jacob Halsey <jacob@jhalsey.com>"]
description = "Multipart form derive macro for Actix Web"
keywords = ["http", "web", "framework", "async", "futures"]
@ -25,7 +25,10 @@ quote = "1"
syn = "2"
[dev-dependencies]
actix-multipart = "0.6"
actix-multipart = "0.7"
actix-web = "4"
rustversion = "1"
trybuild = "1"
[lints]
workspace = true

View File

@ -5,11 +5,11 @@
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-multipart-derive?label=latest)](https://crates.io/crates/actix-multipart-derive)
[![Documentation](https://docs.rs/actix-multipart-derive/badge.svg?version=0.6.1)](https://docs.rs/actix-multipart-derive/0.6.1)
[![Documentation](https://docs.rs/actix-multipart-derive/badge.svg?version=0.7.0)](https://docs.rs/actix-multipart-derive/0.7.0)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart-derive.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-multipart-derive/0.6.1/status.svg)](https://deps.rs/crate/actix-multipart-derive/0.6.1)
[![dependency status](https://deps.rs/crate/actix-multipart-derive/0.7.0/status.svg)](https://deps.rs/crate/actix-multipart-derive/0.7.0)
[![Download](https://img.shields.io/crates/d/actix-multipart-derive.svg)](https://crates.io/crates/actix-multipart-derive)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)

View File

@ -2,8 +2,6 @@
//!
//! See [`macro@MultipartForm`] for usage examples.
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
@ -138,7 +136,7 @@ struct ParsedField<'t> {
/// `#[multipart(duplicate_field = "<behavior>")]` attribute:
///
/// - "ignore": (default) Extra fields are ignored. I.e., the first one is persisted.
/// - "deny": A `MultipartError::UnsupportedField` error response is returned.
/// - "deny": A `MultipartError::UnknownField` error response is returned.
/// - "replace": Each field is processed, but only the last one is persisted.
///
/// Note that `Vec` fields will ignore this option.
@ -229,7 +227,7 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenS
// Return value when a field name is not supported by the form
let unknown_field_result = if attrs.deny_unknown_fields {
quote!(::std::result::Result::Err(
::actix_multipart::MultipartError::UnsupportedField(field.name().to_string())
::actix_multipart::MultipartError::UnknownField(field.name().unwrap().to_string())
))
} else {
quote!(::std::result::Result::Ok(()))
@ -292,7 +290,7 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenS
limits: &'t mut ::actix_multipart::form::Limits,
state: &'t mut ::actix_multipart::form::State,
) -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = ::std::result::Result<(), ::actix_multipart::MultipartError>> + 't>> {
match field.name() {
match field.name().unwrap() {
#handle_field_impl
_ => return ::std::boxed::Box::pin(::std::future::ready(#unknown_field_result)),
}

View File

@ -2,6 +2,26 @@
## Unreleased
## 0.7.2
- Fix re-exported version of `actix-multipart-derive`.
## 0.7.1
- Expose `LimitExceeded` error type.
## 0.7.0
- Add `MultipartError::ContentTypeIncompatible` variant.
- Add `MultipartError::ContentDispositionNameMissing` variant.
- Add `Field::bytes()` method.
- Rename `MultipartError::{NoContentDisposition => ContentDispositionMissing}` variant.
- Rename `MultipartError::{NoContentType => ContentTypeMissing}` variant.
- Rename `MultipartError::{ParseContentType => ContentTypeParse}` variant.
- Rename `MultipartError::{Boundary => BoundaryMissing}` variant.
- Rename `MultipartError::{UnsupportedField => UnknownField}` variant.
- Remove top-level re-exports of `test` utilities.
## 0.6.2
- Add testing utilities under new module `test`.

View File

@ -1,32 +1,47 @@
[package]
name = "actix-multipart"
version = "0.6.2"
version = "0.7.2"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Jacob Halsey <jacob@jhalsey.com>",
"Rob Ede <robjtede@icloud.com>",
]
description = "Multipart form support for Actix Web"
keywords = ["http", "web", "framework", "async", "futures"]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
license = "MIT OR Apache-2.0"
edition = "2021"
description = "Multipart request & form support for Actix Web"
keywords = ["http", "actix", "web", "multipart", "form"]
homepage.workspace = true
repository.workspace = true
license.workspace = true
edition.workspace = true
[package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"]
all-features = true
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_http::*",
"actix_multipart_derive::*",
"actix_utils::*",
"actix_web::*",
"bytes::*",
"futures_core::*",
"mime::*",
"serde_json::*",
"serde_plain::*",
"serde::*",
"tempfile::*",
]
[features]
default = ["tempfile", "derive"]
derive = ["actix-multipart-derive"]
tempfile = ["dep:tempfile", "tokio/fs"]
[dependencies]
actix-multipart-derive = { version = "=0.6.1", optional = true }
actix-multipart-derive = { version = "=0.7.0", optional = true }
actix-utils = "3"
actix-web = { version = "4", default-features = false }
bytes = "1"
derive_more = "0.99.5"
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
@ -48,8 +63,14 @@ actix-multipart-rfc7578 = "0.10"
actix-rt = "2.2"
actix-test = "0.1"
actix-web = "4"
assert_matches = "1"
awc = "3"
env_logger = "0.11"
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
futures-test = "0.3"
multer = "3"
tokio = { version = "1.24.2", features = ["sync"] }
tokio-stream = "0.1"
[lints]
workspace = true

View File

@ -1,37 +1,32 @@
# `actix-multipart`
> Multipart form support for Actix Web.
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart)
[![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.6.2)](https://docs.rs/actix-multipart/0.6.2)
[![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.7.2)](https://docs.rs/actix-multipart/0.7.2)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-multipart/0.6.2/status.svg)](https://deps.rs/crate/actix-multipart/0.6.2)
[![dependency status](https://deps.rs/crate/actix-multipart/0.7.2/status.svg)](https://deps.rs/crate/actix-multipart/0.7.2)
[![Download](https://img.shields.io/crates/d/actix-multipart.svg)](https://crates.io/crates/actix-multipart)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end -->
## Example
<!-- cargo-rdme start -->
Dependencies:
Multipart request & form support for Actix Web.
```toml
[dependencies]
actix-multipart = "0.6"
actix-web = "4.5"
serde = { version = "1.0", features = ["derive"] }
```
The [`Multipart`] extractor aims to support all kinds of `multipart/*` requests, including `multipart/form-data`, `multipart/related` and `multipart/mixed`. This is a lower-level extractor which supports reading [multipart fields](Field), in the order they are sent by the client.
Code:
Due to additional requirements for `multipart/form-data` requests, the higher level [`MultipartForm`] extractor and derive macro only supports this media type.
## Examples
```rust
use actix_web::{post, App, HttpServer, Responder};
use actix_multipart::form::{json::Json as MPJson, tempfile::TempFile, MultipartForm};
use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
use serde::Deserialize;
#[derive(Debug, Deserialize)]
@ -43,7 +38,7 @@ struct Metadata {
struct UploadForm {
#[multipart(limit = "100MB")]
file: TempFile,
json: MPJson<Metadata>,
json: MpJson<Metadata>,
}
#[post("/videos")]
@ -63,15 +58,17 @@ async fn main() -> std::io::Result<()> {
}
```
Curl request :
cURL request:
```bash
```sh
curl -v --request POST \
--url http://localhost:8080/videos \
-F 'json={"name": "Cargo.lock"};type=application/json' \
-F file=@./Cargo.lock
```
### Examples
[`MultipartForm`]: struct@form::MultipartForm
https://github.com/actix/examples/tree/master/forms/multipart
<!-- cargo-rdme end -->
[More available in the examples repo &rarr;](https://github.com/actix/examples/tree/master/forms/multipart)

View File

@ -0,0 +1,36 @@
use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
use actix_web::{middleware::Logger, post, App, HttpServer, Responder};
use serde::Deserialize;
#[derive(Debug, Deserialize)]
struct Metadata {
name: String,
}
#[derive(Debug, MultipartForm)]
struct UploadForm {
#[multipart(limit = "100MB")]
file: TempFile,
json: MpJson<Metadata>,
}
#[post("/videos")]
async fn post_video(MultipartForm(form): MultipartForm<UploadForm>) -> impl Responder {
format!(
"Uploaded file {}, with size: {}\ntemporary file ({}) was deleted\n",
form.json.name,
form.file.size,
form.file.file.path().display(),
)
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
HttpServer::new(move || App::new().service(post_video).wrap(Logger::default()))
.workers(2)
.bind(("127.0.0.1", 8080))?
.run()
.await
}

View File

@ -10,78 +10,96 @@ use derive_more::{Display, Error, From};
/// A set of errors that can occur during parsing multipart streams.
#[derive(Debug, Display, From, Error)]
#[non_exhaustive]
pub enum MultipartError {
/// Content-Disposition header is not found or is not equal to "form-data".
pub enum Error {
/// Could not find Content-Type header.
#[display(fmt = "Could not find Content-Type header")]
ContentTypeMissing,
/// Could not parse Content-Type header.
#[display(fmt = "Could not parse Content-Type header")]
ContentTypeParse,
/// Parsed Content-Type did not have "multipart" top-level media type.
///
/// According to [RFC 7578 §4.2](https://datatracker.ietf.org/doc/html/rfc7578#section-4.2) a
/// Content-Disposition header must always be present and equal to "form-data".
#[display(fmt = "No Content-Disposition `form-data` header")]
NoContentDisposition,
/// Also raised when extracting a [`MultipartForm`] from a request that does not have the
/// "multipart/form-data" media type.
///
/// [`MultipartForm`]: struct@crate::form::MultipartForm
#[display(fmt = "Parsed Content-Type did not have "multipart" top-level media type")]
ContentTypeIncompatible,
/// Content-Type header is not found
#[display(fmt = "No Content-Type header found")]
NoContentType,
/// Can not parse Content-Type header
#[display(fmt = "Can not parse Content-Type header")]
ParseContentType,
/// Multipart boundary is not found
/// Multipart boundary is not found.
#[display(fmt = "Multipart boundary is not found")]
Boundary,
BoundaryMissing,
/// Nested multipart is not supported
/// Content-Disposition header was not found or not of disposition type "form-data" when parsing
/// a "form-data" field.
///
/// As per [RFC 7578 §4.2], a "multipart/form-data" field's Content-Disposition header must
/// always be present and have a disposition type of "form-data".
///
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
#[display(fmt = "Content-Disposition header was not found when parsing a \"form-data\" field")]
ContentDispositionMissing,
/// Content-Disposition name parameter was not found when parsing a "form-data" field.
///
/// As per [RFC 7578 §4.2], a "multipart/form-data" field's Content-Disposition header must
/// always include a "name" parameter.
///
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
#[display(fmt = "Content-Disposition header was not found when parsing a \"form-data\" field")]
ContentDispositionNameMissing,
/// Nested multipart is not supported.
#[display(fmt = "Nested multipart is not supported")]
Nested,
/// Multipart stream is incomplete
/// Multipart stream is incomplete.
#[display(fmt = "Multipart stream is incomplete")]
Incomplete,
/// Error during field parsing
#[display(fmt = "{}", _0)]
/// Field parsing failed.
#[display(fmt = "Error during field parsing")]
Parse(ParseError),
/// Payload error
#[display(fmt = "{}", _0)]
/// HTTP payload error.
#[display(fmt = "Payload error")]
Payload(PayloadError),
/// Not consumed
#[display(fmt = "Multipart stream is not consumed")]
/// Stream is not consumed.
#[display(fmt = "Stream is not consumed")]
NotConsumed,
/// An error from a field handler in a form
#[display(
fmt = "An error occurred processing field `{}`: {}",
field_name,
source
)]
/// Form field handler raised error.
#[display(fmt = "An error occurred processing field: {name}")]
Field {
field_name: String,
name: String,
source: actix_web::Error,
},
/// Duplicate field
#[display(fmt = "Duplicate field found for: `{}`", _0)]
/// Duplicate field found (for structure that opted-in to denying duplicate fields).
#[display(fmt = "Duplicate field found: {_0}")]
#[from(ignore)]
DuplicateField(#[error(not(source))] String),
/// Missing field
#[display(fmt = "Field with name `{}` is required", _0)]
/// Required field is missing.
#[display(fmt = "Required field is missing: {_0}")]
#[from(ignore)]
MissingField(#[error(not(source))] String),
/// Unknown field
#[display(fmt = "Unsupported field `{}`", _0)]
/// Unknown field (for structure that opted-in to denying unknown fields).
#[display(fmt = "Unknown field: {_0}")]
#[from(ignore)]
UnsupportedField(#[error(not(source))] String),
UnknownField(#[error(not(source))] String),
}
/// Return `BadRequest` for `MultipartError`
impl ResponseError for MultipartError {
/// Return `BadRequest` for `MultipartError`.
impl ResponseError for Error {
fn status_code(&self) -> StatusCode {
match &self {
MultipartError::Field { source, .. } => source.as_response_error().status_code(),
Error::Field { source, .. } => source.as_response_error().status_code(),
Error::ContentTypeIncompatible => StatusCode::UNSUPPORTED_MEDIA_TYPE,
_ => StatusCode::BAD_REQUEST,
}
}
@ -93,7 +111,7 @@ mod tests {
#[test]
fn test_multipart_error() {
let resp = MultipartError::Boundary.error_response();
let resp = Error::BoundaryMissing.error_response();
assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
}
}
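
For the error rework above, a hedged sketch of how downstream code might handle the renamed variants, assuming the module's `Error` is still re-exported at the crate root as `MultipartError` (as the renamed-variant changelog entries suggest); illustrative only, not part of the changeset:

```rust
use actix_multipart::MultipartError;
use actix_web::{http::StatusCode, ResponseError as _};

// Map a multipart error to a short label plus the status code produced by its
// `ResponseError` impl (415 for the new `ContentTypeIncompatible` variant, otherwise
// 400 unless a field handler supplied its own error).
fn describe(err: &MultipartError) -> (&'static str, StatusCode) {
    let label = match err {
        MultipartError::ContentTypeIncompatible => "request is not multipart",
        MultipartError::BoundaryMissing => "multipart boundary missing",
        MultipartError::ContentDispositionMissing => "form-data part lacks Content-Disposition",
        // The enum is `#[non_exhaustive]`, so a catch-all arm is required.
        _ => "other multipart error",
    };
    (label, err.status_code())
}

fn main() {
    let (label, status) = describe(&MultipartError::ContentTypeIncompatible);
    assert_eq!(status, StatusCode::UNSUPPORTED_MEDIA_TYPE);
    println!("{label}: {status}");
}
```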

View File

@ -1,21 +1,20 @@
//! Multipart payload support
use actix_utils::future::{ready, Ready};
use actix_web::{dev::Payload, Error, FromRequest, HttpRequest};
use crate::server::Multipart;
use crate::multipart::Multipart;
/// Get request's payload as multipart stream.
/// Extract request's payload as multipart stream.
///
/// Content-type: multipart/form-data;
/// Content-type: multipart/*;
///
/// # Examples
///
/// ```
/// use actix_web::{web, HttpResponse, Error};
/// use actix_web::{web, HttpResponse};
/// use actix_multipart::Multipart;
/// use futures_util::StreamExt as _;
///
/// async fn index(mut payload: Multipart) -> Result<HttpResponse, Error> {
/// async fn index(mut payload: Multipart) -> actix_web::Result<HttpResponse> {
/// // iterate over multipart stream
/// while let Some(item) = payload.next().await {
/// let mut field = item?;
@ -26,7 +25,7 @@ use crate::server::Multipart;
/// }
/// }
///
/// Ok(HttpResponse::Ok().into())
/// Ok(HttpResponse::Ok().finish())
/// }
/// ```
impl FromRequest for Multipart {
@ -35,9 +34,6 @@ impl FromRequest for Multipart {
#[inline]
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
ready(Ok(match Multipart::boundary(req.headers()) {
Ok(boundary) => Multipart::from_boundary(boundary, payload.take()),
Err(err) => Multipart::from_error(err),
}))
ready(Ok(Multipart::from_req(req, payload)))
}
}

View File

@ -0,0 +1,501 @@
use std::{
cell::RefCell,
cmp, fmt,
future::poll_fn,
mem,
pin::Pin,
rc::Rc,
task::{ready, Context, Poll},
};
use actix_web::{
error::PayloadError,
http::header::{self, ContentDisposition, HeaderMap},
web::{Bytes, BytesMut},
};
use derive_more::{Display, Error};
use futures_core::Stream;
use mime::Mime;
use crate::{
error::Error,
payload::{PayloadBuffer, PayloadRef},
safety::Safety,
};
/// Error type returned from [`Field::bytes()`] when field data is larger than limit.
#[derive(Debug, Display, Error)]
#[display(fmt = "size limit exceeded while collecting field data")]
#[non_exhaustive]
pub struct LimitExceeded;
/// A single field in a multipart stream.
pub struct Field {
/// Field's Content-Type.
content_type: Option<Mime>,
/// Field's Content-Disposition.
content_disposition: Option<ContentDisposition>,
/// Form field name.
///
/// A non-optional storage for form field names to avoid unwraps in `form` module. Will be an
/// empty string in non-form contexts.
///
// INVARIANT: always non-empty when request content-type is multipart/form-data.
pub(crate) form_field_name: String,
/// Field's header map.
headers: HeaderMap,
safety: Safety,
inner: Rc<RefCell<InnerField>>,
}
impl Field {
pub(crate) fn new(
content_type: Option<Mime>,
content_disposition: Option<ContentDisposition>,
form_field_name: Option<String>,
headers: HeaderMap,
safety: Safety,
inner: Rc<RefCell<InnerField>>,
) -> Self {
Field {
content_type,
content_disposition,
form_field_name: form_field_name.unwrap_or_default(),
headers,
inner,
safety,
}
}
/// Returns a reference to the field's header map.
pub fn headers(&self) -> &HeaderMap {
&self.headers
}
/// Returns a reference to the field's content (mime) type, if it is supplied by the client.
///
/// According to [RFC 7578](https://www.rfc-editor.org/rfc/rfc7578#section-4.4), if it is not
/// present, it should default to "text/plain". Note it is the responsibility of the client to
/// provide the appropriate content type, as there is no attempt to validate this by the server.
pub fn content_type(&self) -> Option<&Mime> {
self.content_type.as_ref()
}
/// Returns this field's parsed Content-Disposition header, if set.
///
/// # Validation
///
/// Per [RFC 7578 §4.2], the parts of a multipart/form-data payload MUST contain a
/// Content-Disposition header field where the disposition type is `form-data` and MUST also
/// contain an additional parameter of `name` with its value being the original field name from
/// the form. This requirement is enforced during extraction for multipart/form-data requests,
/// but not other kinds of multipart requests (such as multipart/related).
///
/// As such, it is safe to `.unwrap()` the return value of `.content_disposition()` if you've
/// verified that the request is a multipart/form-data request.
///
/// The [`name()`](Self::name) method is also provided as a convenience for obtaining the
/// aforementioned name parameter.
///
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
pub fn content_disposition(&self) -> Option<&ContentDisposition> {
self.content_disposition.as_ref()
}
/// Returns the field's name, if set.
///
/// See [`content_disposition()`](Self::content_disposition) regarding guarantees on presence of
/// the "name" field.
pub fn name(&self) -> Option<&str> {
self.content_disposition()?.get_name()
}
/// Collects the raw field data, up to `limit` bytes.
///
/// # Errors
///
/// Any errors produced by the data stream are returned as `Ok(Err(Error))` immediately.
///
/// If the buffered data size would exceed `limit`, an `Err(LimitExceeded)` is returned. Note
/// that, in this case, the full data stream is exhausted before returning the error so that
/// subsequent fields can still be read. To better defend against malicious/infinite requests,
/// it is advisable to also put a timeout on this call.
pub async fn bytes(&mut self, limit: usize) -> Result<Result<Bytes, Error>, LimitExceeded> {
/// Sensible default (2kB) for initial, bounded allocation when collecting body bytes.
const INITIAL_ALLOC_BYTES: usize = 2 * 1024;
let mut exceeded_limit = false;
let mut buf = BytesMut::with_capacity(INITIAL_ALLOC_BYTES);
let mut field = Pin::new(self);
match poll_fn(|cx| loop {
match ready!(field.as_mut().poll_next(cx)) {
// if already over limit, discard chunk to advance multipart request
Some(Ok(_chunk)) if exceeded_limit => {}
// if limit is exceeded set flag to true and continue
Some(Ok(chunk)) if buf.len() + chunk.len() > limit => {
exceeded_limit = true;
// eagerly de-allocate field data buffer
let _ = mem::take(&mut buf);
}
Some(Ok(chunk)) => buf.extend_from_slice(&chunk),
None => return Poll::Ready(Ok(())),
Some(Err(err)) => return Poll::Ready(Err(err)),
}
})
.await
{
// propagate error returned from body poll
Err(err) => Ok(Err(err)),
// limit was exceeded while reading body
Ok(()) if exceeded_limit => Err(LimitExceeded),
// otherwise return body buffer
Ok(()) => Ok(Ok(buf.freeze())),
}
}
}
impl Stream for Field {
type Item = Result<Bytes, Error>;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let this = self.get_mut();
let mut inner = this.inner.borrow_mut();
if let Some(mut buffer) = inner
.payload
.as_ref()
.expect("Field should not be polled after completion")
.get_mut(&this.safety)
{
// check safety and poll read payload to buffer.
buffer.poll_stream(cx)?;
} else if !this.safety.is_clean() {
// safety violation
return Poll::Ready(Some(Err(Error::NotConsumed)));
} else {
return Poll::Pending;
}
inner.poll(&this.safety)
}
}
impl fmt::Debug for Field {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if let Some(ct) = &self.content_type {
writeln!(f, "\nField: {}", ct)?;
} else {
writeln!(f, "\nField:")?;
}
writeln!(f, " boundary: {}", self.inner.borrow().boundary)?;
writeln!(f, " headers:")?;
for (key, val) in self.headers.iter() {
writeln!(f, " {:?}: {:?}", key, val)?;
}
Ok(())
}
}
pub(crate) struct InnerField {
/// Payload is initialized as Some and is `take`n when the field stream finishes.
payload: Option<PayloadRef>,
/// Field boundary (without "--" prefix).
boundary: String,
/// True if request payload has been exhausted.
eof: bool,
/// Field data's stated size according to its Content-Length header.
length: Option<u64>,
}
impl InnerField {
pub(crate) fn new_in_rc(
payload: PayloadRef,
boundary: String,
headers: &HeaderMap,
) -> Result<Rc<RefCell<InnerField>>, PayloadError> {
Self::new(payload, boundary, headers).map(|this| Rc::new(RefCell::new(this)))
}
pub(crate) fn new(
payload: PayloadRef,
boundary: String,
headers: &HeaderMap,
) -> Result<InnerField, PayloadError> {
let len = if let Some(len) = headers.get(&header::CONTENT_LENGTH) {
match len.to_str().ok().and_then(|len| len.parse::<u64>().ok()) {
Some(len) => Some(len),
None => return Err(PayloadError::Incomplete(None)),
}
} else {
None
};
Ok(InnerField {
boundary,
payload: Some(payload),
eof: false,
length: len,
})
}
/// Reads body part content chunk of the specified size.
///
/// The body part must have a `Content-Length` header with a proper value.
pub(crate) fn read_len(
payload: &mut PayloadBuffer,
size: &mut u64,
) -> Poll<Option<Result<Bytes, Error>>> {
if *size == 0 {
Poll::Ready(None)
} else {
match payload.read_max(*size)? {
Some(mut chunk) => {
let len = cmp::min(chunk.len() as u64, *size);
*size -= len;
let ch = chunk.split_to(len as usize);
if !chunk.is_empty() {
payload.unprocessed(chunk);
}
Poll::Ready(Some(Ok(ch)))
}
None => {
if payload.eof && (*size != 0) {
Poll::Ready(Some(Err(Error::Incomplete)))
} else {
Poll::Pending
}
}
}
}
}
/// Reads content chunk of body part with unknown length.
///
/// The `Content-Length` header for body part is not necessary.
pub(crate) fn read_stream(
payload: &mut PayloadBuffer,
boundary: &str,
) -> Poll<Option<Result<Bytes, Error>>> {
let mut pos = 0;
let len = payload.buf.len();
if len == 0 {
return if payload.eof {
Poll::Ready(Some(Err(Error::Incomplete)))
} else {
Poll::Pending
};
}
// check boundary
if len > 4 && payload.buf[0] == b'\r' {
let b_len = if payload.buf.starts_with(b"\r\n") && &payload.buf[2..4] == b"--" {
Some(4)
} else if &payload.buf[1..3] == b"--" {
Some(3)
} else {
None
};
if let Some(b_len) = b_len {
let b_size = boundary.len() + b_len;
if len < b_size {
return Poll::Pending;
} else if &payload.buf[b_len..b_size] == boundary.as_bytes() {
// found boundary
return Poll::Ready(None);
}
}
}
loop {
return if let Some(idx) = memchr::memmem::find(&payload.buf[pos..], b"\r") {
let cur = pos + idx;
// check if we have enough data for boundary detection
if cur + 4 > len {
if cur > 0 {
Poll::Ready(Some(Ok(payload.buf.split_to(cur).freeze())))
} else {
Poll::Pending
}
} else {
// check boundary
if (&payload.buf[cur..cur + 2] == b"\r\n"
&& &payload.buf[cur + 2..cur + 4] == b"--")
|| (&payload.buf[cur..=cur] == b"\r"
&& &payload.buf[cur + 1..cur + 3] == b"--")
{
if cur != 0 {
// return buffer
Poll::Ready(Some(Ok(payload.buf.split_to(cur).freeze())))
} else {
pos = cur + 1;
continue;
}
} else {
// not boundary
pos = cur + 1;
continue;
}
}
} else {
Poll::Ready(Some(Ok(payload.buf.split().freeze())))
};
}
}
pub(crate) fn poll(&mut self, safety: &Safety) -> Poll<Option<Result<Bytes, Error>>> {
if self.payload.is_none() {
return Poll::Ready(None);
}
let Some(mut payload) = self
.payload
.as_ref()
.expect("Field should not be polled after completion")
.get_mut(safety)
else {
return Poll::Pending;
};
if !self.eof {
let res = if let Some(ref mut len) = self.length {
Self::read_len(&mut payload, len)
} else {
Self::read_stream(&mut payload, &self.boundary)
};
match ready!(res) {
Some(Ok(bytes)) => return Poll::Ready(Some(Ok(bytes))),
Some(Err(err)) => return Poll::Ready(Some(Err(err))),
None => self.eof = true,
}
}
let result = match payload.readline() {
Ok(None) => Poll::Pending,
Ok(Some(line)) => {
if line.as_ref() != b"\r\n" {
log::warn!("multipart field did not read all the data or it is malformed");
}
Poll::Ready(None)
}
Err(err) => Poll::Ready(Some(Err(err))),
};
drop(payload);
if let Poll::Ready(None) = result {
// drop payload buffer and make future un-poll-able
let _ = self.payload.take();
}
result
}
}
#[cfg(test)]
mod tests {
use futures_util::{stream, StreamExt as _};
use super::*;
use crate::Multipart;
// TODO: use test utility when multi-file support is introduced
fn create_double_request_with_header() -> (Bytes, HeaderMap) {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
\r\n\
one+one+one\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
\r\n\
two+two+two\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0--\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
(bytes, headers)
}
#[actix_rt::test]
async fn bytes_unlimited() {
let (body, headers) = create_double_request_with_header();
let mut multipart = Multipart::new(&headers, stream::iter([Ok(body)]));
let field = multipart
.next()
.await
.expect("multipart should have two fields")
.expect("multipart body should be well formatted")
.bytes(usize::MAX)
.await
.expect("field data should not be size limited")
.expect("reading field data should not error");
assert_eq!(field, "one+one+one");
let field = multipart
.next()
.await
.expect("multipart should have two fields")
.expect("multipart body should be well formatted")
.bytes(usize::MAX)
.await
.expect("field data should not be size limited")
.expect("reading field data should not error");
assert_eq!(field, "two+two+two");
}
#[actix_rt::test]
async fn bytes_limited() {
let (body, headers) = create_double_request_with_header();
let mut multipart = Multipart::new(&headers, stream::iter([Ok(body)]));
multipart
.next()
.await
.expect("multipart should have two fields")
.expect("multipart body should be well formatted")
.bytes(8) // smaller than data size
.await
.expect_err("field data should be size limited");
// next field still readable
let field = multipart
.next()
.await
.expect("multipart should have two fields")
.expect("multipart body should be well formatted")
.bytes(usize::MAX)
.await
.expect("field data should not be size limited")
.expect("reading field data should not error");
assert_eq!(field, "two+two+two");
}
}


@ -1,7 +1,6 @@
//! Reads a field into memory.
use actix_web::HttpRequest;
use bytes::BytesMut;
use actix_web::{web::BytesMut, HttpRequest};
use futures_core::future::LocalBoxFuture;
use futures_util::TryStreamExt as _;
use mime::Mime;
@ -15,7 +14,7 @@ use crate::{
#[derive(Debug)]
pub struct Bytes {
/// The data.
pub data: bytes::Bytes,
pub data: actix_web::web::Bytes,
/// The value of the `Content-Type` header.
pub content_type: Option<Mime>,
@ -41,8 +40,9 @@ impl<'t> FieldReader<'t> for Bytes {
content_type: field.content_type().map(ToOwned::to_owned),
file_name: field
.content_disposition()
.expect("multipart form fields should have a content-disposition header")
.get_filename()
.map(str::to_owned),
.map(ToOwned::to_owned),
})
})
}


@ -32,7 +32,6 @@ where
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future {
Box::pin(async move {
let config = JsonConfig::from_req(req);
let field_name = field.name().to_owned();
if config.validate_content_type {
let valid = if let Some(mime) = field.content_type() {
@ -43,17 +42,19 @@ where
if !valid {
return Err(MultipartError::Field {
field_name,
name: field.form_field_name,
source: config.map_error(req, JsonFieldError::ContentType),
});
}
}
let form_field_name = field.form_field_name.clone();
let bytes = Bytes::read_field(req, field, limits).await?;
Ok(Json(serde_json::from_slice(bytes.data.as_ref()).map_err(
|err| MultipartError::Field {
field_name,
name: form_field_name,
source: config.map_error(req, JsonFieldError::Deserialize(err)),
},
)?))
@ -133,8 +134,7 @@ impl Default for JsonConfig {
mod tests {
use std::collections::HashMap;
use actix_web::{http::StatusCode, web, App, HttpResponse, Responder};
use bytes::Bytes;
use actix_web::{http::StatusCode, web, web::Bytes, App, HttpResponse, Responder};
use crate::form::{
json::{Json, JsonConfig},


@ -1,4 +1,4 @@
//! Process and extract typed data from a multipart stream.
//! Extract and process typed data from fields of a `multipart/form-data` request.
use std::{
any::Any,
@ -33,6 +33,14 @@ pub trait FieldReader<'t>: Sized + Any {
type Future: Future<Output = Result<Self, MultipartError>>;
/// The form will call this function to handle the field.
///
/// # Panics
///
/// When reading the `field` payload using its `Stream` implementation, polling (manually or via
/// `next()`/`try_next()`) may panic after the payload is exhausted. If this is a problem for
/// your implementation of this method, you should [`fuse()`] the `Field` first.
///
/// [`fuse()`]: futures_util::stream::StreamExt::fuse()
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future;
}
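The `fuse()` advice above can be applied directly inside a `FieldReader` implementation. A minimal sketch, assuming the public `actix_multipart::form::{FieldReader, Limits}` paths shown elsewhere in this diff; the `Discard` type and its drain-and-discard behaviour are purely illustrative:

```rust
use actix_multipart::{
    form::{FieldReader, Limits},
    Field, MultipartError,
};
use actix_web::HttpRequest;
use futures_core::future::LocalBoxFuture;
use futures_util::{StreamExt as _, TryStreamExt as _};

/// Hypothetical reader that drains a field and throws the bytes away.
struct Discard;

impl<'t> FieldReader<'t> for Discard {
    type Future = LocalBoxFuture<'t, Result<Self, MultipartError>>;

    fn read_field(_: &'t HttpRequest, field: Field, _: &'t mut Limits) -> Self::Future {
        Box::pin(async move {
            // fuse the field stream so polling after exhaustion yields `None`
            // instead of panicking
            let mut field = field.fuse();
            while let Some(_chunk) = field.try_next().await? {}

            // safe on a fused stream: it keeps returning `None`
            assert!(field.try_next().await?.is_none());

            Ok(Self)
        })
    }
}
```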
@ -72,13 +80,13 @@ where
state: &'t mut State,
duplicate_field: DuplicateField,
) -> Self::Future {
if state.contains_key(field.name()) {
if state.contains_key(&field.form_field_name) {
match duplicate_field {
DuplicateField::Ignore => return Box::pin(ready(Ok(()))),
DuplicateField::Deny => {
return Box::pin(ready(Err(MultipartError::DuplicateField(
field.name().to_owned(),
field.form_field_name,
))))
}
@ -87,7 +95,7 @@ where
}
Box::pin(async move {
let field_name = field.name().to_owned();
let field_name = field.form_field_name.clone();
let t = T::read_field(req, field, limits).await?;
state.insert(field_name, Box::new(t));
Ok(())
@ -115,10 +123,8 @@ where
Box::pin(async move {
// Note: Vec GroupReader always allows duplicates
let field_name = field.name().to_owned();
let vec = state
.entry(field_name)
.entry(field.form_field_name.clone())
.or_insert_with(|| Box::<Vec<T>>::default())
.downcast_mut::<Vec<T>>()
.unwrap();
@ -151,13 +157,13 @@ where
state: &'t mut State,
duplicate_field: DuplicateField,
) -> Self::Future {
if state.contains_key(field.name()) {
if state.contains_key(&field.form_field_name) {
match duplicate_field {
DuplicateField::Ignore => return Box::pin(ready(Ok(()))),
DuplicateField::Deny => {
return Box::pin(ready(Err(MultipartError::DuplicateField(
field.name().to_owned(),
field.form_field_name,
))))
}
@ -166,7 +172,7 @@ where
}
Box::pin(async move {
let field_name = field.name().to_owned();
let field_name = field.form_field_name.clone();
let t = T::read_field(req, field, limits).await?;
state.insert(field_name, Box::new(t));
Ok(())
@ -273,6 +279,9 @@ impl Limits {
/// [`MultipartCollect`] trait. You should use the [`macro@MultipartForm`] macro to derive this
/// for your struct.
///
/// Note that this extractor rejects requests with any other Content-Type such as `multipart/mixed`,
/// `multipart/related`, or non-multipart media types.
///
/// Add a [`MultipartFormConfig`] to your app data to configure extraction.
#[derive(Deref, DerefMut)]
pub struct MultipartForm<T: MultipartCollect>(pub T);
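As a quick illustration of the two notes above (the `multipart/form-data`-only restriction and app-data configuration), a minimal sketch; the route, field name, and limit values are made up for the example, and `MultipartFormConfig::total_limit()`/`memory_limit()` are assumed from the crate's public config API:

```rust
use actix_multipart::form::{tempfile::TempFile, MultipartForm, MultipartFormConfig};
use actix_web::{post, App, HttpServer, Responder};

#[derive(Debug, MultipartForm)]
struct Upload {
    // must match the form field name sent by the client
    file: TempFile,
}

#[post("/upload")]
async fn upload(MultipartForm(form): MultipartForm<Upload>) -> impl Responder {
    format!("received {} bytes", form.file.size)
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // illustrative limits: 25 MiB per request, 2 MiB buffered in memory
            .app_data(
                MultipartFormConfig::default()
                    .total_limit(25 * 1024 * 1024)
                    .memory_limit(2 * 1024 * 1024),
            )
            .service(upload)
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```

A request with any other `multipart/*` subtype is answered with 415 Unsupported Media Type, as the `non_multipart_form_data` test below demonstrates.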
@ -286,14 +295,24 @@ impl<T: MultipartCollect> MultipartForm<T> {
impl<T> FromRequest for MultipartForm<T>
where
T: MultipartCollect,
T: MultipartCollect + 'static,
{
type Error = Error;
type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>;
#[inline]
fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future {
let mut payload = Multipart::new(req.headers(), payload.take());
let mut multipart = Multipart::from_req(req, payload);
let content_type = match multipart.content_type_or_bail() {
Ok(content_type) => content_type,
Err(err) => return Box::pin(ready(Err(err.into()))),
};
if content_type.subtype() != mime::FORM_DATA {
// this extractor only supports multipart/form-data
return Box::pin(ready(Err(MultipartError::ContentTypeIncompatible.into())));
};
let config = MultipartFormConfig::from_req(req);
let mut limits = Limits::new(config.total_limit, config.memory_limit);
@ -305,14 +324,20 @@ where
Box::pin(
async move {
let mut state = State::default();
// We need to ensure field limits are shared for all instances of this field name
// ensure limits are shared for all fields with this name
let mut field_limits = HashMap::<String, Option<usize>>::new();
while let Some(field) = payload.try_next().await? {
while let Some(field) = multipart.try_next().await? {
debug_assert!(
!field.form_field_name.is_empty(),
"multipart form fields should have names",
);
// Retrieve the limit for this field
let entry = field_limits
.entry(field.name().to_owned())
.or_insert_with(|| T::limit(field.name()));
.entry(field.form_field_name.clone())
.or_insert_with(|| T::limit(&field.form_field_name));
limits.field_limit_remaining.clone_from(entry);
@ -321,6 +346,7 @@ where
// Update the stored limit
*entry = limits.field_limit_remaining;
}
let inner = T::from_state(state)?;
Ok(MultipartForm(inner))
}
@ -396,11 +422,20 @@ mod tests {
use actix_http::encoding::Decoder;
use actix_multipart_rfc7578::client::multipart;
use actix_test::TestServer;
use actix_web::{dev::Payload, http::StatusCode, web, App, HttpResponse, Responder};
use actix_web::{
dev::Payload, http::StatusCode, web, App, HttpRequest, HttpResponse, Resource, Responder,
};
use awc::{Client, ClientResponse};
use futures_core::future::LocalBoxFuture;
use futures_util::TryStreamExt as _;
use super::MultipartForm;
use crate::form::{bytes::Bytes, tempfile::TempFile, text::Text, MultipartFormConfig};
use crate::{
form::{
bytes::Bytes, tempfile::TempFile, text::Text, FieldReader, Limits, MultipartFormConfig,
},
Field, MultipartError,
};
pub async fn send_form(
srv: &TestServer,
@ -734,4 +769,84 @@ mod tests {
let response = send_form(&srv, form, "/").await;
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
}
#[actix_rt::test]
async fn non_multipart_form_data() {
#[derive(MultipartForm)]
struct TestNonMultipartFormData {
#[allow(unused)]
#[multipart(limit = "30B")]
foo: Text<String>,
}
async fn non_multipart_form_data_route(
_form: MultipartForm<TestNonMultipartFormData>,
) -> String {
unreachable!("request is sent with multipart/mixed");
}
let srv = actix_test::start(|| {
App::new().route("/", web::post().to(non_multipart_form_data_route))
});
let mut form = multipart::Form::default();
form.add_text("foo", "foo");
// mangle content-type, keeping the boundary
let ct = form.content_type().replacen("/form-data", "/mixed", 1);
let res = Client::default()
.post(srv.url("/"))
.content_type(ct)
.send_body(multipart::Body::from(form))
.await
.unwrap();
assert_eq!(res.status(), StatusCode::UNSUPPORTED_MEDIA_TYPE);
}
#[should_panic(expected = "called `Result::unwrap()` on an `Err` value: Connect(Disconnected)")]
#[actix_web::test]
async fn field_try_next_panic() {
#[derive(Debug)]
struct NullSink;
impl<'t> FieldReader<'t> for NullSink {
type Future = LocalBoxFuture<'t, Result<Self, MultipartError>>;
fn read_field(
_: &'t HttpRequest,
mut field: Field,
_limits: &'t mut Limits,
) -> Self::Future {
Box::pin(async move {
// exhaust field stream
while let Some(_chunk) = field.try_next().await? {}
// poll again, crash
let _post = field.try_next().await;
Ok(Self)
})
}
}
#[allow(dead_code)]
#[derive(MultipartForm)]
struct NullSinkForm {
foo: NullSink,
}
async fn null_sink(_form: MultipartForm<NullSinkForm>) -> impl Responder {
"unreachable"
}
let srv = actix_test::start(|| App::new().service(Resource::new("/").post(null_sink)));
let mut form = multipart::Form::default();
form.add_text("foo", "data is not important to this test");
// panics with Err(Connect(Disconnected)) due to form NullSink panic
let _res = send_form(&srv, form, "/").await;
}
}


@ -42,38 +42,36 @@ impl<'t> FieldReader<'t> for TempFile {
fn read_field(req: &'t HttpRequest, mut field: Field, limits: &'t mut Limits) -> Self::Future {
Box::pin(async move {
let config = TempFileConfig::from_req(req);
let field_name = field.name().to_owned();
let mut size = 0;
let file = config
.create_tempfile()
.map_err(|err| config.map_error(req, &field_name, TempFileError::FileIo(err)))?;
let file = config.create_tempfile().map_err(|err| {
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
})?;
let mut file_async =
tokio::fs::File::from_std(file.reopen().map_err(|err| {
config.map_error(req, &field_name, TempFileError::FileIo(err))
})?);
let mut file_async = tokio::fs::File::from_std(file.reopen().map_err(|err| {
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
})?);
while let Some(chunk) = field.try_next().await? {
limits.try_consume_limits(chunk.len(), false)?;
size += chunk.len();
file_async.write_all(chunk.as_ref()).await.map_err(|err| {
config.map_error(req, &field_name, TempFileError::FileIo(err))
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
})?;
}
file_async
.flush()
.await
.map_err(|err| config.map_error(req, &field_name, TempFileError::FileIo(err)))?;
file_async.flush().await.map_err(|err| {
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
})?;
Ok(TempFile {
file,
content_type: field.content_type().map(ToOwned::to_owned),
file_name: field
.content_disposition()
.expect("multipart form fields should have a content-disposition header")
.get_filename()
.map(str::to_owned),
.map(ToOwned::to_owned),
size,
})
})
@ -137,7 +135,7 @@ impl TempFileConfig {
};
MultipartError::Field {
field_name: field_name.to_owned(),
name: field_name.to_owned(),
source,
}
}


@ -36,7 +36,6 @@ where
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future {
Box::pin(async move {
let config = TextConfig::from_req(req);
let field_name = field.name().to_owned();
if config.validate_content_type {
let valid = if let Some(mime) = field.content_type() {
@ -49,22 +48,24 @@ where
if !valid {
return Err(MultipartError::Field {
field_name,
name: field.form_field_name,
source: config.map_error(req, TextError::ContentType),
});
}
}
let form_field_name = field.form_field_name.clone();
let bytes = Bytes::read_field(req, field, limits).await?;
let text = str::from_utf8(&bytes.data).map_err(|err| MultipartError::Field {
field_name: field_name.clone(),
name: form_field_name.clone(),
source: config.map_error(req, TextError::Utf8Error(err)),
})?;
Ok(Text(serde_plain::from_str(text).map_err(|err| {
MultipartError::Field {
field_name,
name: form_field_name,
source: config.map_error(req, TextError::Deserialize(err)),
}
})?))


@ -1,9 +1,19 @@
//! Multipart form support for Actix Web.
//! Multipart request & form support for Actix Web.
//!
//! The [`Multipart`] extractor aims to support all kinds of `multipart/*` requests, including
//! `multipart/form-data`, `multipart/related` and `multipart/mixed`. This is a lower-level
//! extractor which supports reading [multipart fields](Field), in the order they are sent by the
//! client.
//!
//! Due to additional requirements for `multipart/form-data` requests, the higher level
//! [`MultipartForm`] extractor and derive macro only supports this media type.
//!
//! # Examples
//!
//! ```no_run
//! use actix_web::{post, App, HttpServer, Responder};
//!
//! use actix_multipart::form::{json::Json as MPJson, tempfile::TempFile, MultipartForm};
//! use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
//! use serde::Deserialize;
//!
//! #[derive(Debug, Deserialize)]
@ -15,7 +25,7 @@
//! struct UploadForm {
//! #[multipart(limit = "100MB")]
//! file: TempFile,
//! json: MPJson<Metadata>,
//! json: MpJson<Metadata>,
//! }
//!
//! #[post("/videos")]
@ -34,10 +44,18 @@
//! .await
//! }
//! ```
//!
//! cURL request:
//!
//! ```sh
//! curl -v --request POST \
//! --url http://localhost:8080/videos \
//! -F 'json={"name": "Cargo.lock"};type=application/json' \
//! -F file=@./Cargo.lock
//! ```
//!
//! [`MultipartForm`]: struct@form::MultipartForm
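For the lower-level `Multipart` extractor described above, a minimal sketch of streaming fields in arrival order; the `/raw` route and the byte counting are illustrative only:

```rust
use actix_multipart::Multipart;
use actix_web::{post, Error, HttpResponse};
use futures_util::TryStreamExt as _;

#[post("/raw")]
async fn raw_upload(mut payload: Multipart) -> Result<HttpResponse, Error> {
    // fields are yielded in the order the client sent them
    while let Some(mut field) = payload.try_next().await? {
        let mut size = 0usize;
        while let Some(chunk) = field.try_next().await? {
            size += chunk.len();
        }
        println!("field ({:?}): {size} bytes", field.content_type());
    }

    Ok(HttpResponse::Ok().finish())
}
```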
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![allow(clippy::borrow_interior_mutable_const)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
@ -48,14 +66,15 @@ extern crate self as actix_multipart;
mod error;
mod extractor;
pub(crate) mod field;
pub mod form;
mod server;
mod multipart;
pub(crate) mod payload;
pub(crate) mod safety;
pub mod test;
pub use self::{
error::MultipartError,
server::{Field, Multipart},
test::{
create_form_data_payload_and_headers, create_form_data_payload_and_headers_with_boundary,
},
error::Error as MultipartError,
field::{Field, LimitExceeded},
multipart::Multipart,
};


@ -0,0 +1,883 @@
//! Multipart response payload support.
use std::{
cell::RefCell,
pin::Pin,
rc::Rc,
task::{Context, Poll},
};
use actix_web::{
dev,
error::{ParseError, PayloadError},
http::header::{self, ContentDisposition, HeaderMap, HeaderName, HeaderValue},
web::Bytes,
HttpRequest,
};
use futures_core::stream::Stream;
use mime::Mime;
use crate::{
error::Error,
field::InnerField,
payload::{PayloadBuffer, PayloadRef},
safety::Safety,
Field,
};
const MAX_HEADERS: usize = 32;
/// The server-side implementation of `multipart/*` requests.
///
/// This parses the incoming stream into [`Field`] instances via its `Stream` implementation, in
/// the order they are sent by the client. Nested multipart streams are not supported and are
/// rejected with an error.
pub struct Multipart {
flow: Flow,
safety: Safety,
}
enum Flow {
InFlight(Inner),
/// Error container is Some until an error is returned out of the flow.
Error(Option<Error>),
}
impl Multipart {
/// Creates a multipart instance from parts.
pub fn new<S>(headers: &HeaderMap, stream: S) -> Self
where
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
{
match Self::find_ct_and_boundary(headers) {
Ok((ct, boundary)) => Self::from_ct_and_boundary(ct, boundary, stream),
Err(err) => Self::from_error(err),
}
}
/// Creates a multipart instance from a request's headers and payload.
pub(crate) fn from_req(req: &HttpRequest, payload: &mut dev::Payload) -> Self {
match Self::find_ct_and_boundary(req.headers()) {
Ok((ct, boundary)) => Self::from_ct_and_boundary(ct, boundary, payload.take()),
Err(err) => Self::from_error(err),
}
}
/// Extract Content-Type and boundary info from headers.
pub(crate) fn find_ct_and_boundary(headers: &HeaderMap) -> Result<(Mime, String), Error> {
let content_type = headers
.get(&header::CONTENT_TYPE)
.ok_or(Error::ContentTypeMissing)?
.to_str()
.ok()
.and_then(|content_type| content_type.parse::<Mime>().ok())
.ok_or(Error::ContentTypeParse)?;
if content_type.type_() != mime::MULTIPART {
return Err(Error::ContentTypeIncompatible);
}
let boundary = content_type
.get_param(mime::BOUNDARY)
.ok_or(Error::BoundaryMissing)?
.as_str()
.to_owned();
Ok((content_type, boundary))
}
/// Constructs a new multipart reader from given Content-Type, boundary, and stream.
pub(crate) fn from_ct_and_boundary<S>(ct: Mime, boundary: String, stream: S) -> Multipart
where
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
{
Multipart {
safety: Safety::new(),
flow: Flow::InFlight(Inner {
payload: PayloadRef::new(PayloadBuffer::new(stream)),
content_type: ct,
boundary,
state: State::FirstBoundary,
item: Item::None,
}),
}
}
/// Constructs a new multipart reader from given `MultipartError`.
pub(crate) fn from_error(err: Error) -> Multipart {
Multipart {
flow: Flow::Error(Some(err)),
safety: Safety::new(),
}
}
/// Returns the request's parsed Content-Type, or the stored error if parsing failed.
pub(crate) fn content_type_or_bail(&mut self) -> Result<mime::Mime, Error> {
match self.flow {
Flow::InFlight(ref inner) => Ok(inner.content_type.clone()),
Flow::Error(ref mut err) => Err(err
.take()
.expect("error should not be taken after it was returned")),
}
}
}
impl Stream for Multipart {
type Item = Result<Field, Error>;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let this = self.get_mut();
match this.flow {
Flow::InFlight(ref mut inner) => {
if let Some(mut buffer) = inner.payload.get_mut(&this.safety) {
// safety check passed; poll the payload stream to read more data into the buffer
buffer.poll_stream(cx)?;
} else if !this.safety.is_clean() {
// safety violation
return Poll::Ready(Some(Err(Error::NotConsumed)));
} else {
return Poll::Pending;
}
inner.poll(&this.safety, cx)
}
Flow::Error(ref mut err) => Poll::Ready(Some(Err(err
.take()
.expect("Multipart polled after finish")))),
}
}
}
#[derive(PartialEq, Debug)]
enum State {
/// Skip data until first boundary.
FirstBoundary,
/// Reading boundary.
Boundary,
/// Reading Headers.
Headers,
/// Stream EOF.
Eof,
}
enum Item {
None,
Field(Rc<RefCell<InnerField>>),
}
struct Inner {
/// Request's payload stream & buffer.
payload: PayloadRef,
/// Request's Content-Type.
///
/// Guaranteed to have "multipart" top-level media type, i.e., `multipart/*`.
content_type: Mime,
/// Field boundary.
boundary: String,
state: State,
item: Item,
}
impl Inner {
fn read_field_headers(payload: &mut PayloadBuffer) -> Result<Option<HeaderMap>, Error> {
match payload.read_until(b"\r\n\r\n")? {
None => {
if payload.eof {
Err(Error::Incomplete)
} else {
Ok(None)
}
}
Some(bytes) => {
let mut hdrs = [httparse::EMPTY_HEADER; MAX_HEADERS];
match httparse::parse_headers(&bytes, &mut hdrs).map_err(ParseError::from)? {
httparse::Status::Complete((_, hdrs)) => {
// convert headers
let mut headers = HeaderMap::with_capacity(hdrs.len());
for h in hdrs {
let name =
HeaderName::try_from(h.name).map_err(|_| ParseError::Header)?;
let value =
HeaderValue::try_from(h.value).map_err(|_| ParseError::Header)?;
headers.append(name, value);
}
Ok(Some(headers))
}
httparse::Status::Partial => Err(ParseError::Header.into()),
}
}
}
}
/// Reads a field boundary from the payload buffer (and discards it).
///
/// Reads "in-between" and "final" boundaries. E.g. for boundary = "foo":
///
/// ```plain
/// --foo <-- in-between fields
/// --foo-- <-- end of request body, should be followed by EOF
/// ```
///
/// Returns:
///
/// - `Ok(Some(true))` - final field boundary read (EOF)
/// - `Ok(Some(false))` - field boundary read
/// - `Ok(None)` - boundary not found, more data needs reading
/// - `Err(BoundaryMissing)` - multipart boundary is missing
fn read_boundary(payload: &mut PayloadBuffer, boundary: &str) -> Result<Option<bool>, Error> {
// TODO: need to read epilogue
let chunk = match payload.readline_or_eof()? {
// TODO: this might be okay as a let Some() else return Ok(None)
None => return Ok(payload.eof.then_some(true)),
Some(chunk) => chunk,
};
const BOUNDARY_MARKER: &[u8] = b"--";
const LINE_BREAK: &[u8] = b"\r\n";
let boundary_len = boundary.len();
if chunk.len() < boundary_len + 2 + 2
|| !chunk.starts_with(BOUNDARY_MARKER)
|| &chunk[2..boundary_len + 2] != boundary.as_bytes()
{
return Err(Error::BoundaryMissing);
}
// chunk facts:
// - long enough to contain boundary + 2 markers or 1 marker and line-break
// - starts with boundary marker
// - chunk contains correct boundary
if &chunk[boundary_len + 2..] == LINE_BREAK {
// boundary is followed by line-break, indicating more fields to come
return Ok(Some(false));
}
// boundary is followed by marker
if &chunk[boundary_len + 2..boundary_len + 4] == BOUNDARY_MARKER
&& (
// chunk is exactly boundary len + 2 markers
chunk.len() == boundary_len + 2 + 2
// final boundary is allowed to end with a line-break
|| &chunk[boundary_len + 4..] == LINE_BREAK
)
{
return Ok(Some(true));
}
Err(Error::BoundaryMissing)
}
fn skip_until_boundary(
payload: &mut PayloadBuffer,
boundary: &str,
) -> Result<Option<bool>, Error> {
let mut eof = false;
loop {
match payload.readline()? {
Some(chunk) => {
if chunk.is_empty() {
return Err(Error::BoundaryMissing);
}
if chunk.len() < boundary.len() {
continue;
}
if &chunk[..2] == b"--" && &chunk[2..chunk.len() - 2] == boundary.as_bytes() {
break;
} else {
if chunk.len() < boundary.len() + 2 {
continue;
}
let b: &[u8] = boundary.as_ref();
if &chunk[..boundary.len()] == b
&& &chunk[boundary.len()..boundary.len() + 2] == b"--"
{
eof = true;
break;
}
}
}
None => {
return if payload.eof {
Err(Error::Incomplete)
} else {
Ok(None)
};
}
}
}
Ok(Some(eof))
}
fn poll(&mut self, safety: &Safety, cx: &Context<'_>) -> Poll<Option<Result<Field, Error>>> {
if self.state == State::Eof {
Poll::Ready(None)
} else {
// release field
loop {
// Nested multipart streams of fields have to be consumed
// before switching to the next one
if safety.current() {
let stop = match self.item {
Item::Field(ref mut field) => match field.borrow_mut().poll(safety) {
Poll::Pending => return Poll::Pending,
Poll::Ready(Some(Ok(_))) => continue,
Poll::Ready(Some(Err(err))) => return Poll::Ready(Some(Err(err))),
Poll::Ready(None) => true,
},
Item::None => false,
};
if stop {
self.item = Item::None;
}
if let Item::None = self.item {
break;
}
}
}
let field_headers = if let Some(mut payload) = self.payload.get_mut(safety) {
match self.state {
// read until first boundary
State::FirstBoundary => {
match Inner::skip_until_boundary(&mut payload, &self.boundary)? {
None => return Poll::Pending,
Some(eof) => {
if eof {
self.state = State::Eof;
return Poll::Ready(None);
} else {
self.state = State::Headers;
}
}
}
}
// read boundary
State::Boundary => match Inner::read_boundary(&mut payload, &self.boundary)? {
None => return Poll::Pending,
Some(eof) => {
if eof {
self.state = State::Eof;
return Poll::Ready(None);
} else {
self.state = State::Headers;
}
}
},
_ => {}
}
// read field headers for next field
if self.state == State::Headers {
if let Some(headers) = Inner::read_field_headers(&mut payload)? {
self.state = State::Boundary;
headers
} else {
return Poll::Pending;
}
} else {
unreachable!()
}
} else {
log::debug!("NotReady: field is in flight");
return Poll::Pending;
};
let field_content_disposition = field_headers
.get(&header::CONTENT_DISPOSITION)
.and_then(|cd| ContentDisposition::from_raw(cd).ok())
.filter(|content_disposition| {
matches!(
content_disposition.disposition,
header::DispositionType::FormData,
)
});
let form_field_name = if self.content_type.subtype() == mime::FORM_DATA {
// According to RFC 7578 §4.2, which relates to "multipart/form-data" requests
// specifically, fields must have a Content-Disposition header, its disposition
// type must be set as "form-data", and it must have a name parameter.
let Some(cd) = &field_content_disposition else {
return Poll::Ready(Some(Err(Error::ContentDispositionMissing)));
};
let Some(field_name) = cd.get_name() else {
return Poll::Ready(Some(Err(Error::ContentDispositionNameMissing)));
};
Some(field_name.to_owned())
} else {
None
};
// TODO: check out other multipart/* RFCs for specific requirements
let field_content_type: Option<Mime> = field_headers
.get(&header::CONTENT_TYPE)
.and_then(|ct| ct.to_str().ok())
.and_then(|ct| ct.parse().ok());
self.state = State::Boundary;
// nested multipart stream is not supported
if let Some(mime) = &field_content_type {
if mime.type_() == mime::MULTIPART {
return Poll::Ready(Some(Err(Error::Nested)));
}
}
let field_inner =
InnerField::new_in_rc(self.payload.clone(), self.boundary.clone(), &field_headers)?;
self.item = Item::Field(Rc::clone(&field_inner));
Poll::Ready(Some(Ok(Field::new(
field_content_type,
field_content_disposition,
form_field_name,
field_headers,
safety.clone(cx),
field_inner,
))))
}
}
}
impl Drop for Inner {
fn drop(&mut self) {
// `Item::Field` has to be dropped first because of `Safety`.
self.item = Item::None;
}
}
#[cfg(test)]
mod tests {
use std::time::Duration;
use actix_http::h1;
use actix_web::{
http::header::{DispositionParam, DispositionType},
rt,
test::TestRequest,
web::{BufMut as _, BytesMut},
FromRequest,
};
use assert_matches::assert_matches;
use futures_test::stream::StreamTestExt as _;
use futures_util::{stream, StreamExt as _};
use tokio::sync::mpsc;
use tokio_stream::wrappers::UnboundedReceiverStream;
use super::*;
const BOUNDARY: &str = "abbc761f78ff4d7cb7573b5a23f96ef0";
#[actix_rt::test]
async fn test_boundary() {
let headers = HeaderMap::new();
match Multipart::find_ct_and_boundary(&headers) {
Err(Error::ContentTypeMissing) => {}
_ => unreachable!("should not happen"),
}
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static("test"),
);
match Multipart::find_ct_and_boundary(&headers) {
Err(Error::ContentTypeParse) => {}
_ => unreachable!("should not happen"),
}
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static("multipart/mixed"),
);
match Multipart::find_ct_and_boundary(&headers) {
Err(Error::BoundaryMissing) => {}
_ => unreachable!("should not happen"),
}
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"5c02368e880e436dab70ed54e1c58209\"",
),
);
assert_eq!(
Multipart::find_ct_and_boundary(&headers).unwrap().1,
"5c02368e880e436dab70ed54e1c58209",
);
}
fn create_stream() -> (
mpsc::UnboundedSender<Result<Bytes, PayloadError>>,
impl Stream<Item = Result<Bytes, PayloadError>>,
) {
let (tx, rx) = mpsc::unbounded_channel();
(
tx,
UnboundedReceiverStream::new(rx).map(|res| res.map_err(|_| panic!())),
)
}
fn create_simple_request_with_header() -> (Bytes, HeaderMap) {
let (body, headers) = crate::test::create_form_data_payload_and_headers_with_boundary(
BOUNDARY,
"file",
Some("fn.txt".to_owned()),
Some(mime::TEXT_PLAIN_UTF_8),
Bytes::from_static(b"data"),
);
let mut buf = BytesMut::with_capacity(body.len() + 14);
// add junk before form to test pre-boundary data rejection
buf.put("testasdadsad\r\n".as_bytes());
buf.put(body);
(buf.freeze(), headers)
}
// TODO: use test utility when multi-file support is introduced
fn create_double_request_with_header() -> (Bytes, HeaderMap) {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
data\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0--\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
(bytes, headers)
}
#[actix_rt::test]
async fn test_multipart_no_end_crlf() {
let (sender, payload) = create_stream();
let (mut bytes, headers) = create_double_request_with_header();
let bytes_stripped = bytes.split_to(bytes.len()); // strip crlf
sender.send(Ok(bytes_stripped)).unwrap();
drop(sender); // eof
let mut multipart = Multipart::new(&headers, payload);
match multipart.next().await.unwrap() {
Ok(_) => {}
_ => unreachable!(),
}
match multipart.next().await.unwrap() {
Ok(_) => {}
_ => unreachable!(),
}
match multipart.next().await {
None => {}
_ => unreachable!(),
}
}
#[actix_rt::test]
async fn test_multipart() {
let (sender, payload) = create_stream();
let (bytes, headers) = create_double_request_with_header();
sender.send(Ok(bytes)).unwrap();
let mut multipart = Multipart::new(&headers, payload);
match multipart.next().await {
Some(Ok(mut field)) => {
let cd = field.content_disposition().unwrap();
assert_eq!(cd.disposition, DispositionType::FormData);
assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
match field.next().await.unwrap() {
Ok(chunk) => assert_eq!(chunk, "test"),
_ => unreachable!(),
}
match field.next().await {
None => {}
_ => unreachable!(),
}
}
_ => unreachable!(),
}
match multipart.next().await.unwrap() {
Ok(mut field) => {
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
match field.next().await {
Some(Ok(chunk)) => assert_eq!(chunk, "data"),
_ => unreachable!(),
}
match field.next().await {
None => {}
_ => unreachable!(),
}
}
_ => unreachable!(),
}
match multipart.next().await {
None => {}
_ => unreachable!(),
}
}
// Loops, collecting all bytes until end-of-field
async fn get_whole_field(field: &mut Field) -> BytesMut {
let mut b = BytesMut::new();
loop {
match field.next().await {
Some(Ok(chunk)) => b.extend_from_slice(&chunk),
None => return b,
_ => unreachable!(),
}
}
}
#[actix_rt::test]
async fn test_stream() {
let (bytes, headers) = create_double_request_with_header();
let payload = stream::iter(bytes)
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
.interleave_pending();
let mut multipart = Multipart::new(&headers, payload);
match multipart.next().await.unwrap() {
Ok(mut field) => {
let cd = field.content_disposition().unwrap();
assert_eq!(cd.disposition, DispositionType::FormData);
assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
assert_eq!(get_whole_field(&mut field).await, "test");
}
_ => unreachable!(),
}
match multipart.next().await {
Some(Ok(mut field)) => {
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
assert_eq!(get_whole_field(&mut field).await, "data");
}
_ => unreachable!(),
}
match multipart.next().await {
None => {}
_ => unreachable!(),
}
}
#[actix_rt::test]
async fn test_multipart_from_error() {
let err = Error::ContentTypeMissing;
let mut multipart = Multipart::from_error(err);
assert!(multipart.next().await.unwrap().is_err())
}
#[actix_rt::test]
async fn test_multipart_from_boundary() {
let (_, payload) = create_stream();
let (_, headers) = create_simple_request_with_header();
let (ct, boundary) = Multipart::find_ct_and_boundary(&headers).unwrap();
let _ = Multipart::from_ct_and_boundary(ct, boundary, payload);
}
#[actix_rt::test]
async fn test_multipart_payload_consumption() {
// with sample payload and HttpRequest with no headers
let (_, inner_payload) = h1::Payload::create(false);
let mut payload = actix_web::dev::Payload::from(inner_payload);
let req = TestRequest::default().to_http_request();
// multipart should generate an error
let mut mp = Multipart::from_request(&req, &mut payload).await.unwrap();
assert!(mp.next().await.unwrap().is_err());
// and should not consume the payload
match payload {
actix_web::dev::Payload::H1 { .. } => {} //expected
_ => unreachable!(),
}
}
#[actix_rt::test]
async fn no_content_disposition_form_data() {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
Content-Length: 4\r\n\
\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/form-data; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
let payload = stream::iter(bytes)
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
.interleave_pending();
let mut multipart = Multipart::new(&headers, payload);
let res = multipart.next().await.unwrap();
assert_matches!(
res.expect_err(
"according to RFC 7578, form-data fields require a content-disposition header"
),
Error::ContentDispositionMissing
);
}
#[actix_rt::test]
async fn no_content_disposition_non_form_data() {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
Content-Length: 4\r\n\
\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
let payload = stream::iter(bytes)
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
.interleave_pending();
let mut multipart = Multipart::new(&headers, payload);
let res = multipart.next().await.unwrap();
res.unwrap();
}
#[actix_rt::test]
async fn no_name_in_form_data_content_disposition() {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
Content-Length: 4\r\n\
\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/form-data; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
let payload = stream::iter(bytes)
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
.interleave_pending();
let mut multipart = Multipart::new(&headers, payload);
let res = multipart.next().await.unwrap();
assert_matches!(
res.expect_err("according to RFC 7578, form-data fields require a name attribute"),
Error::ContentDispositionNameMissing
);
}
#[actix_rt::test]
async fn test_drop_multipart_dont_hang() {
let (sender, payload) = create_stream();
let (bytes, headers) = create_simple_request_with_header();
sender.send(Ok(bytes)).unwrap();
drop(sender); // eof
let mut multipart = Multipart::new(&headers, payload);
let mut field = multipart.next().await.unwrap().unwrap();
drop(multipart);
// should fail immediately
match field.next().await {
Some(Err(Error::NotConsumed)) => {}
_ => panic!(),
};
}
#[actix_rt::test]
async fn test_drop_field_awaken_multipart() {
let (sender, payload) = create_stream();
let (bytes, headers) = create_double_request_with_header();
sender.send(Ok(bytes)).unwrap();
drop(sender); // eof
let mut multipart = Multipart::new(&headers, payload);
let mut field = multipart.next().await.unwrap().unwrap();
let task = rt::spawn(async move {
rt::time::sleep(Duration::from_millis(500)).await;
assert_eq!(field.next().await.unwrap().unwrap(), "test");
drop(field);
});
// dropping field should awaken current task
let _ = multipart.next().await.unwrap().unwrap();
task.await.unwrap();
}
}


@ -0,0 +1,255 @@
use std::{
cell::{RefCell, RefMut},
cmp, mem,
pin::Pin,
rc::Rc,
task::{Context, Poll},
};
use actix_web::{
error::PayloadError,
web::{Bytes, BytesMut},
};
use futures_core::stream::{LocalBoxStream, Stream};
use crate::{error::Error, safety::Safety};
pub(crate) struct PayloadRef {
payload: Rc<RefCell<PayloadBuffer>>,
}
impl PayloadRef {
pub(crate) fn new(payload: PayloadBuffer) -> PayloadRef {
PayloadRef {
payload: Rc::new(RefCell::new(payload)),
}
}
pub(crate) fn get_mut(&self, safety: &Safety) -> Option<RefMut<'_, PayloadBuffer>> {
if safety.current() {
Some(self.payload.borrow_mut())
} else {
None
}
}
}
impl Clone for PayloadRef {
fn clone(&self) -> PayloadRef {
PayloadRef {
payload: Rc::clone(&self.payload),
}
}
}
/// Payload buffer.
pub(crate) struct PayloadBuffer {
pub(crate) stream: LocalBoxStream<'static, Result<Bytes, PayloadError>>,
pub(crate) buf: BytesMut,
/// EOF flag. If true, no more payload reads will be attempted.
pub(crate) eof: bool,
}
impl PayloadBuffer {
/// Constructs new payload buffer.
pub(crate) fn new<S>(stream: S) -> Self
where
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
{
PayloadBuffer {
stream: Box::pin(stream),
buf: BytesMut::with_capacity(1_024), // pre-allocate 1KiB
eof: false,
}
}
pub(crate) fn poll_stream(&mut self, cx: &mut Context<'_>) -> Result<(), PayloadError> {
loop {
match Pin::new(&mut self.stream).poll_next(cx) {
Poll::Ready(Some(Ok(data))) => {
self.buf.extend_from_slice(&data);
// try to read more data
continue;
}
Poll::Ready(Some(Err(err))) => return Err(err),
Poll::Ready(None) => {
self.eof = true;
return Ok(());
}
Poll::Pending => return Ok(()),
}
}
}
/// Reads exact number of bytes.
#[cfg(test)]
pub(crate) fn read_exact(&mut self, size: usize) -> Option<Bytes> {
if size <= self.buf.len() {
Some(self.buf.split_to(size).freeze())
} else {
None
}
}
pub(crate) fn read_max(&mut self, size: u64) -> Result<Option<Bytes>, Error> {
if !self.buf.is_empty() {
let size = cmp::min(self.buf.len() as u64, size) as usize;
Ok(Some(self.buf.split_to(size).freeze()))
} else if self.eof {
Err(Error::Incomplete)
} else {
Ok(None)
}
}
/// Reads until specified ending.
///
/// Returns:
///
/// - `Ok(Some(chunk))` - `needle` is found, with chunk ending after needle
/// - `Err(Incomplete)` - `needle` is not found and we're at EOF
/// - `Ok(None)` - `needle` is not found otherwise
pub(crate) fn read_until(&mut self, needle: &[u8]) -> Result<Option<Bytes>, Error> {
match memchr::memmem::find(&self.buf, needle) {
// buffer exhausted and EOF without finding needle
None if self.eof => Err(Error::Incomplete),
// needle not yet found
None => Ok(None),
// needle found, split chunk out of buf
Some(idx) => Ok(Some(self.buf.split_to(idx + needle.len()).freeze())),
}
}
/// Reads bytes until new line delimiter (`\n`, `0x0A`).
///
/// Returns:
///
/// - `Ok(Some(chunk))` - the newline is found, with the chunk ending after it
/// - `Err(Incomplete)` - the newline is not found and we're at EOF
/// - `Ok(None)` - the newline is not found otherwise
#[inline]
pub(crate) fn readline(&mut self) -> Result<Option<Bytes>, Error> {
self.read_until(b"\n")
}
/// Reads bytes until new line delimiter or until EOF.
#[inline]
pub(crate) fn readline_or_eof(&mut self) -> Result<Option<Bytes>, Error> {
match self.readline() {
Err(Error::Incomplete) if self.eof => Ok(Some(self.buf.split().freeze())),
line => line,
}
}
/// Puts unprocessed data back to the buffer.
pub(crate) fn unprocessed(&mut self, data: Bytes) {
// TODO: use BytesMut::from when it's released, see https://github.com/tokio-rs/bytes/pull/710
let buf = BytesMut::from(&data[..]);
let buf = mem::replace(&mut self.buf, buf);
self.buf.extend_from_slice(&buf);
}
}
#[cfg(test)]
mod tests {
use actix_http::h1;
use futures_util::future::lazy;
use super::*;
#[actix_rt::test]
async fn basic() {
let (_, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(payload.buf.len(), 0);
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(None, payload.read_max(1).unwrap());
}
#[actix_rt::test]
async fn eof() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(None, payload.read_max(4).unwrap());
sender.feed_data(Bytes::from("data"));
sender.feed_eof();
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(Some(Bytes::from("data")), payload.read_max(4).unwrap());
assert_eq!(payload.buf.len(), 0);
assert!(payload.read_max(1).is_err());
assert!(payload.eof);
}
#[actix_rt::test]
async fn err() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(None, payload.read_max(1).unwrap());
sender.set_error(PayloadError::Incomplete(None));
lazy(|cx| payload.poll_stream(cx)).await.err().unwrap();
}
#[actix_rt::test]
async fn read_max() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
sender.feed_data(Bytes::from("line1"));
sender.feed_data(Bytes::from("line2"));
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(payload.buf.len(), 10);
assert_eq!(Some(Bytes::from("line1")), payload.read_max(5).unwrap());
assert_eq!(payload.buf.len(), 5);
assert_eq!(Some(Bytes::from("line2")), payload.read_max(5).unwrap());
assert_eq!(payload.buf.len(), 0);
}
#[actix_rt::test]
async fn read_exactly() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(None, payload.read_exact(2));
sender.feed_data(Bytes::from("line1"));
sender.feed_data(Bytes::from("line2"));
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(Some(Bytes::from_static(b"li")), payload.read_exact(2));
assert_eq!(payload.buf.len(), 8);
assert_eq!(Some(Bytes::from_static(b"ne1l")), payload.read_exact(4));
assert_eq!(payload.buf.len(), 4);
}
#[actix_rt::test]
async fn read_until() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(None, payload.read_until(b"ne").unwrap());
sender.feed_data(Bytes::from("line1"));
sender.feed_data(Bytes::from("line2"));
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(
Some(Bytes::from("line")),
payload.read_until(b"ne").unwrap()
);
assert_eq!(payload.buf.len(), 6);
assert_eq!(
Some(Bytes::from("1line2")),
payload.read_until(b"2").unwrap()
);
assert_eq!(payload.buf.len(), 0);
}
}


@ -0,0 +1,60 @@
use std::{cell::Cell, marker::PhantomData, rc::Rc, task};
use local_waker::LocalWaker;
/// Counter. It tracks the number of payload clones and gives access to the payload only to the
/// top-most owner.
///
/// - When dropped, the parent task is awakened. This supports the case where a `Field` is dropped
///   in a separate task from its `Multipart`.
/// - Assumes that parent owners don't move to different tasks; only the top-most is allowed to.
/// - If dropped while not the top-most owner, the `is_clean` flag is set to false.
#[derive(Debug)]
pub(crate) struct Safety {
task: LocalWaker,
level: usize,
payload: Rc<PhantomData<bool>>,
clean: Rc<Cell<bool>>,
}
impl Safety {
pub(crate) fn new() -> Safety {
let payload = Rc::new(PhantomData);
Safety {
task: LocalWaker::new(),
level: Rc::strong_count(&payload),
clean: Rc::new(Cell::new(true)),
payload,
}
}
pub(crate) fn current(&self) -> bool {
Rc::strong_count(&self.payload) == self.level && self.clean.get()
}
pub(crate) fn is_clean(&self) -> bool {
self.clean.get()
}
pub(crate) fn clone(&self, cx: &task::Context<'_>) -> Safety {
let payload = Rc::clone(&self.payload);
let s = Safety {
task: LocalWaker::new(),
level: Rc::strong_count(&payload),
clean: self.clean.clone(),
payload,
};
s.task.register(cx.waker());
s
}
}
impl Drop for Safety {
fn drop(&mut self) {
if Rc::strong_count(&self.payload) != self.level {
// Multipart dropped leaving a Field
self.clean.set(false);
}
self.task.wake();
}
}

File diff suppressed because it is too large.


@ -1,5 +1,9 @@
use actix_web::http::header::{self, HeaderMap};
use bytes::{BufMut as _, Bytes, BytesMut};
//! Multipart testing utilities.
use actix_web::{
http::header::{self, HeaderMap},
web::{BufMut as _, Bytes, BytesMut},
};
use mime::Mime;
use rand::{
distributions::{Alphanumeric, DistString as _},
@ -21,8 +25,7 @@ const BOUNDARY_PREFIX: &str = "------------------------";
///
/// ```
/// use actix_multipart::test::create_form_data_payload_and_headers;
/// use actix_web::test::TestRequest;
/// use bytes::Bytes;
/// use actix_web::{test::TestRequest, web::Bytes};
/// use memchr::memmem::find;
///
/// let (body, headers) = create_form_data_payload_and_headers(


@ -12,9 +12,11 @@ repository = "https://github.com/actix/actix-web"
license = "MIT OR Apache-2.0"
edition = "2021"
[lib]
name = "actix_router"
path = "src/lib.rs"
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"http::*",
"serde::*",
]
[features]
default = ["http", "unicode"]
@ -36,6 +38,9 @@ http = "0.2.7"
serde = { version = "1", features = ["derive"] }
percent-encoding = "2.1"
[lints]
workspace = true
[[bench]]
name = "router"
harness = false


@ -511,11 +511,6 @@ mod tests {
value: String,
}
#[derive(Deserialize)]
struct Id {
_id: String,
}
#[derive(Debug, Deserialize)]
struct Test1(String, u32);


@ -1,7 +1,5 @@
//! Resource path matching and router.
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]


@ -18,6 +18,22 @@ categories = [
license = "MIT OR Apache-2.0"
edition = "2021"
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_codec::*",
"actix_http_test::*",
"actix_http::*",
"actix_service::*",
"actix_web::*",
"awc::*",
"bytes::*",
"futures_core::*",
"http::*",
"openssl::*",
"rustls::*",
"tokio::*",
]
[features]
default = []
@ -57,3 +73,6 @@ tls-rustls-0_21 = { package = "rustls", version = "0.21", optional = true }
tls-rustls-0_22 = { package = "rustls", version = "0.22", optional = true }
tls-rustls-0_23 = { package = "rustls", version = "0.23", default-features = false, optional = true }
tokio = { version = "1.24.2", features = ["sync"] }
[lints]
workspace = true

actix-test/README.md (new file, 45 lines)

@ -0,0 +1,45 @@
# `actix-test`
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-test?label=latest)](https://crates.io/crates/actix-test)
[![Documentation](https://docs.rs/actix-test/badge.svg?version=0.1.5)](https://docs.rs/actix-test/0.1.5)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-test.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-test/0.1.5/status.svg)](https://deps.rs/crate/actix-test/0.1.5)
[![Download](https://img.shields.io/crates/d/actix-test.svg)](https://crates.io/crates/actix-test)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end -->
<!-- cargo-rdme start -->
Integration testing tools for Actix Web applications.
The main integration testing tool is [`TestServer`]. It spawns a real HTTP server on an unused port and provides methods that use a real HTTP client. Therefore, it is much closer to real-world cases than using `init_service`, which skips HTTP encoding and decoding.
## Examples
```rust
use actix_web::{get, web, test, App, HttpResponse, Error, Responder};
#[get("/")]
async fn my_handler() -> Result<impl Responder, Error> {
Ok(HttpResponse::Ok())
}
#[actix_rt::test]
async fn test_example() {
let srv = actix_test::start(||
App::new().service(my_handler)
);
let req = srv.get("/");
let res = req.send().await.unwrap();
assert!(res.status().is_success());
}
```
<!-- cargo-rdme end -->


@ -5,6 +5,7 @@
//! real-world cases than using `init_service`, which skips HTTP encoding and decoding.
//!
//! # Examples
//!
//! ```
//! use actix_web::{get, web, test, App, HttpResponse, Error, Responder};
//!
@ -26,8 +27,6 @@
//! }
//! ```
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]


@ -2,7 +2,10 @@
## Unreleased
- Take the encoded buffer when yielding bytes in the response stream rather than splitting the buffer, reducing memory use
## 4.3.1 <!-- v4.3.1+deprecated -->
- Reduce memory usage by `take`-ing (rather than `split`-ing) the encoded buffer when yielding bytes in the response stream.
- Mark crate as deprecated.
- Minimum supported Rust version (MSRV) is now 1.72.
## 4.3.0


@ -1,17 +1,24 @@
[package]
name = "actix-web-actors"
version = "4.3.0"
version = "4.3.1+deprecated"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Actix actors support for Actix Web"
keywords = ["actix", "http", "web", "framework", "async"]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
license = "MIT OR Apache-2.0"
edition = "2021"
homepage.workspace = true
repository.workspace = true
license.workspace = true
edition.workspace = true
rust-version.workspace = true
[lib]
name = "actix_web_actors"
path = "src/lib.rs"
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix::*",
"actix_http::*",
"actix_web::*",
"bytes::*",
"bytestring::*",
"futures_core::*",
]
[dependencies]
actix = { version = ">=0.12, <0.14", default-features = false }
@ -35,3 +42,6 @@ actix-web = { version = "4", features = ["macros"] }
env_logger = "0.11"
futures-util = { version = "0.3.17", default-features = false, features = ["std"] }
mime = "0.3"
[lints]
workspace = true


@ -1,15 +1,17 @@
# `actix-web-actors`
> Actix actors support for Actix Web.
>
> This crate is deprecated. Migrate to [`actix-ws`](https://crates.io/crates/actix-ws).
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors)
[![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.3.0)](https://docs.rs/actix-web-actors/4.3.0)
[![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.3.1)](https://docs.rs/actix-web-actors/4.3.1)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![License](https://img.shields.io/crates/l/actix-web-actors.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-web-actors/4.3.0/status.svg)](https://deps.rs/crate/actix-web-actors/4.3.0)
![maintenance-status](https://img.shields.io/badge/maintenance-deprecated-red.svg)
[![Download](https://img.shields.io/crates/d/actix-web-actors.svg)](https://crates.io/crates/actix-web-actors)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)


@ -1,5 +1,7 @@
//! Actix actors support for Actix Web.
//!
//! This crate is deprecated. Migrate to [`actix-ws`](https://crates.io/crates/actix-ws).
//!
//! # Examples
//!
//! ```no_run
@ -55,8 +57,6 @@
//! * [`HttpContext`]: This struct provides actor support for streaming HTTP responses.
//!
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]


@ -35,3 +35,6 @@ actix-web = "4"
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
trybuild = "1"
rustversion = "1"
[lints]
workspace = true


@ -73,8 +73,6 @@
//! [DELETE]: macro@delete
#![recursion_limit = "512"]
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]


@ -145,7 +145,7 @@ async fn custom_resource_name_test<'a>(req: HttpRequest) -> impl Responder {
mod guard_module {
use actix_web::{guard::GuardContext, http::header};
pub fn guard(ctx: &GuardContext) -> bool {
pub fn guard(ctx: &GuardContext<'_>) -> bool {
ctx.header::<header::Accept>()
.map(|h| h.preference() == "image/*")
.unwrap_or(false)


@ -1,7 +1,7 @@
use actix_web::{guard::GuardContext, http, http::header, web, App, HttpResponse, Responder};
use actix_web_codegen::{delete, get, post, route, routes, scope};
pub fn image_guard(ctx: &GuardContext) -> bool {
pub fn image_guard(ctx: &GuardContext<'_>) -> bool {
ctx.header::<header::Accept>()
.map(|h| h.preference() == "image/*")
.unwrap_or(false)


@ -2,9 +2,25 @@
## Unreleased
## 4.9.0
### Added
- Add `middleware::from_fn()` helper (see the usage sketch after this list).
- Add `web::ThinData` extractor.
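A minimal usage sketch of the new `from_fn()` helper, following its documented signature; the timing middleware, route, and port are illustrative only:

```rust
use actix_web::{
    body::MessageBody,
    dev::{ServiceRequest, ServiceResponse},
    middleware::{from_fn, Next},
    web, App, Error, HttpServer,
};

// function middleware: log how long each request takes
async fn timing_mw(
    req: ServiceRequest,
    next: Next<impl MessageBody>,
) -> Result<ServiceResponse<impl MessageBody>, Error> {
    let started = std::time::Instant::now();
    let res = next.call(req).await?;
    println!("handled {} in {:?}", res.request().path(), started.elapsed());
    Ok(res)
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            .wrap(from_fn(timing_mw))
            .route("/", web::get().to(|| async { "ok" }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```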
## 4.8.0
### Added
- Add `web::Html` responder.
- Add `HttpRequest::full_url()` method to get the complete URL of the request.
### Fixed
- `ConnectionInfo::realip_remote_addr()` now handles IPv6 addresses from `Forwarded` header correctly. Previously, it sometimes returned the forwarded port as well.
- Always remove port from return value of `ConnectionInfo::realip_remote_addr()` when handling IPv6 addresses from the `Forwarded` header.
- The `UrlencodedError::ContentType` variant (relevant to the `Form` extractor) now uses the 415 (Media Type Unsupported) status code in its `ResponseError` implementation.
- Apply `HttpServer::max_connection_rate()` setting when using rustls v0.22 or v0.23.
## 4.7.0
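
A rough usage sketch of the two 4.9.0 additions together (illustration only, not part of the changelog or this diff; `log_path`, `index`, and `build_app` are made-up names):

use std::sync::{
    atomic::{AtomicU64, Ordering},
    Arc,
};
use actix_web::{
    body::MessageBody,
    dev::{ServiceRequest, ServiceResponse},
    middleware::{from_fn, Next},
    web::{self, ThinData},
    App, Error, HttpResponse,
};

// `middleware::from_fn()` turns a plain async function into middleware.
async fn log_path(
    req: ServiceRequest,
    next: Next<impl MessageBody>,
) -> Result<ServiceResponse<impl MessageBody>, Error> {
    println!("handling {}", req.path());
    next.call(req).await
}

// `web::ThinData` extracts app data that is already cheap to clone (an `Arc` here).
async fn index(ThinData(hits): ThinData<Arc<AtomicU64>>) -> HttpResponse {
    hits.fetch_add(1, Ordering::Relaxed);
    HttpResponse::Ok().finish()
}

fn build_app() {
    let hits = Arc::new(AtomicU64::new(0));
    let _app = App::new()
        .app_data(ThinData(hits))
        .wrap(from_fn(log_path))
        .route("/", web::get().to(index));
}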

View File

@ -1,6 +1,6 @@
[package]
name = "actix-web"
version = "4.7.0"
version = "4.9.0"
description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
@ -35,9 +35,31 @@ features = [
"secure-cookies",
]
[lib]
name = "actix_web"
path = "src/lib.rs"
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_http::*",
"actix_router::*",
"actix_rt::*",
"actix_server::*",
"actix_service::*",
"actix_utils::*",
"actix_web_codegen::*",
"bytes::*",
"cookie::*",
"cookie",
"futures_core::*",
"http::*",
"language_tags::*",
"mime::*",
"openssl::*",
"rustls::*",
"serde_json::*",
"serde_urlencoded::*",
"serde::*",
"serde::*",
"tokio::*",
"url::*",
]
[features]
default = [
@ -71,18 +93,18 @@ secure-cookies = ["cookies", "cookie/secure"]
http2 = ["actix-http/http2"]
# TLS via OpenSSL
openssl = ["http2", "actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]
openssl = ["__tls", "http2", "actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]
# TLS via Rustls v0.20
rustls = ["rustls-0_20"]
# TLS via Rustls v0.20
rustls-0_20 = ["http2", "actix-http/rustls-0_20", "actix-tls/accept", "actix-tls/rustls-0_20"]
rustls-0_20 = ["__tls", "http2", "actix-http/rustls-0_20", "actix-tls/accept", "actix-tls/rustls-0_20"]
# TLS via Rustls v0.21
rustls-0_21 = ["http2", "actix-http/rustls-0_21", "actix-tls/accept", "actix-tls/rustls-0_21"]
rustls-0_21 = ["__tls", "http2", "actix-http/rustls-0_21", "actix-tls/accept", "actix-tls/rustls-0_21"]
# TLS via Rustls v0.22
rustls-0_22 = ["http2", "actix-http/rustls-0_22", "actix-tls/accept", "actix-tls/rustls-0_22"]
rustls-0_22 = ["__tls", "http2", "actix-http/rustls-0_22", "actix-tls/accept", "actix-tls/rustls-0_22"]
# TLS via Rustls v0.23
rustls-0_23 = ["http2", "actix-http/rustls-0_23", "actix-tls/accept", "actix-tls/rustls-0_23"]
rustls-0_23 = ["__tls", "http2", "actix-http/rustls-0_23", "actix-tls/accept", "actix-tls/rustls-0_23"]
# Full unicode support
unicode = ["dep:regex", "actix-router/unicode"]
@ -91,6 +113,10 @@ unicode = ["dep:regex", "actix-router/unicode"]
# Don't rely on these whatsoever. They may disappear at any time.
__compress = []
# Internal (PRIVATE!) features used to aid checking feature status.
# Don't rely on these whatsoever. They may disappear at any time.
__tls = []
# io-uring feature only available for Linux OSes.
experimental-io-uring = ["actix-server/io-uring"]
@ -125,6 +151,7 @@ encoding_rs = "0.8"
futures-core = { version = "0.3.17", default-features = false }
futures-util = { version = "0.3.17", default-features = false }
itoa = "1"
impl-more = "0.1.4"
language-tags = "0.3"
log = "0.4"
mime = "0.3"
@ -162,6 +189,9 @@ tls-rustls = { package = "rustls", version = "0.23" }
tokio = { version = "1.24.2", features = ["rt-multi-thread", "macros"] }
zstd = "0.13"
[lints]
workspace = true
[[test]]
name = "test_server"
required-features = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]

View File

@ -8,10 +8,10 @@
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-web?label=latest)](https://crates.io/crates/actix-web)
[![Documentation](https://docs.rs/actix-web/badge.svg?version=4.7.0)](https://docs.rs/actix-web/4.7.0)
[![Documentation](https://docs.rs/actix-web/badge.svg?version=4.9.0)](https://docs.rs/actix-web/4.9.0)
![MSRV](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-web.svg)
[![Dependency Status](https://deps.rs/crate/actix-web/4.7.0/status.svg)](https://deps.rs/crate/actix-web/4.7.0)
[![Dependency Status](https://deps.rs/crate/actix-web/4.9.0/status.svg)](https://deps.rs/crate/actix-web/4.9.0)
<br />
[![CI](https://github.com/actix/actix-web/actions/workflows/ci.yml/badge.svg)](https://github.com/actix/actix-web/actions/workflows/ci.yml)
[![codecov](https://codecov.io/gh/actix/actix-web/branch/master/graph/badge.svg)](https://codecov.io/gh/actix/actix-web)
@ -109,4 +109,4 @@ This project is licensed under either of the following licenses, at your option:
## Code of Conduct
Contribution to the actix-web repo is organized under the terms of the Contributor Covenant. The Actix team promises to intervene to uphold that code of conduct.
Contribution to the `actix/actix-web` repo is organized under the terms of the Contributor Covenant. The Actix team promises to intervene to uphold that code of conduct.

View File

@ -2,11 +2,9 @@ use std::{future::Future, time::Instant};
use actix_http::body::BoxBody;
use actix_utils::future::{ready, Ready};
use actix_web::{
error, http::StatusCode, test::TestRequest, Error, HttpRequest, HttpResponse, Responder,
};
use actix_web::{http::StatusCode, test::TestRequest, Error, HttpRequest, HttpResponse, Responder};
use criterion::{criterion_group, criterion_main, Criterion};
use futures_util::future::{join_all, Either};
use futures_util::future::join_all;
// responder simulate the old responder trait.
trait FutureResponder {
@ -16,9 +14,6 @@ trait FutureResponder {
fn future_respond_to(self, req: &HttpRequest) -> Self::Future;
}
// a simple option responder type.
struct OptionResponder<T>(Option<T>);
// a simple wrapper type around string
struct StringResponder(String);
@ -34,22 +29,6 @@ impl FutureResponder for StringResponder {
}
}
impl<T> FutureResponder for OptionResponder<T>
where
T: FutureResponder,
T::Future: Future<Output = Result<HttpResponse, Error>>,
{
type Error = Error;
type Future = Either<T::Future, Ready<Result<HttpResponse, Self::Error>>>;
fn future_respond_to(self, req: &HttpRequest) -> Self::Future {
match self.0 {
Some(t) => Either::Left(t.future_respond_to(req)),
None => Either::Right(ready(Err(error::ErrorInternalServerError("err")))),
}
}
}
impl Responder for StringResponder {
type Body = BoxBody;
@ -60,17 +39,6 @@ impl Responder for StringResponder {
}
}
impl<T: Responder> Responder for OptionResponder<T> {
type Body = BoxBody;
fn respond_to(self, req: &HttpRequest) -> HttpResponse<Self::Body> {
match self.0 {
Some(t) => t.respond_to(req).map_into_boxed_body(),
None => HttpResponse::from_error(error::ErrorInternalServerError("err")),
}
}
}
fn future_responder(c: &mut Criterion) {
let rt = actix_rt::System::new();
let req = TestRequest::default().to_http_request();

View File

@ -0,0 +1,127 @@
//! Shows a couple of ways to use the `from_fn` middleware.
use std::{collections::HashMap, io, rc::Rc, time::Duration};
use actix_web::{
body::MessageBody,
dev::{Service, ServiceRequest, ServiceResponse, Transform},
http::header::{self, HeaderValue, Range},
middleware::{from_fn, Logger, Next},
web::{self, Header, Query},
App, Error, HttpResponse, HttpServer,
};
async fn noop<B>(req: ServiceRequest, next: Next<B>) -> Result<ServiceResponse<B>, Error> {
next.call(req).await
}
async fn print_range_header<B>(
range_header: Option<Header<Range>>,
req: ServiceRequest,
next: Next<B>,
) -> Result<ServiceResponse<B>, Error> {
if let Some(Header(range)) = range_header {
println!("Range: {range}");
} else {
println!("No Range header");
}
next.call(req).await
}
async fn mutate_body_type(
req: ServiceRequest,
next: Next<impl MessageBody + 'static>,
) -> Result<ServiceResponse<impl MessageBody>, Error> {
let res = next.call(req).await?;
Ok(res.map_into_left_body::<()>())
}
async fn mutate_body_type_with_extractors(
string_body: String,
query: Query<HashMap<String, String>>,
req: ServiceRequest,
next: Next<impl MessageBody + 'static>,
) -> Result<ServiceResponse<impl MessageBody>, Error> {
println!("body is: {string_body}");
println!("query string: {query:?}");
let res = next.call(req).await?;
Ok(res.map_body(move |_, _| string_body))
}
async fn timeout_10secs(
req: ServiceRequest,
next: Next<impl MessageBody + 'static>,
) -> Result<ServiceResponse<impl MessageBody>, Error> {
match tokio::time::timeout(Duration::from_secs(10), next.call(req)).await {
Ok(res) => res,
Err(_err) => Err(actix_web::error::ErrorRequestTimeout("")),
}
}
struct MyMw(bool);
impl MyMw {
async fn mw_cb(
&self,
req: ServiceRequest,
next: Next<impl MessageBody + 'static>,
) -> Result<ServiceResponse<impl MessageBody>, Error> {
let mut res = match self.0 {
true => req.into_response("short-circuited").map_into_right_body(),
false => next.call(req).await?.map_into_left_body(),
};
res.headers_mut()
.insert(header::WARNING, HeaderValue::from_static("42"));
Ok(res)
}
pub fn into_middleware<S, B>(
self,
) -> impl Transform<
S,
ServiceRequest,
Response = ServiceResponse<impl MessageBody>,
Error = Error,
InitError = (),
>
where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
B: MessageBody + 'static,
{
let this = Rc::new(self);
from_fn(move |req, next| {
let this = Rc::clone(&this);
async move { Self::mw_cb(&this, req, next).await }
})
}
}
#[actix_web::main]
async fn main() -> io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
let bind = ("127.0.0.1", 8080);
log::info!("staring server at http://{}:{}", &bind.0, &bind.1);
HttpServer::new(|| {
App::new()
.wrap(from_fn(noop))
.wrap(from_fn(print_range_header))
.wrap(from_fn(mutate_body_type))
.wrap(from_fn(mutate_body_type_with_extractors))
.wrap(from_fn(timeout_10secs))
// switch bool to true to observe early response
.wrap(MyMw(false).into_middleware())
.wrap(Logger::default())
.default_service(web::to(HttpResponse::Ok))
})
.workers(1)
.bind(bind)?
.run()
.await
}

View File

@ -39,7 +39,7 @@ impl App<AppEntry> {
let factory_ref = Rc::new(RefCell::new(None));
App {
endpoint: AppEntry::new(factory_ref.clone()),
endpoint: AppEntry::new(Rc::clone(&factory_ref)),
data_factories: Vec::new(),
services: Vec::new(),
default: None,
@ -234,7 +234,6 @@ where
///
/// * *Resource* is an entry in resource table which corresponds to requested URL.
/// * *Scope* is a set of resources with common root path.
/// * "StaticFiles" is a service for static files support
pub fn service<F>(mut self, factory: F) -> Self
where
F: HttpServiceFactory + 'static,

View File

@ -71,7 +71,7 @@ where
});
// create App config to pass to child services
let mut config = AppService::new(config, default.clone());
let mut config = AppService::new(config, Rc::clone(&default));
// register services
mem::take(&mut *self.services.borrow_mut())

View File

@ -68,7 +68,7 @@ impl AppService {
pub(crate) fn clone_config(&self) -> Self {
AppService {
config: self.config.clone(),
default: self.default.clone(),
default: Rc::clone(&self.default),
services: Vec::new(),
root: false,
}
@ -81,7 +81,7 @@ impl AppService {
/// Returns default handler factory.
pub fn default_service(&self) -> Rc<BoxedHttpServiceFactory> {
self.default.clone()
Rc::clone(&self.default)
}
/// Register HTTP service.

View File

@ -184,7 +184,7 @@ impl<T: ?Sized + 'static> FromRequest for Data<T> {
impl<T: ?Sized + 'static> DataFactory for Data<T> {
fn create(&self, extensions: &mut Extensions) -> bool {
extensions.insert(Data(self.0.clone()));
extensions.insert(Data(Arc::clone(&self.0)));
true
}
}

View File

@ -100,6 +100,7 @@ impl ResponseError for UrlencodedError {
match self {
Self::Overflow { .. } => StatusCode::PAYLOAD_TOO_LARGE,
Self::UnknownLength => StatusCode::LENGTH_REQUIRED,
Self::ContentType => StatusCode::UNSUPPORTED_MEDIA_TYPE,
Self::Payload(err) => err.status_code(),
_ => StatusCode::BAD_REQUEST,
}
@ -232,7 +233,7 @@ mod tests {
let resp = UrlencodedError::UnknownLength.error_response();
assert_eq!(resp.status(), StatusCode::LENGTH_REQUIRED);
let resp = UrlencodedError::ContentType.error_response();
assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
assert_eq!(resp.status(), StatusCode::UNSUPPORTED_MEDIA_TYPE);
}
#[test]

View File

@ -19,7 +19,7 @@ use crate::{
/// 1. It is an async function (or a function/closure that returns an appropriate future);
/// 1. The function parameters (up to 12) implement [`FromRequest`];
/// 1. The async function (or future) resolves to a type that can be converted into an
/// [`HttpResponse`] (i.e., it implements the [`Responder`] trait).
/// [`HttpResponse`] (i.e., it implements the [`Responder`] trait).
///
///
/// # Compiler Errors
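
A minimal sketch (not from this diff) of a function satisfying the handler contract listed above: async, extractor parameters, and a return type convertible into a response.

use actix_web::{web, App, Responder};

// `web::Path<String>` implements `FromRequest`; `String` implements `Responder`.
async fn greet(name: web::Path<String>) -> impl Responder {
    format!("Hello {}!", name.into_inner())
}

fn register() {
    // registered like any other handler
    let _app = App::new().route("/hello/{name}", web::get().to(greet));
}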

View File

@ -154,7 +154,7 @@ impl DispositionParam {
#[inline]
pub fn as_name(&self) -> Option<&str> {
match self {
DispositionParam::Name(ref name) => Some(name.as_str()),
DispositionParam::Name(name) => Some(name.as_str()),
_ => None,
}
}
@ -163,7 +163,7 @@ impl DispositionParam {
#[inline]
pub fn as_filename(&self) -> Option<&str> {
match self {
DispositionParam::Filename(ref filename) => Some(filename.as_str()),
DispositionParam::Filename(filename) => Some(filename.as_str()),
_ => None,
}
}
@ -172,7 +172,7 @@ impl DispositionParam {
#[inline]
pub fn as_filename_ext(&self) -> Option<&ExtendedValue> {
match self {
DispositionParam::FilenameExt(ref value) => Some(value),
DispositionParam::FilenameExt(value) => Some(value),
_ => None,
}
}
@ -493,7 +493,7 @@ impl Header for ContentDisposition {
}
fn parse<T: crate::HttpMessage>(msg: &T) -> Result<Self, crate::error::ParseError> {
if let Some(h) = msg.headers().get(&Self::name()) {
if let Some(h) = msg.headers().get(Self::name()) {
Self::from_raw(h)
} else {
Err(crate::error::ParseError::Header)

View File

@ -107,16 +107,16 @@ impl ByteRangeSpec {
/// satisfiable if they meet the following conditions:
///
/// > If a valid byte-range-set includes at least one byte-range-spec with a first-byte-pos that
/// is less than the current length of the representation, or at least one
/// suffix-byte-range-spec with a non-zero suffix-length, then the byte-range-set
/// is satisfiable. Otherwise, the byte-range-set is unsatisfiable.
/// > is less than the current length of the representation, or at least one
/// > suffix-byte-range-spec with a non-zero suffix-length, then the byte-range-set is
/// > satisfiable. Otherwise, the byte-range-set is unsatisfiable.
///
/// The function also computes remainder ranges based on the RFC:
///
/// > If the last-byte-pos value is absent, or if the value is greater than or equal to the
/// current length of the representation data, the byte range is interpreted as the remainder
/// of the representation (i.e., the server replaces the value of last-byte-pos with a value
/// that is one less than the current length of the selected representation).
/// > current length of the representation data, the byte range is interpreted as the remainder
/// > of the representation (i.e., the server replaces the value of last-byte-pos with a value
/// > that is one less than the current length of the selected representation).
///
/// [RFC 7233 §2.1]: https://datatracker.ietf.org/doc/html/rfc7233
pub fn to_satisfiable_range(&self, full_length: u64) -> Option<(u64, u64)> {
@ -270,7 +270,7 @@ impl Header for Range {
#[inline]
fn parse<T: HttpMessage>(msg: &T) -> Result<Self, ParseError> {
header::from_one_raw_str(msg.headers().get(&Self::name()))
header::from_one_raw_str(msg.headers().get(Self::name()))
}
}
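
A small sketch of the satisfiability rules quoted above (illustration only; it assumes the crate's `FromTo`/`From`/`Last` variants and the documented clamping behavior):

use actix_web::http::header::ByteRangeSpec;

fn main() {
    // bytes 0-499 of a 1000-byte representation is satisfiable as-is
    assert_eq!(ByteRangeSpec::FromTo(0, 499).to_satisfiable_range(1000), Some((0, 499)));
    // a last-byte-pos past the end is clamped to the final byte
    assert_eq!(ByteRangeSpec::FromTo(500, 1500).to_satisfiable_range(1000), Some((500, 999)));
    // a non-zero suffix resolves to the tail of the representation
    assert_eq!(ByteRangeSpec::Last(300).to_satisfiable_range(1000), Some((700, 999)));
    // a first-byte-pos at or past the current length is unsatisfiable
    assert_eq!(ByteRangeSpec::From(1200).to_satisfiable_range(1000), None);
}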

View File

@ -27,7 +27,8 @@ fn bare_address(val: &str) -> &str {
val.split("]:")
.next()
.map(|s| s.trim_start_matches('[').trim_end_matches(']'))
// This shouldn't *actually* ever happen
// this indicates that the IPv6 address is malformed so shouldn't
// usually happen, but if it does, just return the original input
.unwrap_or(val)
} else {
val.split(':').next().unwrap_or(val)
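
A standalone sketch of the logic above (the elided condition is assumed to branch on a leading `[`), showing the intended results for IPv6 and IPv4 inputs:

fn bare_address_sketch(val: &str) -> &str {
    if val.starts_with('[') {
        // IPv6: strip the brackets and any trailing `:port`
        val.split("]:")
            .next()
            .map(|s| s.trim_start_matches('[').trim_end_matches(']'))
            .unwrap_or(val)
    } else {
        // IPv4 / hostname: strip any trailing `:port`
        val.split(':').next().unwrap_or(val)
    }
}

fn main() {
    assert_eq!(bare_address_sketch("[2001:db8::1]:8443"), "2001:db8::1");
    assert_eq!(bare_address_sketch("[2001:db8::1]"), "2001:db8::1");
    assert_eq!(bare_address_sketch("203.0.113.7:8443"), "203.0.113.7");
}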

View File

@ -70,8 +70,6 @@
//! - `rustls-0_23` - HTTPS support via `rustls` 0.23 crate, supports `HTTP/2`
//! - `secure-cookies` - secure cookies support
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
@ -106,6 +104,7 @@ mod scope;
mod server;
mod service;
pub mod test;
mod thin_data;
pub(crate) mod types;
pub mod web;

View File

@ -141,7 +141,7 @@ where
actix_service::forward_ready!(service);
fn call(&self, req: ServiceRequest) -> Self::Future {
let inner = self.inner.clone();
let inner = Rc::clone(&self.inner);
let fut = self.service.call(req);
DefaultHeaderFuture {

View File

@ -220,16 +220,20 @@ impl<B> ErrorHandlers<B> {
/// [`.handler()`][ErrorHandlers::handler]) will fall back on this.
///
/// Note that this will overwrite any default handlers previously set by calling
/// [`.default_handler_client()`][ErrorHandlers::default_handler_client] or
/// [`.default_handler_server()`][ErrorHandlers::default_handler_server], but not any set by
/// calling [`.handler()`][ErrorHandlers::handler].
/// [`default_handler_client()`] or [`.default_handler_server()`], but not any set by calling
/// [`.handler()`].
///
/// [`default_handler_client()`]: ErrorHandlers::default_handler_client
/// [`.default_handler_server()`]: ErrorHandlers::default_handler_server
/// [`.handler()`]: ErrorHandlers::handler
pub fn default_handler<F>(self, handler: F) -> Self
where
F: Fn(ServiceResponse<B>) -> Result<ErrorHandlerResponse<B>> + 'static,
{
let handler = Rc::new(handler);
let handler2 = Rc::clone(&handler);
Self {
default_server: Some(handler.clone()),
default_server: Some(handler2),
default_client: Some(handler),
..self
}
@ -288,7 +292,7 @@ where
type Future = LocalBoxFuture<'static, Result<Self::Transform, Self::InitError>>;
fn new_transform(&self, service: S) -> Self::Future {
let handlers = self.handlers.clone();
let handlers = Rc::clone(&self.handlers);
let default_client = self.default_client.clone();
let default_server = self.default_server.clone();
Box::pin(async move {
@ -323,7 +327,7 @@ where
actix_service::forward_ready!(service);
fn call(&self, req: ServiceRequest) -> Self::Future {
let handlers = self.handlers.clone();
let handlers = Rc::clone(&self.handlers);
let default_client = self.default_client.clone();
let default_server = self.default_server.clone();
let fut = self.service.call(req);

View File

@ -0,0 +1,349 @@
use std::{future::Future, marker::PhantomData, rc::Rc};
use actix_service::boxed::{self, BoxFuture, RcService};
use actix_utils::future::{ready, Ready};
use futures_core::future::LocalBoxFuture;
use crate::{
body::MessageBody,
dev::{forward_ready, Service, ServiceRequest, ServiceResponse, Transform},
Error, FromRequest,
};
/// Wraps an async function to be used as a middleware.
///
/// # Examples
///
/// The wrapped function should have the following form:
///
/// ```
/// # use actix_web::{
/// # App, Error,
/// # body::MessageBody,
/// # dev::{ServiceRequest, ServiceResponse, Service as _},
/// # };
/// use actix_web::middleware::{self, Next};
///
/// async fn my_mw(
/// req: ServiceRequest,
/// next: Next<impl MessageBody>,
/// ) -> Result<ServiceResponse<impl MessageBody>, Error> {
/// // pre-processing
/// next.call(req).await
/// // post-processing
/// }
/// # App::new().wrap(middleware::from_fn(my_mw));
/// ```
///
/// Then use in an app builder like this:
///
/// ```
/// use actix_web::{
/// App, Error,
/// dev::{ServiceRequest, ServiceResponse, Service as _},
/// };
/// use actix_web::middleware::from_fn;
/// # use actix_web::middleware::Next;
/// # async fn my_mw<B>(req: ServiceRequest, next: Next<B>) -> Result<ServiceResponse<B>, Error> {
/// # next.call(req).await
/// # }
///
/// App::new()
/// .wrap(from_fn(my_mw))
/// # ;
/// ```
///
/// It is also possible to write a middleware that automatically uses extractors, similar to request
/// handlers, by declaring them as the first parameters. As usual, **take care with extractors that
/// consume the body stream**, since handlers will no longer be able to read it again without
/// putting the body "back" into the request object within your middleware.
///
/// ```
/// # use std::collections::HashMap;
/// # use actix_web::{
/// # App, Error,
/// # body::MessageBody,
/// # dev::{ServiceRequest, ServiceResponse},
/// # http::header::{Accept, Date},
/// # web::{Header, Query},
/// # };
/// use actix_web::middleware::Next;
///
/// async fn my_extracting_mw(
/// accept: Header<Accept>,
/// query: Query<HashMap<String, String>>,
/// req: ServiceRequest,
/// next: Next<impl MessageBody>,
/// ) -> Result<ServiceResponse<impl MessageBody>, Error> {
/// // pre-processing
/// next.call(req).await
/// // post-processing
/// }
/// # App::new().wrap(actix_web::middleware::from_fn(my_extracting_mw));
/// ```
pub fn from_fn<F, Es>(mw_fn: F) -> MiddlewareFn<F, Es> {
MiddlewareFn {
mw_fn: Rc::new(mw_fn),
_phantom: PhantomData,
}
}
/// Middleware transform for [`from_fn`].
#[allow(missing_debug_implementations)]
pub struct MiddlewareFn<F, Es> {
mw_fn: Rc<F>,
_phantom: PhantomData<Es>,
}
impl<S, F, Fut, B, B2> Transform<S, ServiceRequest> for MiddlewareFn<F, ()>
where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
F: Fn(ServiceRequest, Next<B>) -> Fut + 'static,
Fut: Future<Output = Result<ServiceResponse<B2>, Error>>,
B2: MessageBody,
{
type Response = ServiceResponse<B2>;
type Error = Error;
type Transform = MiddlewareFnService<F, B, ()>;
type InitError = ();
type Future = Ready<Result<Self::Transform, Self::InitError>>;
fn new_transform(&self, service: S) -> Self::Future {
ready(Ok(MiddlewareFnService {
service: boxed::rc_service(service),
mw_fn: Rc::clone(&self.mw_fn),
_phantom: PhantomData,
}))
}
}
/// Middleware service for [`from_fn`].
#[allow(missing_debug_implementations)]
pub struct MiddlewareFnService<F, B, Es> {
service: RcService<ServiceRequest, ServiceResponse<B>, Error>,
mw_fn: Rc<F>,
_phantom: PhantomData<(B, Es)>,
}
impl<F, Fut, B, B2> Service<ServiceRequest> for MiddlewareFnService<F, B, ()>
where
F: Fn(ServiceRequest, Next<B>) -> Fut,
Fut: Future<Output = Result<ServiceResponse<B2>, Error>>,
B2: MessageBody,
{
type Response = ServiceResponse<B2>;
type Error = Error;
type Future = Fut;
forward_ready!(service);
fn call(&self, req: ServiceRequest) -> Self::Future {
(self.mw_fn)(
req,
Next::<B> {
service: Rc::clone(&self.service),
},
)
}
}
macro_rules! impl_middleware_fn_service {
($($ext_type:ident),*) => {
impl<S, F, Fut, B, B2, $($ext_type),*> Transform<S, ServiceRequest> for MiddlewareFn<F, ($($ext_type),*,)>
where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
F: Fn($($ext_type),*, ServiceRequest, Next<B>) -> Fut + 'static,
$($ext_type: FromRequest + 'static,)*
Fut: Future<Output = Result<ServiceResponse<B2>, Error>> + 'static,
B: MessageBody + 'static,
B2: MessageBody + 'static,
{
type Response = ServiceResponse<B2>;
type Error = Error;
type Transform = MiddlewareFnService<F, B, ($($ext_type,)*)>;
type InitError = ();
type Future = Ready<Result<Self::Transform, Self::InitError>>;
fn new_transform(&self, service: S) -> Self::Future {
ready(Ok(MiddlewareFnService {
service: boxed::rc_service(service),
mw_fn: Rc::clone(&self.mw_fn),
_phantom: PhantomData,
}))
}
}
impl<F, $($ext_type),*, Fut, B: 'static, B2> Service<ServiceRequest>
for MiddlewareFnService<F, B, ($($ext_type),*,)>
where
F: Fn(
$($ext_type),*,
ServiceRequest,
Next<B>
) -> Fut + 'static,
$($ext_type: FromRequest + 'static,)*
Fut: Future<Output = Result<ServiceResponse<B2>, Error>> + 'static,
B2: MessageBody + 'static,
{
type Response = ServiceResponse<B2>;
type Error = Error;
type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;
forward_ready!(service);
#[allow(nonstandard_style)]
fn call(&self, mut req: ServiceRequest) -> Self::Future {
let mw_fn = Rc::clone(&self.mw_fn);
let service = Rc::clone(&self.service);
Box::pin(async move {
let ($($ext_type,)*) = req.extract::<($($ext_type,)*)>().await?;
(mw_fn)($($ext_type),*, req, Next::<B> { service }).await
})
}
}
};
}
impl_middleware_fn_service!(E1);
impl_middleware_fn_service!(E1, E2);
impl_middleware_fn_service!(E1, E2, E3);
impl_middleware_fn_service!(E1, E2, E3, E4);
impl_middleware_fn_service!(E1, E2, E3, E4, E5);
impl_middleware_fn_service!(E1, E2, E3, E4, E5, E6);
impl_middleware_fn_service!(E1, E2, E3, E4, E5, E6, E7);
impl_middleware_fn_service!(E1, E2, E3, E4, E5, E6, E7, E8);
impl_middleware_fn_service!(E1, E2, E3, E4, E5, E6, E7, E8, E9);
/// Wraps the "next" service in the middleware chain.
#[allow(missing_debug_implementations)]
pub struct Next<B> {
service: RcService<ServiceRequest, ServiceResponse<B>, Error>,
}
impl<B> Next<B> {
/// Equivalent to `Service::call(self, req)`.
pub fn call(&self, req: ServiceRequest) -> <Self as Service<ServiceRequest>>::Future {
Service::call(self, req)
}
}
impl<B> Service<ServiceRequest> for Next<B> {
type Response = ServiceResponse<B>;
type Error = Error;
type Future = BoxFuture<Result<Self::Response, Self::Error>>;
forward_ready!(service);
fn call(&self, req: ServiceRequest) -> Self::Future {
self.service.call(req)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{
http::header::{self, HeaderValue},
middleware::{Compat, Logger},
test, web, App, HttpResponse,
};
async fn noop<B>(req: ServiceRequest, next: Next<B>) -> Result<ServiceResponse<B>, Error> {
next.call(req).await
}
async fn add_res_header<B>(
req: ServiceRequest,
next: Next<B>,
) -> Result<ServiceResponse<B>, Error> {
let mut res = next.call(req).await?;
res.headers_mut()
.insert(header::WARNING, HeaderValue::from_static("42"));
Ok(res)
}
async fn mutate_body_type(
req: ServiceRequest,
next: Next<impl MessageBody + 'static>,
) -> Result<ServiceResponse<impl MessageBody>, Error> {
let res = next.call(req).await?;
Ok(res.map_into_left_body::<()>())
}
struct MyMw(bool);
impl MyMw {
async fn mw_cb(
&self,
req: ServiceRequest,
next: Next<impl MessageBody + 'static>,
) -> Result<ServiceResponse<impl MessageBody>, Error> {
let mut res = match self.0 {
true => req.into_response("short-circuited").map_into_right_body(),
false => next.call(req).await?.map_into_left_body(),
};
res.headers_mut()
.insert(header::WARNING, HeaderValue::from_static("42"));
Ok(res)
}
pub fn into_middleware<S, B>(
self,
) -> impl Transform<
S,
ServiceRequest,
Response = ServiceResponse<impl MessageBody>,
Error = Error,
InitError = (),
>
where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
B: MessageBody + 'static,
{
let this = Rc::new(self);
from_fn(move |req, next| {
let this = Rc::clone(&this);
async move { Self::mw_cb(&this, req, next).await }
})
}
}
#[actix_rt::test]
async fn compat_compat() {
let _ = App::new().wrap(Compat::new(from_fn(noop)));
let _ = App::new().wrap(Compat::new(from_fn(mutate_body_type)));
}
#[actix_rt::test]
async fn permits_different_in_and_out_body_types() {
let app = test::init_service(
App::new()
.wrap(from_fn(mutate_body_type))
.wrap(from_fn(add_res_header))
.wrap(Logger::default())
.wrap(from_fn(noop))
.default_service(web::to(HttpResponse::NotFound)),
)
.await;
let req = test::TestRequest::default().to_request();
let res = test::call_service(&app, req).await;
assert!(res.headers().contains_key(header::WARNING));
}
#[actix_rt::test]
async fn closure_capture_and_return_from_fn() {
let app = test::init_service(
App::new()
.wrap(Logger::default())
.wrap(MyMw(true).into_middleware())
.wrap(Logger::default()),
)
.await;
let req = test::TestRequest::default().to_request();
let res = test::call_service(&app, req).await;
assert!(res.headers().contains_key(header::WARNING));
}
}

View File

@ -276,7 +276,7 @@ where
ready(Ok(LoggerMiddleware {
service,
inner: self.0.clone(),
inner: Rc::clone(&self.0),
}))
}
}
@ -622,11 +622,7 @@ impl FormatText {
FormatText::ResponseHeader(ref name) => {
let s = if let Some(val) = res.headers().get(name) {
if let Ok(s) = val.to_str() {
s
} else {
"-"
}
val.to_str().unwrap_or("-")
} else {
"-"
};
@ -670,11 +666,7 @@ impl FormatText {
FormatText::RequestTime => *self = FormatText::Str(now.format(&Rfc3339).unwrap()),
FormatText::RequestHeader(ref name) => {
let s = if let Some(val) = req.headers().get(name) {
if let Ok(s) = val.to_str() {
s
} else {
"-"
}
val.to_str().unwrap_or("-")
} else {
"-"
};

View File

@ -15,10 +15,47 @@
//! - Access external services (e.g., [sessions](https://docs.rs/actix-session), etc.)
//!
//! Middleware is registered for each [`App`], [`Scope`](crate::Scope), or
//! [`Resource`](crate::Resource) and executed in opposite order as registration. In general, a
//! middleware is a pair of types that implements the [`Service`] trait and [`Transform`] trait,
//! respectively. The [`new_transform`] and [`call`] methods must return a [`Future`], though it
//! can often be [an immediately-ready one](actix_utils::future::Ready).
//! [`Resource`](crate::Resource) and executed in reverse order of registration.
//!
//! # Simple Middleware
//!
//! In many cases, you can model your middleware as an async function via the [`from_fn()`] helper
//! that provides a natural interface for implementing your desired behaviors.
//!
//! ```
//! # use actix_web::{
//! # App, Error,
//! # body::MessageBody,
//! # dev::{ServiceRequest, ServiceResponse, Service as _},
//! # };
//! use actix_web::middleware::{self, Next};
//!
//! async fn my_mw(
//! req: ServiceRequest,
//! next: Next<impl MessageBody>,
//! ) -> Result<ServiceResponse<impl MessageBody>, Error> {
//! // pre-processing
//!
//! // invoke the wrapped middleware or service
//! let res = next.call(req).await?;
//!
//! // post-processing
//!
//! Ok(res)
//! }
//!
//! App::new()
//! .wrap(middleware::from_fn(my_mw));
//! ```
//!
//! ## Complex Middleware
//!
//! In the more general case, a middleware is a pair of types that implements the [`Service`] trait
//! and [`Transform`] trait, respectively. The [`new_transform`] and [`call`] methods must return a
//! [`Future`], though it can often be [an immediately-ready one](actix_utils::future::Ready).
//!
//! All the built-in middleware use this pattern with pairs of builder (`Transform`) +
//! implementation (`Service`) types.
//!
//! # Ordering
//!
@ -67,7 +104,7 @@
//! Response
//! ```
//! The request _first_ gets processed by the middleware specified _last_ - `MiddlewareC`. It passes
//! the request (modified a modified one) to the next middleware - `MiddlewareB` - _or_ directly
//! the request (possibly a modified one) to the next middleware - `MiddlewareB` - _or_ directly
//! responds to the request (e.g. when the request was invalid or an error occurred). `MiddlewareB`
//! processes the request as well and passes it to `MiddlewareA`, which then passes it to the
//! [`Service`]. In the [`Service`], the extractors will run first. They don't pass the request on,
@ -196,18 +233,6 @@
//! # }
//! ```
//!
//! # Simpler Middleware
//!
//! In many cases, you _can_ actually use an async function via a helper that will provide a more
//! natural flow for your behavior.
//!
//! The experimental `actix_web_lab` crate provides a [`from_fn`][lab_from_fn] utility which allows
//! an async fn to be wrapped and used in the same way as other middleware. See the
//! [`from_fn`][lab_from_fn] docs for more info and examples of it's use.
//!
//! While [`from_fn`][lab_from_fn] is experimental currently, it's likely this helper will graduate
//! to Actix Web in some form, so feedback is appreciated.
//!
//! [`Future`]: std::future::Future
//! [`App`]: crate::App
//! [`FromRequest`]: crate::FromRequest
@ -215,7 +240,7 @@
//! [`Transform`]: crate::dev::Transform
//! [`call`]: crate::dev::Service::call()
//! [`new_transform`]: crate::dev::Transform::new_transform()
//! [lab_from_fn]: https://docs.rs/actix-web-lab/latest/actix_web_lab/middleware/fn.from_fn.html
//! [`from_fn`]: crate
mod compat;
#[cfg(feature = "__compress")]
@ -223,6 +248,7 @@ mod compress;
mod condition;
mod default_headers;
mod err_handlers;
mod from_fn;
mod identity;
mod logger;
mod normalize;
@ -234,6 +260,7 @@ pub use self::{
condition::Condition,
default_headers::DefaultHeaders,
err_handlers::{ErrorHandlerResponse, ErrorHandlers},
from_fn::{from_fn, Next},
identity::Identity,
logger::Logger,
normalize::{NormalizePath, TrailingSlash},
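
For the "Complex Middleware" pattern described in the module docs above, a minimal hand-written `Transform` + `Service` pair might look like the following sketch (not code from this diff; `SayHi`/`SayHiMiddleware` are made-up names):

use std::future::{ready, Ready};

use actix_web::{
    dev::{forward_ready, Service, ServiceRequest, ServiceResponse, Transform},
    Error,
};
use futures_util::future::LocalBoxFuture;

// Builder half: produces the wrapping service when the app is constructed.
pub struct SayHi;

impl<S, B> Transform<S, ServiceRequest> for SayHi
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Response = ServiceResponse<B>;
    type Error = Error;
    type Transform = SayHiMiddleware<S>;
    type InitError = ();
    type Future = Ready<Result<Self::Transform, Self::InitError>>;

    fn new_transform(&self, service: S) -> Self::Future {
        // an immediately-ready future, as the docs note is common
        ready(Ok(SayHiMiddleware { service }))
    }
}

// Service half: called once per request.
pub struct SayHiMiddleware<S> {
    service: S,
}

impl<S, B> Service<ServiceRequest> for SayHiMiddleware<S>
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Response = ServiceResponse<B>;
    type Error = Error;
    type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;

    forward_ready!(service);

    fn call(&self, req: ServiceRequest) -> Self::Future {
        println!("request to {}", req.path());

        let fut = self.service.call(req);

        Box::pin(async move {
            let res = fut.await?;
            println!("response produced");
            Ok(res)
        })
    }
}

Registered with `App::new().wrap(SayHi)`, this behaves like the `from_fn` examples above but requires the two-type boilerplate.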

View File

@ -91,6 +91,35 @@ impl HttpRequest {
&self.head().uri
}
/// Returns request's original full URL.
///
/// Reconstructed URL is best-effort, using [`connection_info`](HttpRequest::connection_info())
/// to get forwarded scheme & host.
///
/// ```
/// use actix_web::test::TestRequest;
/// let req = TestRequest::with_uri("http://10.1.2.3:8443/api?id=4&name=foo")
/// .insert_header(("host", "example.com"))
/// .to_http_request();
///
/// assert_eq!(
/// req.full_url().as_str(),
/// "http://example.com/api?id=4&name=foo",
/// );
/// ```
pub fn full_url(&self) -> url::Url {
let info = self.connection_info();
let scheme = info.scheme();
let host = info.host();
let path_and_query = self
.uri()
.path_and_query()
.map(|paq| paq.as_str())
.unwrap_or("/");
url::Url::parse(&format!("{scheme}://{host}{path_and_query}")).unwrap()
}
/// Read the Request method.
#[inline]
pub fn method(&self) -> &Method {
@ -963,4 +992,27 @@ mod tests {
assert!(format!("{:?}", req).contains(location_header));
}
#[test]
fn check_full_url() {
let req = TestRequest::with_uri("/api?id=4&name=foo").to_http_request();
assert_eq!(
req.full_url().as_str(),
"http://localhost:8080/api?id=4&name=foo",
);
let req = TestRequest::with_uri("https://example.com/api?id=4&name=foo").to_http_request();
assert_eq!(
req.full_url().as_str(),
"https://example.com/api?id=4&name=foo",
);
let req = TestRequest::with_uri("http://10.1.2.3:8443/api?id=4&name=foo")
.insert_header(("host", "example.com"))
.to_http_request();
assert_eq!(
req.full_url().as_str(),
"http://example.com/api?id=4&name=foo",
);
}
}

View File

@ -62,14 +62,14 @@ pub struct Resource<T = ResourceEndpoint> {
impl Resource {
/// Constructs new resource that matches a `path` pattern.
pub fn new<T: IntoPatterns>(path: T) -> Resource {
let fref = Rc::new(RefCell::new(None));
let factory_ref = Rc::new(RefCell::new(None));
Resource {
routes: Vec::new(),
rdef: path.patterns(),
name: None,
endpoint: ResourceEndpoint::new(fref.clone()),
factory_ref: fref,
endpoint: ResourceEndpoint::new(Rc::clone(&factory_ref)),
factory_ref,
guards: Vec::new(),
app_data: None,
default: boxed::factory(fn_service(|req: ServiceRequest| async {

View File

@ -463,7 +463,7 @@ mod tests {
// content type override
let res = HttpResponse::Ok()
.insert_header((CONTENT_TYPE, "text/json"))
.json(&vec!["v1", "v2", "v3"]);
.json(["v1", "v2", "v3"]);
let ct = res.headers().get(CONTENT_TYPE).unwrap();
assert_eq!(ct, HeaderValue::from_static("text/json"));
assert_body_eq!(res, br#"["v1","v2","v3"]"#);

View File

@ -77,7 +77,7 @@ impl ServiceFactory<ServiceRequest> for Route {
fn new_service(&self, _: ()) -> Self::Future {
let fut = self.service.new_service(());
let guards = self.guards.clone();
let guards = Rc::clone(&self.guards);
Box::pin(async move {
let service = fut.await?;

View File

@ -213,7 +213,6 @@ where
///
/// * *Resource* is an entry in resource table which corresponds to requested URL.
/// * *Scope* is a set of resources with common root path.
/// * "StaticFiles" is a service for static files support
///
/// ```
/// use actix_web::{web, App, HttpRequest};

View File

@ -7,13 +7,7 @@ use std::{
time::Duration,
};
#[cfg(any(
feature = "openssl",
feature = "rustls-0_20",
feature = "rustls-0_21",
feature = "rustls-0_22",
feature = "rustls-0_23",
))]
#[cfg(feature = "__tls")]
use actix_http::TlsAcceptorConfig;
use actix_http::{body::MessageBody, Extensions, HttpService, KeepAlive, Request, Response};
use actix_server::{Server, ServerBuilder};
@ -190,7 +184,7 @@ where
/// By default, max connections is set to 256.
#[allow(unused_variables)]
pub fn max_connection_rate(self, num: usize) -> Self {
#[cfg(any(feature = "rustls-0_20", feature = "rustls-0_21", feature = "openssl"))]
#[cfg(feature = "__tls")]
actix_tls::accept::max_concurrent_tls_connect(num);
self
}
@ -243,13 +237,7 @@ where
/// time, the connection is closed.
///
/// By default, the handshake timeout is 3 seconds.
#[cfg(any(
feature = "openssl",
feature = "rustls-0_20",
feature = "rustls-0_21",
feature = "rustls-0_22",
feature = "rustls-0_23",
))]
#[cfg(feature = "__tls")]
pub fn tls_handshake_timeout(self, dur: Duration) -> Self {
self.config
.lock()
@ -522,7 +510,7 @@ where
/// No changes are made to `lst`'s configuration. Ensure it is configured properly before
/// passing ownership to `listen()`.
pub fn listen(mut self, lst: net::TcpListener) -> io::Result<Self> {
let cfg = self.config.clone();
let cfg = Arc::clone(&self.config);
let factory = self.factory.clone();
let addr = lst.local_addr().unwrap();
@ -566,7 +554,7 @@ where
/// Binds to existing listener for accepting incoming plaintext HTTP/1.x or HTTP/2 connections.
#[cfg(feature = "http2")]
pub fn listen_auto_h2c(mut self, lst: net::TcpListener) -> io::Result<Self> {
let cfg = self.config.clone();
let cfg = Arc::clone(&self.config);
let factory = self.factory.clone();
let addr = lst.local_addr().unwrap();
@ -644,7 +632,7 @@ where
config: actix_tls::accept::rustls_0_20::reexports::ServerConfig,
) -> io::Result<Self> {
let factory = self.factory.clone();
let cfg = self.config.clone();
let cfg = Arc::clone(&self.config);
let addr = lst.local_addr().unwrap();
self.sockets.push(Socket {
addr,
@ -695,7 +683,7 @@ where
config: actix_tls::accept::rustls_0_21::reexports::ServerConfig,
) -> io::Result<Self> {
let factory = self.factory.clone();
let cfg = self.config.clone();
let cfg = Arc::clone(&self.config);
let addr = lst.local_addr().unwrap();
self.sockets.push(Socket {
addr,
@ -761,7 +749,7 @@ where
config: actix_tls::accept::rustls_0_22::reexports::ServerConfig,
) -> io::Result<Self> {
let factory = self.factory.clone();
let cfg = self.config.clone();
let cfg = Arc::clone(&self.config);
let addr = lst.local_addr().unwrap();
self.sockets.push(Socket {
addr,
@ -827,7 +815,7 @@ where
config: actix_tls::accept::rustls_0_23::reexports::ServerConfig,
) -> io::Result<Self> {
let factory = self.factory.clone();
let cfg = self.config.clone();
let cfg = Arc::clone(&self.config);
let addr = lst.local_addr().unwrap();
self.sockets.push(Socket {
addr,
@ -892,7 +880,7 @@ where
acceptor: SslAcceptor,
) -> io::Result<Self> {
let factory = self.factory.clone();
let cfg = self.config.clone();
let cfg = Arc::clone(&self.config);
let addr = lst.local_addr().unwrap();
self.sockets.push(Socket {
addr,
@ -949,7 +937,7 @@ where
use actix_rt::net::UnixStream;
use actix_service::{fn_service, ServiceFactoryExt as _};
let cfg = self.config.clone();
let cfg = Arc::clone(&self.config);
let factory = self.factory.clone();
let socket_addr =
net::SocketAddr::new(net::IpAddr::V4(net::Ipv4Addr::new(127, 0, 0, 1)), 8080);
@ -994,7 +982,7 @@ where
use actix_rt::net::UnixStream;
use actix_service::{fn_service, ServiceFactoryExt as _};
let cfg = self.config.clone();
let cfg = Arc::clone(&self.config);
let factory = self.factory.clone();
let socket_addr =
net::SocketAddr::new(net::IpAddr::V4(net::Ipv4Addr::new(127, 0, 0, 1)), 8080);
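
As a short sketch (not from this diff), the `max_connection_rate()` setting gated above by the internal `__tls` feature is applied like any other builder option; with this change it also takes effect for the rustls v0.22/v0.23 backends:

use actix_web::{web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().default_service(web::to(HttpResponse::Ok)))
        // limit concurrent TLS handshakes (only meaningful when a TLS feature is enabled)
        .max_connection_rate(256)
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}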

actix-web/src/thin_data.rs Normal file
View File

@ -0,0 +1,121 @@
use std::any::type_name;
use actix_utils::future::{ready, Ready};
use crate::{dev::Payload, error, FromRequest, HttpRequest};
/// Application data wrapper and extractor for cheaply-cloned types.
///
/// Similar to the [`Data`] wrapper but for `Clone`/`Copy` types that are already an `Arc` internally,
/// share state using some other means when cloned, or are otherwise static data that is very cheap
/// to clone.
///
/// Unlike `Data`, this wrapper clones `T` during extraction. Therefore, it is the user's
/// responsibility to ensure that clones of `T` do actually share the same state, otherwise state
/// may be unexpectedly different across multiple requests.
///
/// Note that if your type is literally an `Arc<T>` then it's recommended to use the
/// [`Data::from(arc)`][data_from_arc] conversion instead.
///
/// # Examples
///
/// ```
/// use actix_web::{
/// web::{self, ThinData},
/// App, HttpResponse, Responder,
/// };
///
/// // Use the `ThinData<T>` extractor to access a database connection pool.
/// async fn index(ThinData(db_pool): ThinData<DbPool>) -> impl Responder {
/// // database action ...
///
/// HttpResponse::Ok()
/// }
///
/// # type DbPool = ();
/// let db_pool = DbPool::default();
///
/// App::new()
/// .app_data(ThinData(db_pool.clone()))
/// .service(web::resource("/").get(index))
/// # ;
/// ```
///
/// [`Data`]: crate::web::Data
/// [data_from_arc]: crate::web::Data#impl-From<Arc<T>>-for-Data<T>
#[derive(Debug, Clone)]
pub struct ThinData<T>(pub T);
impl_more::impl_as_ref!(ThinData<T> => T);
impl_more::impl_as_mut!(ThinData<T> => T);
impl_more::impl_deref_and_mut!(<T> in ThinData<T> => T);
impl<T: Clone + 'static> FromRequest for ThinData<T> {
type Error = crate::Error;
type Future = Ready<Result<Self, Self::Error>>;
#[inline]
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
ready(req.app_data::<Self>().cloned().ok_or_else(|| {
log::debug!(
"Failed to extract `ThinData<{}>` for `{}` handler. For the ThinData extractor to work \
correctly, wrap the data with `ThinData()` and pass it to `App::app_data()`. \
Ensure that types align in both the set and retrieve calls.",
type_name::<T>(),
req.match_name().unwrap_or(req.path())
);
error::ErrorInternalServerError(
"Requested application data is not configured correctly. \
View/enable debug logs for more details.",
)
}))
}
}
#[cfg(test)]
mod tests {
use std::sync::{Arc, Mutex};
use super::*;
use crate::{
http::StatusCode,
test::{call_service, init_service, TestRequest},
web, App, HttpResponse,
};
type TestT = Arc<Mutex<u32>>;
#[actix_rt::test]
async fn thin_data() {
let test_data = TestT::default();
let app = init_service(App::new().app_data(ThinData(test_data.clone())).service(
web::resource("/").to(|td: ThinData<TestT>| {
*td.lock().unwrap() += 1;
HttpResponse::Ok()
}),
))
.await;
for _ in 0..3 {
let req = TestRequest::default().to_request();
let resp = call_service(&app, req).await;
assert_eq!(resp.status(), StatusCode::OK);
}
assert_eq!(*test_data.lock().unwrap(), 3);
}
#[actix_rt::test]
async fn thin_data_missing() {
let app = init_service(
App::new().service(web::resource("/").to(|_: ThinData<u32>| HttpResponse::Ok())),
)
.await;
let req = TestRequest::default().to_request();
let resp = call_service(&app, req).await;
assert_eq!(resp.status(), StatusCode::INTERNAL_SERVER_ERROR);
}
}

View File

@ -0,0 +1,66 @@
//! Semantic HTML responder. See [`Html`].
use crate::{
http::{
header::{self, ContentType, TryIntoHeaderValue},
StatusCode,
},
HttpRequest, HttpResponse, Responder,
};
/// Semantic HTML responder.
///
/// When used as a responder, creates a 200 OK response, sets the correct HTML content type, and
/// uses the string passed to [`Html::new()`] as the body.
///
/// ```
/// # use actix_web::web::Html;
/// Html::new("<p>Hello, World!</p>")
/// # ;
/// ```
#[derive(Debug, Clone, PartialEq, Hash)]
pub struct Html(String);
impl Html {
/// Constructs a new `Html` responder.
pub fn new(html: impl Into<String>) -> Self {
Self(html.into())
}
}
impl Responder for Html {
type Body = String;
fn respond_to(self, _req: &HttpRequest) -> HttpResponse<Self::Body> {
let mut res = HttpResponse::with_body(StatusCode::OK, self.0);
res.headers_mut().insert(
header::CONTENT_TYPE,
ContentType::html().try_into_value().unwrap(),
);
res
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test::TestRequest;
#[test]
fn responder() {
let req = TestRequest::default().to_http_request();
let res = Html::new("<p>Hello, World!</p>");
let res = res.respond_to(&req);
assert!(res.status().is_success());
assert!(res
.headers()
.get(header::CONTENT_TYPE)
.unwrap()
.to_str()
.unwrap()
.starts_with("text/html"));
assert!(res.body().starts_with("<p>"));
}
}
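
As a usage sketch (not from this diff), the responder above can be returned directly from a handler:

use actix_web::{get, web::Html};

#[get("/")]
async fn index() -> Html {
    Html::new("<p>Hello, World!</p>")
}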

View File

@ -3,6 +3,7 @@
mod either;
mod form;
mod header;
mod html;
mod json;
mod path;
mod payload;
@ -13,6 +14,7 @@ pub use self::{
either::Either,
form::{Form, FormConfig, UrlEncoded},
header::Header,
html::Html,
json::{Json, JsonBody, JsonConfig},
path::{Path, PathConfig},
payload::{Payload, PayloadConfig},

View File

@ -2,6 +2,7 @@
//!
//! # Request Extractors
//! - [`Data`]: Application data item
//! - [`ThinData`]: Cheap-to-clone application data item
//! - [`ReqData`]: Request-local data item
//! - [`Path`]: URL path parameters / dynamic segments
//! - [`Query`]: URL query parameters
@ -22,7 +23,8 @@ use actix_router::IntoPatterns;
pub use bytes::{Buf, BufMut, Bytes, BytesMut};
pub use crate::{
config::ServiceConfig, data::Data, redirect::Redirect, request_data::ReqData, types::*,
config::ServiceConfig, data::Data, redirect::Redirect, request_data::ReqData,
thin_data::ThinData, types::*,
};
use crate::{
error::BlockingError, http::Method, service::WebService, FromRequest, Handler, Resource,

View File

@ -15,10 +15,6 @@ repository = "https://github.com/actix/actix-web"
license = "MIT OR Apache-2.0"
edition = "2021"
[lib]
name = "awc"
path = "src/lib.rs"
[package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"]
features = [
@ -33,6 +29,27 @@ features = [
"compress-zstd",
]
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_codec::*",
"actix_http::*",
"actix_rt::*",
"actix_service::*",
"actix_tls::*",
"bytes::*",
"cookie::*",
"cookie",
"futures_core::*",
"h2::*",
"http::*",
"openssl::*",
"rustls::*",
"serde_json::*",
"serde_urlencoded::*",
"serde::*",
"tokio::*",
]
[features]
default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
@ -134,7 +151,10 @@ rcgen = "0.13"
rustls-pemfile = "2"
tokio = { version = "1.24.2", features = ["rt-multi-thread", "macros"] }
zstd = "0.13"
tls-rustls-0_23 = { package = "rustls", version = "0.23" } # add rustls 0.23 with default features to make aws_lc_rs work in tests
tls-rustls-0_23 = { package = "rustls", version = "0.23" } # add rustls 0.23 with default features to make aws_lc_rs work in tests
[lints]
workspace = true
[[example]]
name = "client"

View File

@ -1,6 +1,8 @@
use std::error::Error as StdError;
#[tokio::main]
/// If we want to make requests to addresses starting with `https`, we need to enable the rustls feature of awc
/// `awc = { version = "3.5.0", features = ["rustls"] }`
#[actix_rt::main]
async fn main() -> Result<(), Box<dyn StdError>> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));

View File

@ -163,6 +163,7 @@ mod tests {
use super::*;
#[allow(dead_code)]
struct PinType(PhantomPinned);
impl MessageBody for PinType {

View File

@ -173,12 +173,15 @@ where
};
// acquire an owned permit and carry it with connection
let permit = inner.permits.clone().acquire_owned().await.map_err(|_| {
ConnectError::Io(io::Error::new(
io::ErrorKind::Other,
"failed to acquire semaphore on client connection pool",
))
})?;
let permit = Arc::clone(&inner.permits)
.acquire_owned()
.await
.map_err(|_| {
ConnectError::Io(io::Error::new(
io::ErrorKind::Other,
"failed to acquire semaphore on client connection pool",
))
})?;
let conn = {
let mut conn = None;

View File

@ -49,7 +49,7 @@ impl FrozenClientRequest {
where
B: MessageBody + 'static,
{
RequestSender::Rc(self.head.clone(), None).send_body(
RequestSender::Rc(Rc::clone(&self.head), None).send_body(
self.addr,
self.response_decompress,
self.timeout,
@ -60,7 +60,7 @@ impl FrozenClientRequest {
/// Send a json body.
pub fn send_json<T: Serialize>(&self, value: &T) -> SendClientRequest {
RequestSender::Rc(self.head.clone(), None).send_json(
RequestSender::Rc(Rc::clone(&self.head), None).send_json(
self.addr,
self.response_decompress,
self.timeout,
@ -71,7 +71,7 @@ impl FrozenClientRequest {
/// Send an urlencoded body.
pub fn send_form<T: Serialize>(&self, value: &T) -> SendClientRequest {
RequestSender::Rc(self.head.clone(), None).send_form(
RequestSender::Rc(Rc::clone(&self.head), None).send_form(
self.addr,
self.response_decompress,
self.timeout,
@ -86,7 +86,7 @@ impl FrozenClientRequest {
S: Stream<Item = Result<Bytes, E>> + 'static,
E: Into<BoxError> + 'static,
{
RequestSender::Rc(self.head.clone(), None).send_stream(
RequestSender::Rc(Rc::clone(&self.head), None).send_stream(
self.addr,
self.response_decompress,
self.timeout,
@ -97,7 +97,7 @@ impl FrozenClientRequest {
/// Send an empty body.
pub fn send(&self) -> SendClientRequest {
RequestSender::Rc(self.head.clone(), None).send(
RequestSender::Rc(Rc::clone(&self.head), None).send(
self.addr,
self.response_decompress,
self.timeout,

View File

@ -100,8 +100,6 @@
//! # }
//! ```
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![allow(unknown_lints)] // temp: #[allow(non_local_definitions)]
#![allow(
clippy::type_complexity,

View File

@ -78,7 +78,7 @@ where
RedirectServiceFuture::Tunnel { fut }
}
ConnectRequest::Client(head, body, addr) => {
let connector = self.connector.clone();
let connector = Rc::clone(&self.connector);
let max_redirect_times = self.max_redirect_times;
// backup the uri and method for reuse schema and authority.

View File

@ -3,6 +3,7 @@ _list:
# Format workspace.
fmt:
just --unstable --fmt
cargo +nightly fmt
fd --hidden --type=file --extension=md --extension=yml --exec-batch npx -y prettier --write
@ -17,20 +18,22 @@ msrv := ```
| sed -E 's/^1\.([0-9]{2})$/1\.\1\.0/'
```
msrv_rustup := "+" + msrv
non_linux_all_features_list := ```
cargo metadata --format-version=1 \
| jq '.packages[] | select(.source == null) | .features | keys' \
| jq -r --slurp \
--arg exclusions "tokio-uring,io-uring,experimental-io-uring" \
--arg exclusions "__tls,__compress,tokio-uring,io-uring,experimental-io-uring" \
'add | unique | . - ($exclusions | split(",")) | join(",")'
```
all_crate_features := if os() == "linux" { "--all-features" } else { "--features='" + non_linux_all_features_list + "'" }
all_crate_features := if os() == "linux" {
"--all-features"
} else {
"--features='" + non_linux_all_features_list + "'"
}
[private]
check-min:
cargo hack --workspace check --no-default-features
[private]
check-default:
cargo hack --workspace check
# Run Clippy over workspace.
clippy toolchain="":
@ -53,9 +56,33 @@ test-docs toolchain="": && doc
# Test workspace.
test-all toolchain="": (test toolchain) (test-docs toolchain)
# Test workspace and collect coverage info.
[private]
test-coverage toolchain="":
cargo {{ toolchain }} llvm-cov nextest --no-report {{ all_crate_features }}
cargo {{ toolchain }} llvm-cov --doc --no-report {{ all_crate_features }}
# Test workspace and generate Codecov report.
test-coverage-codecov toolchain="": (test-coverage toolchain)
cargo {{ toolchain }} llvm-cov report --doctests --codecov --output-path=codecov.json
# Test workspace and generate LCOV report.
test-coverage-lcov toolchain="": (test-coverage toolchain)
cargo {{ toolchain }} llvm-cov report --doctests --lcov --output-path=lcov.info
# Document crates in workspace.
doc *args:
RUSTDOCFLAGS="--cfg=docsrs -Dwarnings" cargo +nightly doc --no-deps --workspace {{ all_crate_features }} {{ args }}
doc *args: && doc-set-workspace-crates
RUSTDOCFLAGS="--cfg=docsrs -Dwarnings" cargo +nightly doc --workspace {{ all_crate_features }} {{ args }}
[private]
doc-set-workspace-crates:
#!/usr/bin/env bash
(
echo "window.ALL_CRATES ="
cargo metadata --format-version=1 \
| jq '[.packages[] | select(.source == null) | .targets | map(select(.doc) | .name)] | flatten'
echo ";"
) > "$(cargo metadata --format-version=1 | jq -r '.target_directory')/doc/crates.js"
# Document crates in workspace and watch for changes.
doc-watch:
@ -65,4 +92,42 @@ doc-watch:
# Update READMEs from crate root documentation.
update-readmes: && fmt
cd ./actix-files && cargo rdme --force
cd ./actix-http-test && cargo rdme --force
cd ./actix-router && cargo rdme --force
cd ./actix-multipart && cargo rdme --force
cd ./actix-test && cargo rdme --force
feature_combo_skip_list := if os() == "linux" { "__tls,__compress" } else { "__tls,__compress,experimental-io-uring" }
# Checks compatibility of feature combinations.
check-feature-combinations:
cargo hack --workspace \
--feature-powerset --depth=4 \
--skip={{ feature_combo_skip_list }} \
check
# Check for unintentional external type exposure on all crates in workspace.
check-external-types-all toolchain="+nightly":
#!/usr/bin/env bash
set -euo pipefail
exit=0
for f in $(find . -mindepth 2 -maxdepth 2 -name Cargo.toml | grep -vE "\-codegen/|\-derive/|\-macros/"); do
if ! just check-external-types-manifest "$f" {{ toolchain }}; then exit=1; fi
echo
echo
done
exit $exit
# Check for unintentional external type exposure on all crates in workspace.
check-external-types-all-table toolchain="+nightly":
#!/usr/bin/env bash
set -euo pipefail
for f in $(find . -mindepth 2 -maxdepth 2 -name Cargo.toml | grep -vE "\-codegen/|\-derive/|\-macros/"); do
echo
echo "Checking for $f"
just check-external-types-manifest "$f" {{ toolchain }} --output-format=markdown-table
done
# Check for unintentional external type exposure on a crate.
check-external-types-manifest manifest_path toolchain="+nightly" *extra_args="":
cargo {{ toolchain }} check-external-types --manifest-path "{{ manifest_path }}" {{ extra_args }}

View File

@ -169,3 +169,5 @@ if [ "$GH_RELEASE" = 'y' ] || [ "$GH_RELEASE" = 'Y' ]; then
fi
echo
cargo update >/dev/null 2>&1 || true