
Compare commits


1 Commit

Author SHA1 Message Date
Rob Ede
9a4090761c feat: use RPITIT for FromRequest 2023-12-23 18:54:52 +00:00
183 changed files with 2489 additions and 6000 deletions

.cargo/config.toml
View File

@@ -0,0 +1,14 @@
[alias]
lint = "clippy --workspace --tests --examples --bins -- -Dclippy::todo"
lint-all = "clippy --workspace --all-features --tests --examples --bins -- -Dclippy::todo"
# lib checking
ci-check-min = "hack --workspace check --no-default-features"
ci-check-default = "hack --workspace check"
ci-check-default-tests = "check --workspace --tests"
ci-check-all-feature-powerset="hack --workspace --feature-powerset --skip=__compress,experimental-io-uring check"
ci-check-all-feature-powerset-linux="hack --workspace --feature-powerset --skip=__compress check"
# testing
ci-doctest-default = "test --workspace --doc --no-fail-fast -- --nocapture"
ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture"

View File

@@ -30,41 +30,49 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install nasm
if: matrix.target.os == 'windows-latest'
uses: ilammy/setup-nasm@v1.5.1
- name: Install OpenSSL - name: Install OpenSSL
if: matrix.target.os == 'windows-latest' if: matrix.target.os == 'windows-latest'
shell: bash run: choco install openssl -y --forcex64 --no-progress
- name: Set OpenSSL dir in env
if: matrix.target.os == 'windows-latest'
run: | run: |
set -e echo 'OPENSSL_DIR=C:\Program Files\OpenSSL-Win64' | Out-File -FilePath $env:GITHUB_ENV -Append
choco install openssl --version=1.1.1.2100 -y --no-progress echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' | Out-File -FilePath $env:GITHUB_ENV -Append
echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' >> $GITHUB_ENV
echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV
- name: Install Rust (${{ matrix.version.name }}) - name: Install Rust (${{ matrix.version.name }})
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0 uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
with: with:
toolchain: ${{ matrix.version.version }} toolchain: ${{ matrix.version.version }}
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean - name: Install cargo-hack
uses: taiki-e/install-action@v2.41.7 uses: taiki-e/install-action@v2.22.0
with: with:
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean tool: cargo-hack
- name: check minimal - name: check minimal
run: just check-min run: cargo ci-check-min
- name: check default - name: check default
run: just check-default run: cargo ci-check-default
- name: tests - name: tests
timeout-minutes: 60 timeout-minutes: 60
run: just test run: |
cargo test --lib --tests -p=actix-router --all-features
cargo test --lib --tests -p=actix-http --all-features
cargo test --lib --tests -p=actix-web --features=rustls-0_20,rustls-0_21,openssl -- --skip=test_reading_deflate_encoding_large_random_rustls
cargo test --lib --tests -p=actix-web-codegen --all-features
cargo test --lib --tests -p=awc --all-features
cargo test --lib --tests -p=actix-http-test --all-features
cargo test --lib --tests -p=actix-test --all-features
cargo test --lib --tests -p=actix-files
cargo test --lib --tests -p=actix-multipart --all-features
cargo test --lib --tests -p=actix-web-actors --all-features
- name: CI cache clean - name: Clear the cargo caches
run: cargo-ci-cache-clean run: |
cargo install cargo-cache --version 0.8.3 --no-default-features --features ci-autoclean
cargo-cache
ci_feature_powerset_check: ci_feature_powerset_check:
name: Verify Feature Combinations name: Verify Feature Combinations
@@ -73,19 +81,34 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Free Disk Space - name: Install Rust
run: ./scripts/free-disk-space.sh uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
- name: Setup mold linker - name: Install cargo-hack
uses: rui314/setup-mold@v1 uses: taiki-e/install-action@v2.22.0
with:
tool: cargo-hack
- name: check feature combinations
run: cargo ci-check-all-feature-powerset
- name: check feature combinations
run: cargo ci-check-all-feature-powerset-linux
nextest:
name: nextest
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install Rust - name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0 uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
- name: Install just, cargo-hack - name: Install nextest
uses: taiki-e/install-action@v2.41.7 uses: taiki-e/install-action@v2.22.0
with: with:
tool: just,cargo-hack tool: nextest
- name: Check feature combinations - name: Test with cargo-nextest
run: just check-feature-combinations run: cargo nextest run

View File

@@ -16,13 +16,7 @@ concurrency:
cancel-in-progress: true cancel-in-progress: true
jobs: jobs:
read_msrv:
name: Read MSRV
uses: actions-rust-lang/msrv/.github/workflows/msrv.yml@v0.1.0
build_and_test: build_and_test:
needs: read_msrv
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@@ -32,7 +26,7 @@ jobs:
- { name: macOS, os: macos-latest, triple: x86_64-apple-darwin } - { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
- { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc } - { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc }
version: version:
- { name: msrv, version: "${{ needs.read_msrv.outputs.msrv }}" } - { name: msrv, version: 1.68.0 }
- { name: stable, version: stable } - { name: stable, version: stable }
name: ${{ matrix.target.name }} / ${{ matrix.version.name }} name: ${{ matrix.target.name }} / ${{ matrix.version.name }}
@@ -41,49 +35,56 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install nasm
if: matrix.target.os == 'windows-latest'
uses: ilammy/setup-nasm@v1.5.1
- name: Install OpenSSL - name: Install OpenSSL
if: matrix.target.os == 'windows-latest' if: matrix.target.os == 'windows-latest'
shell: bash run: choco install openssl -y --forcex64 --no-progress
- name: Set OpenSSL dir in env
if: matrix.target.os == 'windows-latest'
run: | run: |
set -e echo 'OPENSSL_DIR=C:\Program Files\OpenSSL-Win64' | Out-File -FilePath $env:GITHUB_ENV -Append
choco install openssl --version=1.1.1.2100 -y --no-progress echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' | Out-File -FilePath $env:GITHUB_ENV -Append
echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' >> $GITHUB_ENV
echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV
- name: Setup mold linker
if: matrix.target.os == 'ubuntu-latest'
uses: rui314/setup-mold@v1
- name: Install Rust (${{ matrix.version.name }}) - name: Install Rust (${{ matrix.version.name }})
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0 uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
with: with:
toolchain: ${{ matrix.version.version }} toolchain: ${{ matrix.version.version }}
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean - name: Install cargo-hack
uses: taiki-e/install-action@v2.41.7 uses: taiki-e/install-action@v2.22.0
with: with:
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean tool: cargo-hack
- name: workaround MSRV issues - name: workaround MSRV issues
if: matrix.version.name == 'msrv' if: matrix.version.name == 'msrv'
run: just downgrade-for-msrv run: |
cargo update -p=clap --precise=4.3.24
cargo update -p=clap_lex --precise=0.5.0
cargo update -p=anstyle --precise=1.0.2
- name: check minimal - name: check minimal
run: just check-min run: cargo ci-check-min
- name: check default - name: check default
run: just check-default run: cargo ci-check-default
- name: tests - name: tests
timeout-minutes: 60 timeout-minutes: 60
run: just test run: |
cargo test --lib --tests -p=actix-router --all-features
cargo test --lib --tests -p=actix-http --all-features
cargo test --lib --tests -p=actix-web --features=rustls-0_20,rustls-0_21,openssl -- --skip=test_reading_deflate_encoding_large_random_rustls
cargo test --lib --tests -p=actix-web-codegen --all-features
cargo test --lib --tests -p=awc --all-features
cargo test --lib --tests -p=actix-http-test --all-features
cargo test --lib --tests -p=actix-test --all-features
cargo test --lib --tests -p=actix-files
cargo test --lib --tests -p=actix-multipart --all-features
cargo test --lib --tests -p=actix-web-actors --all-features
- name: CI cache clean - name: Clear the cargo caches
run: cargo-ci-cache-clean run: |
cargo install cargo-cache --version 0.8.3 --no-default-features --features ci-autoclean
cargo-cache
io-uring: io-uring:
name: io-uring tests name: io-uring tests
@@ -92,7 +93,7 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install Rust - name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0 uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
with: with:
toolchain: nightly toolchain: nightly
@@ -108,14 +109,10 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install Rust (nightly) - name: Install Rust (nightly)
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0 uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
with: with:
toolchain: nightly toolchain: nightly
- name: Install just
uses: taiki-e/install-action@v2.41.7
with:
tool: just
- name: doc tests - name: doc tests
run: just test-docs run: cargo ci-doctest
timeout-minutes: 60

View File

@@ -17,24 +17,21 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install Rust (nightly) - name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0 uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
with: with:
toolchain: nightly components: llvm-tools-preview
components: llvm-tools
- name: Install just, cargo-llvm-cov, cargo-nextest - name: Install cargo-llvm-cov
uses: taiki-e/install-action@v2.41.7 uses: taiki-e/install-action@v2.22.0
with: with:
tool: just,cargo-llvm-cov,cargo-nextest tool: cargo-llvm-cov
- name: Generate code coverage - name: Generate code coverage
run: just test-coverage-codecov run: cargo llvm-cov --workspace --all-features --codecov --output-path codecov.json
- name: Upload coverage to Codecov - name: Upload coverage to Codecov
uses: codecov/codecov-action@v4.5.0 uses: codecov/codecov-action@v3.1.4
with: with:
files: codecov.json files: codecov.json
fail_ci_if_error: true fail_ci_if_error: true
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -17,13 +17,12 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install Rust (nightly) - uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with: with:
toolchain: nightly toolchain: nightly
components: rustfmt components: rustfmt
- name: Check with Rustfmt - name: Check with rustfmt
run: cargo fmt --all -- --check run: cargo fmt --all -- --check
clippy: clippy:
@@ -36,7 +35,7 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install Rust - name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0 uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
with: with:
components: clippy components: clippy
@@ -54,8 +53,7 @@ jobs:
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install Rust (nightly) - uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with: with:
toolchain: nightly toolchain: nightly
components: rust-docs components: rust-docs
@@ -65,52 +63,25 @@ jobs:
RUSTDOCFLAGS: -D warnings RUSTDOCFLAGS: -D warnings
run: cargo +nightly doc --no-deps --workspace --all-features run: cargo +nightly doc --no-deps --workspace --all-features
check-external-types:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install Rust (nightly-2024-05-01)
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with:
toolchain: nightly-2024-05-01
- name: Install just
uses: taiki-e/install-action@v2.41.7
with:
tool: just
- name: Install cargo-check-external-types
uses: taiki-e/cache-cargo-install-action@v2.0.1
with:
tool: cargo-check-external-types
- name: check external types
run: just check-external-types-all +nightly-2024-05-01
public-api-diff: public-api-diff:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout main branch - uses: actions/checkout@v4
uses: actions/checkout@v4
with: with:
ref: ${{ github.base_ref }} ref: ${{ github.base_ref }}
- name: Checkout PR branch - uses: actions/checkout@v4
uses: actions/checkout@v4
- name: Install Rust (nightly-2024-06-07) - uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
with: with:
toolchain: nightly-2024-06-07 toolchain: nightly-2023-08-25
- name: Install cargo-public-api - uses: taiki-e/cache-cargo-install-action@v1.3.0
uses: taiki-e/install-action@v2.41.7
with: with:
tool: cargo-public-api tool: cargo-public-api
- name: Generate API diff - name: generate API diff
run: | run: |
for f in $(find -mindepth 2 -maxdepth 2 -name Cargo.toml); do for f in $(find -mindepth 2 -maxdepth 2 -name Cargo.toml); do
cargo public-api --manifest-path "$f" --simplified diff ${{ github.event.pull_request.base.sha }}..${{ github.sha }} cargo public-api --manifest-path "$f" diff ${{ github.event.pull_request.base.sha }}..${{ github.sha }}
done done

.github/workflows/upload-doc.yml
View File

@@ -0,0 +1,41 @@
name: Upload Documentation
on:
push:
branches: [master]
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build:
permissions:
contents: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
with:
toolchain: nightly
- name: Build Docs
run: cargo +nightly doc --no-deps --workspace --all-features
env:
RUSTDOCFLAGS: --cfg=docsrs
- name: Tweak HTML
run: echo '<meta http-equiv="refresh" content="0;url=actix_web/index.html">' > target/doc/index.html
- name: Deploy to GitHub Pages
uses: JamesIves/github-pages-deploy-action@v4.5.0
with:
folder: target/doc
single-commit: true

View File

@@ -1,5 +1,5 @@
overrides: overrides:
- files: "*.md" - files: '*.md'
options: options:
printWidth: 9999 printWidth: 9999
proseWrap: never proseWrap: never

View File

@@ -15,11 +15,9 @@ members = [
] ]
[workspace.package] [workspace.package]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
edition = "2021" edition = "2021"
rust-version = "1.72" rust-version = "1.68"
[profile.dev] [profile.dev]
# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much. # Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.

View File

@@ -2,18 +2,6 @@
## Unreleased ## Unreleased
## 0.6.6
- Update `tokio-uring` dependency to `0.4`.
- Minimum supported Rust version (MSRV) is now 1.72.
## 0.6.5
- Fix handling of special characters in filenames.
## 0.6.4
- Fix handling of newlines in filenames.
- Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency. - Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.
## 0.6.3 ## 0.6.3

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "actix-files" name = "actix-files"
version = "0.6.6" version = "0.6.3"
authors = [ authors = [
"Nikolay Kim <fafhrd91@gmail.com>", "Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>", "Rob Ede <robjtede@icloud.com>",
@@ -13,14 +13,9 @@ categories = ["asynchronous", "web-programming::http-server"]
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
edition = "2021" edition = "2021"
[package.metadata.cargo_check_external_types] [lib]
allowed_external_types = [ name = "actix_files"
"actix_http::*", path = "src/lib.rs"
"actix_service::*",
"actix_web::*",
"http::*",
"mime::*",
]
[features] [features]
experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"] experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]
@@ -45,12 +40,12 @@ v_htmlescape = "0.15.5"
# experimental-io-uring # experimental-io-uring
[target.'cfg(target_os = "linux")'.dependencies] [target.'cfg(target_os = "linux")'.dependencies]
tokio-uring = { version = "0.5", optional = true, features = ["bytes"] } tokio-uring = { version = "0.4", optional = true, features = ["bytes"] }
actix-server = { version = "2.4", optional = true } # ensure matching tokio-uring versions actix-server = { version = "2.2", optional = true } # ensure matching tokio-uring versions
[dev-dependencies] [dev-dependencies]
actix-rt = "2.7" actix-rt = "2.7"
actix-test = "0.1" actix-test = "0.1"
actix-web = "4" actix-web = "4"
env_logger = "0.11" env_logger = "0.10"
tempfile = "3.2" tempfile = "3.2"

View File

@@ -1,32 +1,18 @@
# `actix-files` # actix-files
<!-- prettier-ignore-start --> > Static file serving for Actix Web
[![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files) [![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files)
[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.6)](https://docs.rs/actix-files/0.6.6) [![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.3)](https://docs.rs/actix-files/0.6.3)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg) ![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg)
![License](https://img.shields.io/crates/l/actix-files.svg) ![License](https://img.shields.io/crates/l/actix-files.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-files/0.6.6/status.svg)](https://deps.rs/crate/actix-files/0.6.6) [![dependency status](https://deps.rs/crate/actix-files/0.6.3/status.svg)](https://deps.rs/crate/actix-files/0.6.3)
[![Download](https://img.shields.io/crates/d/actix-files.svg)](https://crates.io/crates/actix-files) [![Download](https://img.shields.io/crates/d/actix-files.svg)](https://crates.io/crates/actix-files)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end --> ## Documentation & Resources
<!-- cargo-rdme start --> - [API Documentation](https://docs.rs/actix-files)
- [Example Project](https://github.com/actix/examples/tree/master/basics/static-files)
Static file serving for Actix Web. - Minimum Supported Rust Version (MSRV): 1.68
Provides a non-blocking service for serving static files from disk.
## Examples
```rust
use actix_web::App;
use actix_files::Files;
let app = App::new()
.service(Files::new("/static", ".").prefer_utf8(true));
```
<!-- cargo-rdme end -->

View File

@@ -75,7 +75,7 @@ mod tests {
dev::ServiceFactory, dev::ServiceFactory,
guard, guard,
http::{ http::{
header::{self, ContentDisposition, DispositionParam}, header::{self, ContentDisposition, DispositionParam, DispositionType},
Method, StatusCode, Method, StatusCode,
}, },
middleware::Compress, middleware::Compress,
@@ -307,11 +307,11 @@ mod tests {
let resp = file.respond_to(&req); let resp = file.respond_to(&req);
assert_eq!( assert_eq!(
resp.headers().get(header::CONTENT_TYPE).unwrap(), resp.headers().get(header::CONTENT_TYPE).unwrap(),
"text/javascript", "application/javascript; charset=utf-8"
); );
assert_eq!( assert_eq!(
resp.headers().get(header::CONTENT_DISPOSITION).unwrap(), resp.headers().get(header::CONTENT_DISPOSITION).unwrap(),
"inline; filename=\"test.js\"", "inline; filename=\"test.js\""
); );
} }
@@ -568,30 +568,6 @@ mod tests {
assert_eq!(bytes, data); assert_eq!(bytes, data);
} }
#[cfg(not(target_os = "windows"))]
#[actix_rt::test]
async fn test_static_files_with_special_characters() {
// Create the file we want to test against ad-hoc. We can't check it in as otherwise
// Windows can't even checkout this repository.
let temp_dir = tempfile::tempdir().unwrap();
let file_with_newlines = temp_dir.path().join("test\n\x0B\x0C\rnewline.text");
fs::write(&file_with_newlines, "Look at my newlines").unwrap();
let srv = test::init_service(
App::new().service(Files::new("/", temp_dir.path()).index_file("Cargo.toml")),
)
.await;
let request = TestRequest::get()
.uri("/test%0A%0B%0C%0Dnewline.text")
.to_request();
let response = test::call_service(&srv, request).await;
assert_eq!(response.status(), StatusCode::OK);
let bytes = test::read_body(response).await;
let data = web::Bytes::from(fs::read(file_with_newlines).unwrap());
assert_eq!(bytes, data);
}
#[actix_rt::test] #[actix_rt::test]
async fn test_files_not_allowed() { async fn test_files_not_allowed() {
let srv = test::init_service(App::new().service(Files::new("/", "."))).await; let srv = test::init_service(App::new().service(Files::new("/", "."))).await;
@@ -864,9 +840,9 @@ mod tests {
#[actix_rt::test] #[actix_rt::test]
async fn test_percent_encoding_2() { async fn test_percent_encoding_2() {
let temp_dir = tempfile::tempdir().unwrap(); let tmpdir = tempfile::tempdir().unwrap();
let filename = match cfg!(unix) { let filename = match cfg!(unix) {
true => "ض:?#[]{}<>()@!$&'`|*+,;= %20\n.test", true => "ض:?#[]{}<>()@!$&'`|*+,;= %20.test",
false => "ض#[]{}()@!$&'`+,;= %20.test", false => "ض#[]{}()@!$&'`+,;= %20.test",
}; };
let filename_encoded = filename let filename_encoded = filename
@@ -876,9 +852,9 @@ mod tests {
write!(&mut buf, "%{:02X}", c).unwrap(); write!(&mut buf, "%{:02X}", c).unwrap();
buf buf
}); });
std::fs::File::create(temp_dir.path().join(filename)).unwrap(); std::fs::File::create(tmpdir.path().join(filename)).unwrap();
let srv = test::init_service(App::new().service(Files::new("/", temp_dir.path()))).await; let srv = test::init_service(App::new().service(Files::new("", tmpdir.path()))).await;
let req = TestRequest::get() let req = TestRequest::get()
.uri(&format!("/{}", filename_encoded)) .uri(&format!("/{}", filename_encoded))

View File

@@ -24,6 +24,7 @@ use bitflags::bitflags;
use derive_more::{Deref, DerefMut}; use derive_more::{Deref, DerefMut};
use futures_core::future::LocalBoxFuture; use futures_core::future::LocalBoxFuture;
use mime::Mime; use mime::Mime;
use mime_guess::from_path;
use crate::{encoding::equiv_utf8_text, range::HttpRange}; use crate::{encoding::equiv_utf8_text, range::HttpRange};
@@ -127,7 +128,7 @@ impl NamedFile {
} }
}; };
let ct = mime_guess::from_path(&path).first_or_octet_stream(); let ct = from_path(&path).first_or_octet_stream();
let disposition = match ct.type_() { let disposition = match ct.type_() {
mime::IMAGE | mime::TEXT | mime::AUDIO | mime::VIDEO => DispositionType::Inline, mime::IMAGE | mime::TEXT | mime::AUDIO | mime::VIDEO => DispositionType::Inline,
@@ -139,13 +140,7 @@ impl NamedFile {
_ => DispositionType::Attachment, _ => DispositionType::Attachment,
}; };
// replace special characters in filenames which could occur on some filesystems let mut parameters = vec![DispositionParam::Filename(String::from(filename.as_ref()))];
let filename_s = filename
.replace('\n', "%0A") // \n line break
.replace('\x0B', "%0B") // \v vertical tab
.replace('\x0C', "%0C") // \f form feed
.replace('\r', "%0D"); // \r carriage return
let mut parameters = vec![DispositionParam::Filename(filename_s)];
if !filename.is_ascii() { if !filename.is_ascii() {
parameters.push(DispositionParam::FilenameExt(ExtendedValue { parameters.push(DispositionParam::FilenameExt(ExtendedValue {
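The side of this hunk that introduces `filename_s` percent-escapes control characters before they reach the `Content-Disposition` filename parameter, matching the "newlines/special characters in filenames" fixes noted in the actix-files changelog above. A minimal standalone sketch of that escaping follows; the helper name is hypothetical (actix-files performs the replacements inline on the filename).

```rust
// Hypothetical helper mirroring the escaping shown in the hunk above;
// actix-files does these replacements inline rather than via a function.
fn escape_disposition_filename(filename: &str) -> String {
    filename
        .replace('\n', "%0A") // \n line feed
        .replace('\x0B', "%0B") // \v vertical tab
        .replace('\x0C', "%0C") // \f form feed
        .replace('\r', "%0D") // \r carriage return
}

fn main() {
    assert_eq!(
        escape_disposition_filename("report\r\n.txt"),
        "report%0D%0A.txt"
    );
}
```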

View File

@@ -3,7 +3,6 @@ use std::{
str::FromStr, str::FromStr,
}; };
use actix_utils::future::{ready, Ready};
use actix_web::{dev::Payload, FromRequest, HttpRequest}; use actix_web::{dev::Payload, FromRequest, HttpRequest};
use crate::error::UriSegmentError; use crate::error::UriSegmentError;
@@ -88,10 +87,10 @@ impl AsRef<Path> for PathBufWrap {
impl FromRequest for PathBufWrap { impl FromRequest for PathBufWrap {
type Error = UriSegmentError; type Error = UriSegmentError;
type Future = Ready<Result<Self, Self::Error>>;
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { #[inline]
ready(req.match_info().unprocessed().parse()) async fn from_request(req: &HttpRequest, _: &mut Payload) -> Result<Self, Self::Error> {
req.match_info().unprocessed().parse()
} }
} }
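This hunk is the heart of the commit: `FromRequest for PathBufWrap` drops the `type Future = Ready<...>` associated type in favour of `async fn from_request`. The sketch below shows the same before/after shape with simplified stand-in types; it is an illustration under those assumptions, not actix-web's actual trait definition (which also takes a `&mut Payload` and uses richer error types). `async fn` in traits stabilised in Rust 1.75.

```rust
// Simplified stand-ins; `HttpRequest`, the traits and the extractor here are
// illustrative only, not actix-web's real API.
use std::{convert::Infallible, path::PathBuf};

struct HttpRequest {
    path: String,
}

// Before: the extractor must name its future type explicitly.
trait FromRequestWithFuture: Sized {
    type Error;
    type Future: std::future::Future<Output = Result<Self, Self::Error>>;
    fn from_request(req: &HttpRequest) -> Self::Future;
}

// After: `async fn` lets the compiler supply the future type.
trait FromRequestAsync: Sized {
    type Error;
    async fn from_request(req: &HttpRequest) -> Result<Self, Self::Error>;
}

struct PathBufWrap(PathBuf);

impl FromRequestAsync for PathBufWrap {
    type Error = Infallible;

    async fn from_request(req: &HttpRequest) -> Result<Self, Self::Error> {
        // Purely synchronous work is fine here; no `ready()`/`Ready` wrapper needed.
        Ok(PathBufWrap(PathBuf::from(&req.path)))
    }
}

fn main() {
    // Awaiting the extractor needs an executor (actix-rt, tokio, ...), omitted here.
    let _req = HttpRequest { path: "/static/logo.png".into() };
}
```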

View File

@@ -2,10 +2,6 @@
## Unreleased ## Unreleased
- Minimum supported Rust version (MSRV) is now 1.72.
## 3.2.0
- Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency. - Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.
## 3.1.0 ## 3.1.0

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "actix-http-test" name = "actix-http-test"
version = "3.2.0" version = "3.1.0"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Various helpers for Actix applications to use during testing" description = "Various helpers for Actix applications to use during testing"
keywords = ["http", "web", "framework", "async", "futures"] keywords = ["http", "web", "framework", "async", "futures"]
@@ -18,17 +18,9 @@ edition = "2021"
[package.metadata.docs.rs] [package.metadata.docs.rs]
features = [] features = []
[package.metadata.cargo_check_external_types] [lib]
allowed_external_types = [ name = "actix_http_test"
"actix_codec::*", path = "src/lib.rs"
"actix_http::*",
"actix_server::*",
"awc::*",
"bytes::*",
"futures_core::*",
"http::*",
"tokio::*",
]
[features] [features]
default = [] default = []

View File

@@ -1,20 +1,17 @@
# `actix-http-test` # actix-http-test
<!-- prettier-ignore-start --> > Various helpers for Actix applications to use during testing.
[![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test) [![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test)
[![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.2.0)](https://docs.rs/actix-http-test/3.2.0) [![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.1.0)](https://docs.rs/actix-http-test/3.1.0)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg) ![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http-test) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http-test)
<br> <br>
[![Dependency Status](https://deps.rs/crate/actix-http-test/3.2.0/status.svg)](https://deps.rs/crate/actix-http-test/3.2.0) [![Dependency Status](https://deps.rs/crate/actix-http-test/3.1.0/status.svg)](https://deps.rs/crate/actix-http-test/3.1.0)
[![Download](https://img.shields.io/crates/d/actix-http-test.svg)](https://crates.io/crates/actix-http-test) [![Download](https://img.shields.io/crates/d/actix-http-test.svg)](https://crates.io/crates/actix-http-test)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end --> ## Documentation & Resources
<!-- cargo-rdme start --> - [API Documentation](https://docs.rs/actix-http-test)
- Minimum Supported Rust Version (MSRV): 1.68
Various helpers for Actix applications to use during testing.
<!-- cargo-rdme end -->

View File

@@ -2,51 +2,14 @@
## Unreleased ## Unreleased
## 3.8.0
### Added
- Add `error::InvalidStatusCode` re-export.
## 3.7.0
### Added
- Add `rustls-0_23` crate feature
- Add `{h1::H1Service, h2::H2Service, HttpService}::rustls_0_23()` and `HttpService::rustls_0_23_with_config()` service constructors.
### Changed
- Update `brotli` dependency to `6`.
- Minimum supported Rust version (MSRV) is now 1.72.
## 3.6.0
### Added
- Add `rustls-0_22` crate feature.
- Add `{h1::H1Service, h2::H2Service, HttpService}::rustls_0_22()` and `HttpService::rustls_0_22_with_config()` service constructors.
- Implement `From<&HeaderMap>` for `http::HeaderMap`.
## 3.5.1
### Fixed
- Prevent hang when returning zero-sized response bodies through compression layer.
## 3.5.0
### Added
- Implement `From<HeaderMap>` for `http::HeaderMap`.
### Changed ### Changed
- Updated `zstd` dependency to `0.13`. - Updated `zstd` dependency to `0.13`.
- Implemented `From<HeaderMap>` for `http::HeaderMap`.
### Fixed ### Fixed
- Prevent compression of zero-sized response bodies. - Do not encode zero-sized response bodies
## 3.4.0 ## 3.4.0

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "actix-http" name = "actix-http"
version = "3.8.0" version = "3.4.0"
authors = [ authors = [
"Nikolay Kim <fafhrd91@gmail.com>", "Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>", "Rob Ede <robjtede@icloud.com>",
@@ -20,86 +20,48 @@ edition.workspace = true
rust-version.workspace = true rust-version.workspace = true
[package.metadata.docs.rs] [package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"] # features that docs.rs will build with
features = [ features = ["http2", "ws", "openssl", "rustls-0_20", "rustls-0_21", "compress-brotli", "compress-gzip", "compress-zstd"]
"http2",
"ws",
"openssl",
"rustls-0_20",
"rustls-0_21",
"rustls-0_22",
"rustls-0_23",
"compress-brotli",
"compress-gzip",
"compress-zstd",
]
[package.metadata.cargo_check_external_types] [lib]
allowed_external_types = [ name = "actix_http"
"actix_codec::*", path = "src/lib.rs"
"actix_service::*",
"actix_tls::*",
"actix_utils::*",
"bytes::*",
"bytestring::*",
"encoding_rs::*",
"futures_core::*",
"h2::*",
"http::*",
"httparse::*",
"language_tags::*",
"mime::*",
"openssl::*",
"rustls::*",
"tokio_util::*",
"tokio::*",
]
[features] [features]
default = [] default = []
# HTTP/2 protocol support # HTTP/2 protocol support
http2 = ["dep:h2"] http2 = ["h2"]
# WebSocket protocol implementation # WebSocket protocol implementation
ws = [ ws = [
"dep:local-channel", "local-channel",
"dep:base64", "base64",
"dep:rand", "rand",
"dep:sha1", "sha1",
] ]
# TLS via OpenSSL # TLS via OpenSSL
openssl = ["__tls", "actix-tls/accept", "actix-tls/openssl"] openssl = ["actix-tls/accept", "actix-tls/openssl"]
# TLS via Rustls v0.20 # TLS via Rustls v0.20
rustls = ["__tls", "rustls-0_20"] rustls = ["rustls-0_20"]
# TLS via Rustls v0.20 # TLS via Rustls v0.20
rustls-0_20 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_20"] rustls-0_20 = ["actix-tls/accept", "actix-tls/rustls-0_20"]
# TLS via Rustls v0.21 # TLS via Rustls v0.21
rustls-0_21 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_21"] rustls-0_21 = ["actix-tls/accept", "actix-tls/rustls-0_21"]
# TLS via Rustls v0.22
rustls-0_22 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_22"]
# TLS via Rustls v0.23
rustls-0_23 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_23"]
# Compression codecs # Compression codecs
compress-brotli = ["__compress", "dep:brotli"] compress-brotli = ["__compress", "brotli"]
compress-gzip = ["__compress", "dep:flate2"] compress-gzip = ["__compress", "flate2"]
compress-zstd = ["__compress", "dep:zstd"] compress-zstd = ["__compress", "zstd"]
# Internal (PRIVATE!) features used to aid testing and checking feature status. # Internal (PRIVATE!) features used to aid testing and checking feature status.
# Don't rely on these whatsoever. They are semver-exempt and may disappear at anytime. # Don't rely on these whatsoever. They are semver-exempt and may disappear at anytime.
__compress = [] __compress = []
# Internal (PRIVATE!) features used to aid checking feature status.
# Don't rely on these whatsoever. They may disappear at anytime.
__tls = []
[dependencies] [dependencies]
actix-service = "2" actix-service = "2"
actix-codec = "0.5" actix-codec = "0.5"
@@ -127,59 +89,54 @@ tokio-util = { version = "0.7", features = ["io", "codec"] }
tracing = { version = "0.1.30", default-features = false, features = ["log"] } tracing = { version = "0.1.30", default-features = false, features = ["log"] }
# http2 # http2
h2 = { version = "0.3.26", optional = true } h2 = { version = "0.3.17", optional = true }
# websockets # websockets
local-channel = { version = "0.1", optional = true } local-channel = { version = "0.1", optional = true }
base64 = { version = "0.22", optional = true } base64 = { version = "0.21", optional = true }
rand = { version = "0.8", optional = true } rand = { version = "0.8", optional = true }
sha1 = { version = "0.10", optional = true } sha1 = { version = "0.10", optional = true }
# openssl/rustls # openssl/rustls
actix-tls = { version = "3.4", default-features = false, optional = true } actix-tls = { version = "3.1", default-features = false, optional = true }
# compress-* # compress-*
brotli = { version = "6", optional = true } brotli = { version = "3.3.3", optional = true }
flate2 = { version = "1.0.13", optional = true } flate2 = { version = "1.0.13", optional = true }
zstd = { version = "0.13", optional = true } zstd = { version = "0.13", optional = true }
[dev-dependencies] [dev-dependencies]
actix-http-test = { version = "3", features = ["openssl"] } actix-http-test = { version = "3", features = ["openssl"] }
actix-server = "2" actix-server = "2"
actix-tls = { version = "3.4", features = ["openssl", "rustls-0_23-webpki-roots"] } actix-tls = { version = "3.1", features = ["openssl"] }
actix-web = "4" actix-web = "4"
async-stream = "0.3" async-stream = "0.3"
criterion = { version = "0.5", features = ["html_reports"] } criterion = { version = "0.5", features = ["html_reports"] }
divan = "0.1.8" env_logger = "0.10"
env_logger = "0.11"
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] } futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
memchr = "2.4" memchr = "2.4"
once_cell = "1.9" once_cell = "1.9"
rcgen = "0.13" rcgen = "0.11"
regex = "1.3" regex = "1.3"
rustversion = "1" rustversion = "1"
rustls-pemfile = "2" rustls-pemfile = "1"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
static_assertions = "1" static_assertions = "1"
tls-openssl = { package = "openssl", version = "0.10.55" } tls-openssl = { package = "openssl", version = "0.10.55" }
tls-rustls_023 = { package = "rustls", version = "0.23" } tls-rustls_021 = { package = "rustls", version = "0.21" }
tokio = { version = "1.24.2", features = ["net", "rt", "macros"] } tokio = { version = "1.24.2", features = ["net", "rt", "macros"] }
[[example]] [[example]]
name = "ws" name = "ws"
required-features = ["ws", "rustls-0_23"] required-features = ["ws", "rustls-0_21"]
[[example]] [[example]]
name = "tls_rustls" name = "tls_rustls"
required-features = ["http2", "rustls-0_23"] required-features = ["http2", "rustls-0_21"]
[[bench]] [[bench]]
name = "response-body-compression" name = "response-body-compression"
harness = false harness = false
required-features = ["compress-brotli", "compress-gzip", "compress-zstd"] required-features = ["compress-brotli", "compress-gzip", "compress-zstd"]
[[bench]]
name = "date-formatting"
harness = false

View File

@@ -5,16 +5,21 @@
<!-- prettier-ignore-start --> <!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http) [![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http)
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.8.0)](https://docs.rs/actix-http/3.8.0) [![Documentation](https://docs.rs/actix-http/badge.svg?version=3.4.0)](https://docs.rs/actix-http/3.4.0)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg) ![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-http/3.8.0/status.svg)](https://deps.rs/crate/actix-http/3.8.0) [![dependency status](https://deps.rs/crate/actix-http/3.4.0/status.svg)](https://deps.rs/crate/actix-http/3.4.0)
[![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http) [![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end --> <!-- prettier-ignore-end -->
## Documentation & Resources
- [API Documentation](https://docs.rs/actix-http)
- Minimum Supported Rust Version (MSRV): 1.68
## Examples ## Examples
```rust ```rust

View File

@@ -1,20 +0,0 @@
use std::time::SystemTime;
use actix_http::header::HttpDate;
use divan::{black_box, AllocProfiler, Bencher};
#[global_allocator]
static ALLOC: AllocProfiler = AllocProfiler::system();
#[divan::bench]
fn date_formatting(b: Bencher<'_, '_>) {
let now = SystemTime::now();
b.bench(|| {
black_box(HttpDate::from(black_box(now)).to_string());
})
}
fn main() {
divan::main();
}

View File

@@ -8,7 +8,7 @@
use std::{convert::Infallible, io}; use std::{convert::Infallible, io};
use actix_http::{body::BodyStream, HttpService, Request, Response, StatusCode}; use actix_http::{HttpService, Request, Response, StatusCode};
use actix_server::Server; use actix_server::Server;
#[tokio::main(flavor = "current_thread")] #[tokio::main(flavor = "current_thread")]
@@ -19,12 +19,7 @@ async fn main() -> io::Result<()> {
.bind("h2c-detect", ("127.0.0.1", 8080), || { .bind("h2c-detect", ("127.0.0.1", 8080), || {
HttpService::build() HttpService::build()
.finish(|_req: Request| async move { .finish(|_req: Request| async move {
Ok::<_, Infallible>(Response::build(StatusCode::OK).body(BodyStream::new( Ok::<_, Infallible>(Response::build(StatusCode::OK).body("Hello!"))
futures_util::stream::iter([
Ok::<_, String>("123".into()),
Err("wertyuikmnbvcxdfty6t".to_owned()),
]),
)))
}) })
.tcp_auto_h2c() .tcp_auto_h2c()
})? })?

View File

@@ -12,7 +12,7 @@
//! Protocol: HTTP/1.1 //! Protocol: HTTP/1.1
//! ``` //! ```
extern crate tls_rustls_023 as rustls; extern crate tls_rustls_021 as rustls;
use std::io; use std::io;
@@ -36,34 +36,31 @@ async fn main() -> io::Result<()> {
); );
ok::<_, Error>(Response::ok().set_body(body)) ok::<_, Error>(Response::ok().set_body(body))
}) })
.rustls_0_23(rustls_config()) .rustls_021(rustls_config())
})? })?
.run() .run()
.await .await
} }
fn rustls_config() -> rustls::ServerConfig { fn rustls_config() -> rustls::ServerConfig {
let rcgen::CertifiedKey { cert, key_pair } = let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap();
rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap(); let cert_file = cert.serialize_pem().unwrap();
let cert_file = cert.pem(); let key_file = cert.serialize_private_key_pem();
let key_file = key_pair.serialize_pem();
let cert_file = &mut io::BufReader::new(cert_file.as_bytes()); let cert_file = &mut io::BufReader::new(cert_file.as_bytes());
let key_file = &mut io::BufReader::new(key_file.as_bytes()); let key_file = &mut io::BufReader::new(key_file.as_bytes());
let cert_chain = rustls_pemfile::certs(cert_file) let cert_chain = rustls_pemfile::certs(cert_file)
.collect::<Result<Vec<_>, _>>() .unwrap()
.unwrap(); .into_iter()
let mut keys = rustls_pemfile::pkcs8_private_keys(key_file) .map(rustls::Certificate)
.collect::<Result<Vec<_>, _>>() .collect();
.unwrap(); let mut keys = rustls_pemfile::pkcs8_private_keys(key_file).unwrap();
let mut config = rustls::ServerConfig::builder() let mut config = rustls::ServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth() .with_no_client_auth()
.with_single_cert( .with_single_cert(cert_chain, rustls::PrivateKey(keys.remove(0)))
cert_chain,
rustls::pki_types::PrivateKeyDer::Pkcs8(keys.remove(0)),
)
.unwrap(); .unwrap();
const H1_ALPN: &[u8] = b"http/1.1"; const H1_ALPN: &[u8] = b"http/1.1";

View File

@@ -1,7 +1,7 @@
//! Sets up a WebSocket server over TCP and TLS. //! Sets up a WebSocket server over TCP and TLS.
//! Sends a heartbeat message every 4 seconds but does not respond to any incoming frames. //! Sends a heartbeat message every 4 seconds but does not respond to any incoming frames.
extern crate tls_rustls_023 as rustls; extern crate tls_rustls_021 as rustls;
use std::{ use std::{
io, io,
@@ -30,7 +30,7 @@ async fn main() -> io::Result<()> {
.bind("tls", ("127.0.0.1", 8443), || { .bind("tls", ("127.0.0.1", 8443), || {
HttpService::build() HttpService::build()
.finish(handler) .finish(handler)
.rustls_0_23(tls_config()) .rustls_021(tls_config())
})? })?
.run() .run()
.await .await
@@ -85,27 +85,27 @@ impl Stream for Heartbeat {
fn tls_config() -> rustls::ServerConfig { fn tls_config() -> rustls::ServerConfig {
use std::io::BufReader; use std::io::BufReader;
use rustls::{Certificate, PrivateKey};
use rustls_pemfile::{certs, pkcs8_private_keys}; use rustls_pemfile::{certs, pkcs8_private_keys};
let rcgen::CertifiedKey { cert, key_pair } = let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap();
rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap(); let cert_file = cert.serialize_pem().unwrap();
let cert_file = cert.pem(); let key_file = cert.serialize_private_key_pem();
let key_file = key_pair.serialize_pem();
let cert_file = &mut BufReader::new(cert_file.as_bytes()); let cert_file = &mut BufReader::new(cert_file.as_bytes());
let key_file = &mut BufReader::new(key_file.as_bytes()); let key_file = &mut BufReader::new(key_file.as_bytes());
let cert_chain = certs(cert_file).collect::<Result<Vec<_>, _>>().unwrap(); let cert_chain = certs(cert_file)
let mut keys = pkcs8_private_keys(key_file) .unwrap()
.collect::<Result<Vec<_>, _>>() .into_iter()
.unwrap(); .map(Certificate)
.collect();
let mut keys = pkcs8_private_keys(key_file).unwrap();
let mut config = rustls::ServerConfig::builder() let mut config = rustls::ServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth() .with_no_client_auth()
.with_single_cert( .with_single_cert(cert_chain, PrivateKey(keys.remove(0)))
cert_chain,
rustls::pki_types::PrivateKeyDer::Pkcs8(keys.remove(0)),
)
.unwrap(); .unwrap();
config.alpn_protocols.push(b"http/1.1".to_vec()); config.alpn_protocols.push(b"http/1.1".to_vec());

View File

@@ -531,6 +531,7 @@ where
mod tests { mod tests {
use actix_rt::pin; use actix_rt::pin;
use actix_utils::future::poll_fn; use actix_utils::future::poll_fn;
use bytes::{Bytes, BytesMut};
use futures_util::stream; use futures_util::stream;
use super::*; use super::*;

View File

@@ -28,7 +28,7 @@ impl Date {
fn update(&mut self) { fn update(&mut self) {
self.pos = 0; self.pos = 0;
write!(self, "{}", httpdate::HttpDate::from(SystemTime::now())).unwrap(); write!(self, "{}", httpdate::fmt_http_date(SystemTime::now())).unwrap();
} }
} }

View File

@@ -50,21 +50,10 @@ impl<B: MessageBody> Encoder<B> {
} }
} }
fn empty() -> Self {
Encoder {
body: EncoderBody::Full { body: Bytes::new() },
encoder: None,
fut: None,
eof: true,
}
}
pub fn response(encoding: ContentEncoding, head: &mut ResponseHead, body: B) -> Self { pub fn response(encoding: ContentEncoding, head: &mut ResponseHead, body: B) -> Self {
// no need to compress empty bodies // no need to compress an empty body
match body.size() { if matches!(body.size(), BodySize::None | BodySize::Sized(0)) {
BodySize::None => return Self::none(), return Self::none();
BodySize::Sized(0) => return Self::empty(),
_ => {}
} }
let should_encode = !(head.headers().contains_key(&CONTENT_ENCODING) let should_encode = !(head.headers().contains_key(&CONTENT_ENCODING)
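One side of this hunk distinguishes a response with no body at all (`BodySize::None`) from one with a present but zero-length body (`BodySize::Sized(0)`), returning dedicated `none()`/`empty()` encoders so that neither is fed through a compression codec (see the "zero-sized response bodies" changelog entries above). Below is a rough sketch of that dispatch using simplified stand-in enums; actix-http's real `Encoder` carries the body, codec state, and EOF flag.

```rust
// Simplified stand-ins for the dispatch shown above; these are not
// actix-http's actual types.
#[derive(Debug, PartialEq)]
enum BodySize {
    None,       // no body at all (e.g. responses to HEAD)
    Sized(u64), // body with a known length, possibly zero
}

#[derive(Debug, PartialEq)]
enum Encoder {
    None,        // emit no body
    Empty,       // emit an empty body (zero bytes, but still a body)
    Passthrough, // forward the body unchanged
    Compressed,  // wrap the body in a compression codec
}

fn choose_encoder(size: BodySize, should_encode: bool) -> Encoder {
    match size {
        BodySize::None => Encoder::None,
        BodySize::Sized(0) => Encoder::Empty,
        _ if should_encode => Encoder::Compressed,
        _ => Encoder::Passthrough,
    }
}

fn main() {
    assert_eq!(choose_encoder(BodySize::None, true), Encoder::None);
    assert_eq!(choose_encoder(BodySize::Sized(0), true), Encoder::Empty);
    assert_eq!(choose_encoder(BodySize::Sized(512), true), Encoder::Compressed);
}
```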

View File

@@ -3,7 +3,7 @@
use std::{error::Error as StdError, fmt, io, str::Utf8Error, string::FromUtf8Error}; use std::{error::Error as StdError, fmt, io, str::Utf8Error, string::FromUtf8Error};
use derive_more::{Display, Error, From}; use derive_more::{Display, Error, From};
pub use http::{status::InvalidStatusCode, Error as HttpError}; pub use http::Error as HttpError;
use http::{uri::InvalidUri, StatusCode}; use http::{uri::InvalidUri, StatusCode};
use crate::{body::BoxBody, Response}; use crate::{body::BoxBody, Response};
@@ -399,7 +399,9 @@ pub enum ContentTypeError {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use http::Error as HttpError; use std::io;
use http::{Error as HttpError, StatusCode};
use super::*; use super::*;

View File

@@ -198,6 +198,9 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use bytes::BytesMut;
use http::Method;
use super::*; use super::*;
use crate::HttpMessage as _; use crate::HttpMessage as _;

View File

@@ -563,8 +563,15 @@ impl Decoder for PayloadDecoder {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use bytes::{Bytes, BytesMut};
use http::{Method, Version};
use super::*; use super::*;
use crate::{header::SET_COOKIE, HttpMessage as _}; use crate::{
error::ParseError,
header::{HeaderName, SET_COOKIE},
HttpMessage as _,
};
impl PayloadType { impl PayloadType {
pub(crate) fn unwrap(self) -> PayloadDecoder { pub(crate) fn unwrap(self) -> PayloadDecoder {

View File

@@ -512,10 +512,8 @@ where
} }
Poll::Ready(Some(Err(err))) => { Poll::Ready(Some(Err(err))) => {
let err = err.into();
tracing::error!("Response payload stream error: {err:?}");
this.flags.insert(Flags::FINISHED); this.flags.insert(Flags::FINISHED);
return Err(DispatchError::Body(err)); return Err(DispatchError::Body(err.into()));
} }
Poll::Pending => return Ok(PollResponse::DoNothing), Poll::Pending => return Ok(PollResponse::DoNothing),
@@ -551,7 +549,6 @@ where
} }
Poll::Ready(Some(Err(err))) => { Poll::Ready(Some(Err(err))) => {
tracing::error!("Response payload stream error: {err:?}");
this.flags.insert(Flags::FINISHED); this.flags.insert(Flags::FINISHED);
return Err(DispatchError::Body( return Err(DispatchError::Body(
Error::new_body().with_cause(err).into(), Error::new_body().with_cause(err).into(),
@@ -706,7 +703,7 @@ where
req.head_mut().peer_addr = *this.peer_addr; req.head_mut().peer_addr = *this.peer_addr;
req.conn_data.clone_from(this.conn_data); req.conn_data = this.conn_data.as_ref().map(Rc::clone);
match this.codec.message_type() { match this.codec.message_type() {
// request has no payload // request has no payload

View File

@@ -153,7 +153,7 @@ mod openssl {
} }
#[cfg(feature = "rustls-0_20")] #[cfg(feature = "rustls-0_20")]
mod rustls_0_20 { mod rustls_020 {
use std::io; use std::io;
use actix_service::ServiceFactoryExt as _; use actix_service::ServiceFactoryExt as _;
@@ -214,7 +214,7 @@ mod rustls_0_20 {
} }
#[cfg(feature = "rustls-0_21")] #[cfg(feature = "rustls-0_21")]
mod rustls_0_21 { mod rustls_021 {
use std::io; use std::io;
use actix_service::ServiceFactoryExt as _; use actix_service::ServiceFactoryExt as _;
@@ -274,128 +274,6 @@ mod rustls_0_21 {
} }
} }
#[cfg(feature = "rustls-0_22")]
mod rustls_0_22 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls_0_22::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
use super::*;
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>>,
S::InitError: fmt::Debug,
S::Response: Into<Response<B>>,
B: MessageBody,
X: ServiceFactory<Request, Config = (), Response = Request>,
X::Future: 'static,
X::Error: Into<Response<BoxBody>>,
X::InitError: fmt::Debug,
U: ServiceFactory<
(Request, Framed<TlsStream<TcpStream>, Codec>),
Config = (),
Response = (),
>,
U::Future: 'static,
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
/// Create Rustls v0.22 based service.
pub fn rustls_0_22(
self,
config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
Acceptor::new(config)
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.map(|io: TlsStream<TcpStream>| {
let peer_addr = io.get_ref().0.peer_addr().ok();
(io, peer_addr)
})
.and_then(self.map_err(TlsError::Service))
}
}
}
#[cfg(feature = "rustls-0_23")]
mod rustls_0_23 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls_0_23::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
use super::*;
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>>,
S::InitError: fmt::Debug,
S::Response: Into<Response<B>>,
B: MessageBody,
X: ServiceFactory<Request, Config = (), Response = Request>,
X::Future: 'static,
X::Error: Into<Response<BoxBody>>,
X::InitError: fmt::Debug,
U: ServiceFactory<
(Request, Framed<TlsStream<TcpStream>, Codec>),
Config = (),
Response = (),
>,
U::Future: 'static,
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
/// Create Rustls v0.23 based service.
pub fn rustls_0_23(
self,
config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
Acceptor::new(config)
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.map(|io: TlsStream<TcpStream>| {
let peer_addr = io.get_ref().0.peer_addr().ok();
(io, peer_addr)
})
.and_then(self.map_err(TlsError::Service))
}
}
}
impl<T, S, B, X, U> H1Service<T, S, B, X, U> impl<T, S, B, X, U> H1Service<T, S, B, X, U>
where where
S: ServiceFactory<Request, Config = ()>, S: ServiceFactory<Request, Config = ()>,

View File

@@ -4,7 +4,7 @@ use std::{
future::Future, future::Future,
marker::PhantomData, marker::PhantomData,
net, net,
pin::{pin, Pin}, pin::Pin,
rc::Rc, rc::Rc,
task::{Context, Poll}, task::{Context, Poll},
}; };
@@ -20,6 +20,7 @@ use h2::{
Ping, PingPong, Ping, PingPong,
}; };
use pin_project_lite::pin_project; use pin_project_lite::pin_project;
use tracing::{error, trace, warn};
use crate::{ use crate::{
body::{BodySize, BoxBody, MessageBody}, body::{BodySize, BoxBody, MessageBody},
@@ -126,7 +127,7 @@ where
head.headers = parts.headers.into(); head.headers = parts.headers.into();
head.peer_addr = this.peer_addr; head.peer_addr = this.peer_addr;
req.conn_data.clone_from(&this.conn_data); req.conn_data = this.conn_data.as_ref().map(Rc::clone);
let fut = this.flow.service.call(req); let fut = this.flow.service.call(req);
let config = this.config.clone(); let config = this.config.clone();
@@ -146,13 +147,11 @@ where
if let Err(err) = res { if let Err(err) = res {
match err { match err {
DispatchError::SendResponse(err) => { DispatchError::SendResponse(err) => {
tracing::trace!("Error sending response: {err:?}"); trace!("Error sending HTTP/2 response: {:?}", err)
}
DispatchError::SendData(err) => {
tracing::warn!("Send data error: {err:?}");
} }
DispatchError::SendData(err) => warn!("{:?}", err),
DispatchError::ResponseBody(err) => { DispatchError::ResponseBody(err) => {
tracing::error!("Response payload stream error: {err:?}"); error!("Response payload stream error: {:?}", err)
} }
} }
} }
@@ -229,9 +228,9 @@ where
return Ok(()); return Ok(());
} }
let mut body = pin!(body);
// poll response body and send chunks to client // poll response body and send chunks to client
actix_rt::pin!(body);
while let Some(res) = poll_fn(|cx| body.as_mut().poll_next(cx)).await { while let Some(res) = poll_fn(|cx| body.as_mut().poll_next(cx)).await {
let mut chunk = res.map_err(|err| DispatchError::ResponseBody(err.into()))?; let mut chunk = res.map_err(|err| DispatchError::ResponseBody(err.into()))?;

View File

@@ -141,7 +141,7 @@ mod openssl {
} }
#[cfg(feature = "rustls-0_20")] #[cfg(feature = "rustls-0_20")]
mod rustls_0_20 { mod rustls_020 {
use std::io; use std::io;
use actix_service::ServiceFactoryExt as _; use actix_service::ServiceFactoryExt as _;
@@ -192,7 +192,7 @@ mod rustls_0_20 {
} }
#[cfg(feature = "rustls-0_21")] #[cfg(feature = "rustls-0_21")]
mod rustls_0_21 { mod rustls_021 {
use std::io; use std::io;
use actix_service::ServiceFactoryExt as _; use actix_service::ServiceFactoryExt as _;
@@ -242,108 +242,6 @@ mod rustls_0_21 {
} }
} }
#[cfg(feature = "rustls-0_22")]
mod rustls_0_22 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls_0_22::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
use super::*;
impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>> + 'static,
S::Response: Into<Response<B>> + 'static,
<S::Service as Service<Request>>::Future: 'static,
B: MessageBody + 'static,
{
/// Create Rustls v0.22 based service.
pub fn rustls_0_22(
self,
mut config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = S::InitError,
> {
let mut protos = vec![b"h2".to_vec()];
protos.extend_from_slice(&config.alpn_protocols);
config.alpn_protocols = protos;
Acceptor::new(config)
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.map(|io: TlsStream<TcpStream>| {
let peer_addr = io.get_ref().0.peer_addr().ok();
(io, peer_addr)
})
.and_then(self.map_err(TlsError::Service))
}
}
}
#[cfg(feature = "rustls-0_23")]
mod rustls_0_23 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls_0_23::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
use super::*;
impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>> + 'static,
S::Response: Into<Response<B>> + 'static,
<S::Service as Service<Request>>::Future: 'static,
B: MessageBody + 'static,
{
/// Create Rustls v0.23 based service.
pub fn rustls_0_23(
self,
mut config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = S::InitError,
> {
let mut protos = vec![b"h2".to_vec()];
protos.extend_from_slice(&config.alpn_protocols);
config.alpn_protocols = protos;
Acceptor::new(config)
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.map(|io: TlsStream<TcpStream>| {
let peer_addr = io.get_ref().0.peer_addr().ok();
(io, peer_addr)
})
.and_then(self.map_err(TlsError::Service))
}
}
}
impl<T, S, B> ServiceFactory<(T, Option<net::SocketAddr>)> for H2Service<T, S, B> impl<T, S, B> ServiceFactory<(T, Option<net::SocketAddr>)> for H2Service<T, S, B>
where where
T: AsyncRead + AsyncWrite + Unpin + 'static, T: AsyncRead + AsyncWrite + Unpin + 'static,

View File

@@ -650,13 +650,6 @@ impl From<HeaderMap> for http::HeaderMap {
} }
} }
/// Convert our `&HeaderMap` to a `http::HeaderMap`.
impl From<&HeaderMap> for http::HeaderMap {
fn from(map: &HeaderMap) -> Self {
map.to_owned().into()
}
}
/// Iterator over removed, owned values with the same associated name. /// Iterator over removed, owned values with the same associated name.
/// ///
/// Returned from methods that remove or replace items. See [`HeaderMap::insert`] /// Returned from methods that remove or replace items. See [`HeaderMap::insert`]

View File

@@ -24,7 +24,8 @@ impl FromStr for HttpDate {
impl fmt::Display for HttpDate { impl fmt::Display for HttpDate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
httpdate::HttpDate::from(self.0).fmt(f) let date_str = httpdate::fmt_http_date(self.0);
f.write_str(&date_str)
} }
} }
@@ -36,7 +37,7 @@ impl TryIntoHeaderValue for HttpDate {
let mut wrt = MutWriter(&mut buf); let mut wrt = MutWriter(&mut buf);
// unwrap: date output is known to be well formed and of known length // unwrap: date output is known to be well formed and of known length
write!(wrt, "{}", self).unwrap(); write!(wrt, "{}", httpdate::fmt_http_date(self.0)).unwrap();
HeaderValue::from_maybe_shared(buf.split().freeze()) HeaderValue::from_maybe_shared(buf.split().freeze())
} }
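The two sides of this hunk are equivalent ways of rendering an RFC 7231 HTTP-date with the `httpdate` crate: the free function `fmt_http_date` versus the `Display` impl on `httpdate::HttpDate`. A minimal illustration, assuming `httpdate` as a dependency:

```rust
use std::time::SystemTime;

fn main() {
    let now = SystemTime::now();

    // Free-function form, as used on one side of the hunk above.
    let a = httpdate::fmt_http_date(now);

    // Display-based form, as used on the other side; `HttpDate: From<SystemTime>`.
    let b = httpdate::HttpDate::from(now).to_string();

    // Both yield the same IMF-fixdate string, e.g. "Sun, 06 Nov 1994 08:49:37 GMT".
    assert_eq!(a, b);
}
```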

View File

@@ -80,18 +80,18 @@ mod tests {
#[test]
fn comma_delimited_parsing() {
- let headers = [];
+ let headers = vec![];
let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
assert_eq!(res, vec![0; 0]);
- let headers = [
+ let headers = vec![
HeaderValue::from_static("1, 2"),
HeaderValue::from_static("3,4"),
];
let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
assert_eq!(res, vec![1, 2, 3, 4]);
- let headers = [
+ let headers = vec![
HeaderValue::from_static(""),
HeaderValue::from_static(","),
HeaderValue::from_static(" "),

View File

@@ -6,10 +6,7 @@
//! | ------------------- | ------------------------------------------- |
//! | `http2` | HTTP/2 support via [h2]. |
//! | `openssl` | TLS support via [OpenSSL]. |
- //! | `rustls-0_20` | TLS support via rustls 0.20. |
- //! | `rustls-0_21` | TLS support via rustls 0.21. |
- //! | `rustls-0_22` | TLS support via rustls 0.22. |
- //! | `rustls-0_23` | TLS support via [rustls] 0.23. |
+ //! | `rustls` | TLS support via [rustls]. |
//! | `compress-brotli` | Payload compression support: Brotli. |
//! | `compress-gzip` | Payload compression support: Deflate, Gzip. |
//! | `compress-zstd` | Payload compression support: Zstd. |
@@ -31,7 +28,7 @@
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
- pub use http::{uri, uri::Uri, Method, StatusCode, Version};
+ pub use ::http::{uri, uri::Uri, Method, StatusCode, Version};
pub mod body;
mod builder;
@@ -61,7 +58,7 @@ pub mod ws;
#[allow(deprecated)]
pub use self::payload::PayloadStream;
- #[cfg(feature = "__tls")]
+ #[cfg(any(feature = "openssl", feature = "rustls-0_20", feature = "rustls-0_21"))]
pub use self::service::TlsAcceptorConfig;
pub use self::{
builder::HttpServiceBuilder,

View File

@@ -5,7 +5,7 @@
use std::cell::RefCell;
thread_local! {
- static NOTIFY_DROPPED: RefCell<Option<bool>> = const { RefCell::new(None) };
+ static NOTIFY_DROPPED: RefCell<Option<bool>> = RefCell::new(None);
}
/// Check if the spawned task is dropped.

View File

@@ -16,10 +16,7 @@ pub struct RequestHead {
pub uri: Uri,
pub version: Version,
pub headers: HeaderMap,
- /// Will only be None when called in unit tests unless set manually.
pub peer_addr: Option<net::SocketAddr>,
flags: Flags,
}

View File

@@ -173,7 +173,7 @@ impl<P> Request<P> {
/// Peer address is the directly connected peer's socket address. If a proxy is used in front of
/// the Actix Web server, then it would be address of this proxy.
///
- /// Will only return None when called in unit tests unless set manually.
+ /// Will only return None when called in unit tests.
#[inline]
pub fn peer_addr(&self) -> Option<net::SocketAddr> {
self.head().peer_addr

View File

@@ -351,9 +351,12 @@ mod tests {
assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/plain");
let resp = Response::build(StatusCode::OK)
- .content_type(mime::TEXT_JAVASCRIPT)
+ .content_type(mime::APPLICATION_JAVASCRIPT_UTF_8)
.body(Bytes::new());
- assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/javascript");
+ assert_eq!(
+ resp.headers().get(CONTENT_TYPE).unwrap(),
+ "application/javascript; charset=utf-8"
+ );
}
#[test]

View File

@@ -241,13 +241,13 @@ where
}
/// Configuration options used when accepting TLS connection.
- #[cfg(feature = "__tls")]
+ #[cfg(any(feature = "openssl", feature = "rustls-0_20", feature = "rustls-0_21"))]
#[derive(Debug, Default)]
pub struct TlsAcceptorConfig {
pub(crate) handshake_timeout: Option<std::time::Duration>,
}
- #[cfg(feature = "__tls")]
+ #[cfg(any(feature = "openssl", feature = "rustls-0_20", feature = "rustls-0_21"))]
impl TlsAcceptorConfig {
/// Set TLS handshake timeout duration.
pub fn handshake_timeout(self, dur: std::time::Duration) -> Self {
@@ -353,12 +353,12 @@ mod openssl {
}
#[cfg(feature = "rustls-0_20")]
- mod rustls_0_20 {
+ mod rustls_020 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
- rustls_0_20::{reexports::ServerConfig, Acceptor, TlsStream},
+ rustls::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
@@ -389,7 +389,7 @@ mod rustls_0_20 {
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
- /// Create Rustls v0.20 based service.
+ /// Create Rustls based service.
pub fn rustls(
self,
config: ServerConfig,
@@ -403,7 +403,7 @@ mod rustls_0_20 {
self.rustls_with_config(config, TlsAcceptorConfig::default())
}
- /// Create Rustls v0.20 based service with custom TLS acceptor configuration.
+ /// Create Rustls based service with custom TLS acceptor configuration.
pub fn rustls_with_config(
self,
mut config: ServerConfig,
@@ -449,7 +449,7 @@ mod rustls_0_20 {
}
#[cfg(feature = "rustls-0_21")]
- mod rustls_0_21 {
+ mod rustls_021 {
use std::io;
use actix_service::ServiceFactoryExt as _;
@@ -485,7 +485,7 @@ mod rustls_0_21 {
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
- /// Create Rustls v0.21 based service.
+ /// Create Rustls based service.
pub fn rustls_021(
config: ServerConfig,
@@ -499,7 +499,7 @@ mod rustls_0_21 {
self.rustls_021_with_config(config, TlsAcceptorConfig::default())
}
- /// Create Rustls v0.21 based service with custom TLS acceptor configuration.
+ /// Create Rustls based service with custom TLS acceptor configuration.
pub fn rustls_021_with_config(
self,
mut config: ServerConfig,
@@ -544,198 +544,6 @@ mod rustls_0_21 {
}
}
} }
#[cfg(feature = "rustls-0_22")]
mod rustls_0_22 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls_0_22::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
use super::*;
impl<S, B, X, U> HttpService<TlsStream<TcpStream>, S, B, X, U>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>> + 'static,
S::InitError: fmt::Debug,
S::Response: Into<Response<B>> + 'static,
<S::Service as Service<Request>>::Future: 'static,
B: MessageBody + 'static,
X: ServiceFactory<Request, Config = (), Response = Request>,
X::Future: 'static,
X::Error: Into<Response<BoxBody>>,
X::InitError: fmt::Debug,
U: ServiceFactory<
(Request, Framed<TlsStream<TcpStream>, h1::Codec>),
Config = (),
Response = (),
>,
U::Future: 'static,
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
/// Create Rustls v0.22 based service.
pub fn rustls_0_22(
self,
config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
self.rustls_0_22_with_config(config, TlsAcceptorConfig::default())
}
/// Create Rustls v0.22 based service with custom TLS acceptor configuration.
pub fn rustls_0_22_with_config(
self,
mut config: ServerConfig,
tls_acceptor_config: TlsAcceptorConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
let mut protos = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
protos.extend_from_slice(&config.alpn_protocols);
config.alpn_protocols = protos;
let mut acceptor = Acceptor::new(config);
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
acceptor.set_handshake_timeout(handshake_timeout);
}
acceptor
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.and_then(|io: TlsStream<TcpStream>| async {
let proto = if let Some(protos) = io.get_ref().1.alpn_protocol() {
if protos.windows(2).any(|window| window == b"h2") {
Protocol::Http2
} else {
Protocol::Http1
}
} else {
Protocol::Http1
};
let peer_addr = io.get_ref().0.peer_addr().ok();
Ok((io, proto, peer_addr))
})
.and_then(self.map_err(TlsError::Service))
}
}
}
#[cfg(feature = "rustls-0_23")]
mod rustls_0_23 {
use std::io;
use actix_service::ServiceFactoryExt as _;
use actix_tls::accept::{
rustls_0_23::{reexports::ServerConfig, Acceptor, TlsStream},
TlsError,
};
use super::*;
impl<S, B, X, U> HttpService<TlsStream<TcpStream>, S, B, X, U>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>> + 'static,
S::InitError: fmt::Debug,
S::Response: Into<Response<B>> + 'static,
<S::Service as Service<Request>>::Future: 'static,
B: MessageBody + 'static,
X: ServiceFactory<Request, Config = (), Response = Request>,
X::Future: 'static,
X::Error: Into<Response<BoxBody>>,
X::InitError: fmt::Debug,
U: ServiceFactory<
(Request, Framed<TlsStream<TcpStream>, h1::Codec>),
Config = (),
Response = (),
>,
U::Future: 'static,
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
/// Create Rustls v0.23 based service.
pub fn rustls_0_23(
self,
config: ServerConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
self.rustls_0_23_with_config(config, TlsAcceptorConfig::default())
}
/// Create Rustls v0.23 based service with custom TLS acceptor configuration.
pub fn rustls_0_23_with_config(
self,
mut config: ServerConfig,
tls_acceptor_config: TlsAcceptorConfig,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
let mut protos = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
protos.extend_from_slice(&config.alpn_protocols);
config.alpn_protocols = protos;
let mut acceptor = Acceptor::new(config);
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
acceptor.set_handshake_timeout(handshake_timeout);
}
acceptor
.map_init_err(|_| {
unreachable!("TLS acceptor service factory does not error on init")
})
.map_err(TlsError::into_service_error)
.and_then(|io: TlsStream<TcpStream>| async {
let proto = if let Some(protos) = io.get_ref().1.alpn_protocol() {
if protos.windows(2).any(|window| window == b"h2") {
Protocol::Http2
} else {
Protocol::Http1
}
} else {
Protocol::Http1
};
let peer_addr = io.get_ref().0.peer_addr().ok();
Ok((io, proto, peer_addr))
})
.and_then(self.map_err(TlsError::Service))
}
}
}
impl<T, S, B, X, U> ServiceFactory<(T, Protocol, Option<net::SocketAddr>)>
for HttpService<T, S, B, X, U>
where
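The `TlsAcceptorConfig` plumbing above is consumed through the `*_with_config` constructors. A hedged usage sketch, using the v0.23 naming from the left-hand side of this diff and a `tls_config()` placeholder for loading the rustls `ServerConfig`:

```rust
use std::time::Duration;

use actix_http::{Error, HttpService, Response, TlsAcceptorConfig};
use actix_server::Server;
use actix_utils::future::ok;

#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    Server::build()
        .bind("https", ("127.0.0.1", 8443), || {
            HttpService::build()
                .finish(|_req| ok::<_, Error>(Response::ok()))
                // `tls_config()` is a stand-in for building the rustls ServerConfig;
                // the acceptor config adds a 5 second TLS handshake timeout on top
                .rustls_0_23_with_config(
                    tls_config(),
                    TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)),
                )
        })?
        .run()
        .await
}
```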

View File

@@ -178,14 +178,14 @@ impl Parser {
};
if payload_len < 126 {
- dst.reserve(p_len + 2);
+ dst.reserve(p_len + 2 + if mask { 4 } else { 0 });
dst.put_slice(&[one, two | payload_len as u8]);
} else if payload_len <= 65_535 {
- dst.reserve(p_len + 4);
+ dst.reserve(p_len + 4 + if mask { 4 } else { 0 });
dst.put_slice(&[one, two | 126]);
dst.put_u16(payload_len as u16);
} else {
- dst.reserve(p_len + 10);
+ dst.reserve(p_len + 10 + if mask { 4 } else { 0 });
dst.put_slice(&[one, two | 127]);
dst.put_u64(payload_len as u64);
};
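The hunk only changes how much buffer space is reserved up front; the arithmetic it encodes is the RFC 6455 frame-header size. A small illustrative helper (not part of the crate) that mirrors that calculation:

```rust
/// Bytes occupied by a WebSocket frame header: 2 base bytes, plus 2 or 8
/// extended payload-length bytes, plus 4 masking-key bytes when masked.
fn frame_header_len(payload_len: u64, masked: bool) -> usize {
    let ext = if payload_len < 126 {
        0
    } else if payload_len <= 65_535 {
        2
    } else {
        8
    };
    2 + ext + if masked { 4 } else { 0 }
}

fn main() {
    assert_eq!(frame_header_len(125, false), 2);
    assert_eq!(frame_header_len(126, true), 8); // 2 + 2 + 4
    assert_eq!(frame_header_len(70_000, false), 10);
}
```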

View File

@@ -221,7 +221,7 @@ pub fn handshake_response(req: &RequestHead) -> ResponseBuilder {
#[cfg(test)]
mod tests {
use super::*;
- use crate::{header, test::TestRequest};
+ use crate::{header, test::TestRequest, Method};
#[test]
fn test_handshake() {

View File

@@ -1,4 +1,7 @@
- use std::fmt;
+ use std::{
+ convert::{From, Into},
+ fmt,
+ };
use base64::prelude::*;
use tracing::error;

View File

@@ -42,11 +42,9 @@ where
}
fn tls_config() -> SslAcceptor {
- let rcgen::CertifiedKey { cert, key_pair } =
- rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
- let cert_file = cert.pem();
- let key_file = key_pair.serialize_pem();
+ let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap();
+ let cert_file = cert.serialize_pem().unwrap();
+ let key_file = cert.serialize_private_key_pem();
let cert = X509::from_pem(cert_file.as_bytes()).unwrap();
let key = PKey::private_key_from_pem(key_file.as_bytes()).unwrap();

View File

@@ -1,6 +1,6 @@
- #![cfg(feature = "rustls-0_23")]
+ #![cfg(feature = "rustls-0_21")]
- extern crate tls_rustls_023 as rustls;
+ extern crate tls_rustls_021 as rustls;
use std::{
convert::Infallible,
@@ -20,13 +20,13 @@ use actix_http::{
use actix_http_test::test_server;
use actix_rt::pin;
use actix_service::{fn_factory_with_config, fn_service};
- use actix_tls::connect::rustls_0_23::webpki_roots_cert_store;
+ use actix_tls::connect::rustls_0_21::webpki_roots_cert_store;
use actix_utils::future::{err, ok, poll_fn};
use bytes::{Bytes, BytesMut};
use derive_more::{Display, Error};
use futures_core::{ready, Stream};
use futures_util::stream::once;
- use rustls::{pki_types::ServerName, ServerConfig as RustlsServerConfig};
+ use rustls::{Certificate, PrivateKey, ServerConfig as RustlsServerConfig, ServerName};
use rustls_pemfile::{certs, pkcs8_private_keys};
async fn load_body<S>(stream: S) -> Result<BytesMut, PayloadError> async fn load_body<S>(stream: S) -> Result<BytesMut, PayloadError>
@@ -52,25 +52,24 @@ where
} }
fn tls_config() -> RustlsServerConfig { fn tls_config() -> RustlsServerConfig {
let rcgen::CertifiedKey { cert, key_pair } = let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap();
rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap(); let cert_file = cert.serialize_pem().unwrap();
let cert_file = cert.pem(); let key_file = cert.serialize_private_key_pem();
let key_file = key_pair.serialize_pem();
let cert_file = &mut BufReader::new(cert_file.as_bytes()); let cert_file = &mut BufReader::new(cert_file.as_bytes());
let key_file = &mut BufReader::new(key_file.as_bytes()); let key_file = &mut BufReader::new(key_file.as_bytes());
let cert_chain = certs(cert_file).collect::<Result<Vec<_>, _>>().unwrap(); let cert_chain = certs(cert_file)
let mut keys = pkcs8_private_keys(key_file) .unwrap()
.collect::<Result<Vec<_>, _>>() .into_iter()
.unwrap(); .map(Certificate)
.collect();
let mut keys = pkcs8_private_keys(key_file).unwrap();
let mut config = RustlsServerConfig::builder() let mut config = RustlsServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth() .with_no_client_auth()
.with_single_cert( .with_single_cert(cert_chain, PrivateKey(keys.remove(0)))
cert_chain,
rustls::pki_types::PrivateKeyDer::Pkcs8(keys.remove(0)),
)
.unwrap(); .unwrap();
config.alpn_protocols.push(HTTP1_1_ALPN_PROTOCOL.to_vec()); config.alpn_protocols.push(HTTP1_1_ALPN_PROTOCOL.to_vec());
@@ -84,6 +83,7 @@ pub fn get_negotiated_alpn_protocol(
client_alpn_protocol: &[u8], client_alpn_protocol: &[u8],
) -> Option<Vec<u8>> { ) -> Option<Vec<u8>> {
let mut config = rustls::ClientConfig::builder() let mut config = rustls::ClientConfig::builder()
.with_safe_defaults()
.with_root_certificates(webpki_roots_cert_store()) .with_root_certificates(webpki_roots_cert_store())
.with_no_client_auth(); .with_no_client_auth();
@@ -109,7 +109,7 @@ async fn h1() -> io::Result<()> {
let srv = test_server(move || { let srv = test_server(move || {
HttpService::build() HttpService::build()
.h1(|_| ok::<_, Error>(Response::ok())) .h1(|_| ok::<_, Error>(Response::ok()))
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -123,7 +123,7 @@ async fn h2() -> io::Result<()> {
let srv = test_server(move || { let srv = test_server(move || {
HttpService::build() HttpService::build()
.h2(|_| ok::<_, Error>(Response::ok())) .h2(|_| ok::<_, Error>(Response::ok()))
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -141,7 +141,7 @@ async fn h1_1() -> io::Result<()> {
assert_eq!(req.version(), Version::HTTP_11); assert_eq!(req.version(), Version::HTTP_11);
ok::<_, Error>(Response::ok()) ok::<_, Error>(Response::ok())
}) })
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -159,7 +159,7 @@ async fn h2_1() -> io::Result<()> {
assert_eq!(req.version(), Version::HTTP_2); assert_eq!(req.version(), Version::HTTP_2);
ok::<_, Error>(Response::ok()) ok::<_, Error>(Response::ok())
}) })
.rustls_0_23_with_config( .rustls_021_with_config(
tls_config(), tls_config(),
TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)), TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)),
) )
@@ -180,7 +180,7 @@ async fn h2_body1() -> io::Result<()> {
let body = load_body(req.take_payload()).await?; let body = load_body(req.take_payload()).await?;
Ok::<_, Error>(Response::ok().set_body(body)) Ok::<_, Error>(Response::ok().set_body(body))
}) })
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -206,7 +206,7 @@ async fn h2_content_length() {
]; ];
ok::<_, Infallible>(Response::new(statuses[indx])) ok::<_, Infallible>(Response::new(statuses[indx]))
}) })
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -278,7 +278,7 @@ async fn h2_headers() {
} }
ok::<_, Infallible>(config.body(data.clone())) ok::<_, Infallible>(config.body(data.clone()))
}) })
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -317,7 +317,7 @@ async fn h2_body2() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR))) .h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -334,7 +334,7 @@ async fn h2_head_empty() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.finish(|_| ok::<_, Infallible>(Response::ok().set_body(STR))) .finish(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -360,7 +360,7 @@ async fn h2_head_binary() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR))) .h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -385,7 +385,7 @@ async fn h2_head_binary2() {
let srv = test_server(move || { let srv = test_server(move || {
HttpService::build() HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR))) .h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -411,7 +411,7 @@ async fn h2_body_length() {
Response::ok().set_body(SizedStream::new(STR.len() as u64, body)), Response::ok().set_body(SizedStream::new(STR.len() as u64, body)),
) )
}) })
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -435,7 +435,7 @@ async fn h2_body_chunked_explicit() {
.body(BodyStream::new(body)), .body(BodyStream::new(body)),
) )
}) })
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -464,7 +464,7 @@ async fn h2_response_http_error_handling() {
) )
})) }))
})) }))
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -494,7 +494,7 @@ async fn h2_service_error() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.h2(|_| err::<Response<BoxBody>, _>(BadRequest)) .h2(|_| err::<Response<BoxBody>, _>(BadRequest))
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -511,7 +511,7 @@ async fn h1_service_error() {
let mut srv = test_server(move || { let mut srv = test_server(move || {
HttpService::build() HttpService::build()
.h1(|_| err::<Response<BoxBody>, _>(BadRequest)) .h1(|_| err::<Response<BoxBody>, _>(BadRequest))
.rustls_0_23(tls_config()) .rustls_021(tls_config())
}) })
.await; .await;
@@ -534,7 +534,7 @@ async fn alpn_h1() -> io::Result<()> {
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec()); config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
HttpService::build() HttpService::build()
.h1(|_| ok::<_, Error>(Response::ok())) .h1(|_| ok::<_, Error>(Response::ok()))
.rustls_0_23(config) .rustls_021(config)
}) })
.await; .await;
@@ -556,7 +556,7 @@ async fn alpn_h2() -> io::Result<()> {
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec()); config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
HttpService::build() HttpService::build()
.h2(|_| ok::<_, Error>(Response::ok())) .h2(|_| ok::<_, Error>(Response::ok()))
.rustls_0_23(config) .rustls_021(config)
}) })
.await; .await;
@@ -582,7 +582,7 @@ async fn alpn_h2_1() -> io::Result<()> {
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec()); config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
HttpService::build() HttpService::build()
.finish(|_| ok::<_, Error>(Response::ok())) .finish(|_| ok::<_, Error>(Response::ok()))
.rustls_0_23(config) .rustls_021(config)
}) })
.await; .await;

View File

@@ -2,10 +2,6 @@
## Unreleased
## 0.7.0
- Minimum supported Rust version (MSRV) is now 1.72.
## 0.6.1
- Update `syn` dependency to `2`.

View File

@@ -1,14 +1,13 @@
[package]
name = "actix-multipart-derive"
- version = "0.7.0"
+ version = "0.6.1"
authors = ["Jacob Halsey <jacob@jhalsey.com>"]
description = "Multipart form derive macro for Actix Web"
keywords = ["http", "web", "framework", "async", "futures"]
- homepage.workspace = true
- repository.workspace = true
- license.workspace = true
- edition.workspace = true
- rust-version.workspace = true
+ homepage = "https://actix.rs"
+ repository = "https://github.com/actix/actix-web"
+ license = "MIT OR Apache-2.0"
+ edition = "2021"
[package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"]
@@ -25,7 +24,7 @@ quote = "1"
syn = "2" syn = "2"
[dev-dependencies] [dev-dependencies]
actix-multipart = "0.7" actix-multipart = "0.6"
actix-web = "4" actix-web = "4"
rustversion = "1" rustversion = "1"
trybuild = "1" trybuild = "1"

View File

@@ -1,16 +1,17 @@
- # `actix-multipart-derive`
+ # actix-multipart-derive
> The derive macro implementation for actix-multipart-derive.
- <!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-multipart-derive?label=latest)](https://crates.io/crates/actix-multipart-derive)
- [![Documentation](https://docs.rs/actix-multipart-derive/badge.svg?version=0.7.0)](https://docs.rs/actix-multipart-derive/0.7.0)
+ [![Documentation](https://docs.rs/actix-multipart-derive/badge.svg?version=0.6.1)](https://docs.rs/actix-multipart-derive/0.6.1)
- ![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
+ ![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart-derive.svg)
<br />
- [![dependency status](https://deps.rs/crate/actix-multipart-derive/0.7.0/status.svg)](https://deps.rs/crate/actix-multipart-derive/0.7.0)
+ [![dependency status](https://deps.rs/crate/actix-multipart-derive/0.6.1/status.svg)](https://deps.rs/crate/actix-multipart-derive/0.6.1)
[![Download](https://img.shields.io/crates/d/actix-multipart-derive.svg)](https://crates.io/crates/actix-multipart-derive)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
- <!-- prettier-ignore-end -->
+ ## Documentation & Resources
+ - [API Documentation](https://docs.rs/actix-multipart-derive)
+ - Minimum Supported Rust Version (MSRV): 1.68

View File

@@ -138,7 +138,7 @@ struct ParsedField<'t> {
/// `#[multipart(duplicate_field = "<behavior>")]` attribute:
///
/// - "ignore": (default) Extra fields are ignored. I.e., the first one is persisted.
- /// - "deny": A `MultipartError::UnknownField` error response is returned.
+ /// - "deny": A `MultipartError::UnsupportedField` error response is returned.
/// - "replace": Each field is processed, but only the last one is persisted.
///
/// Note that `Vec` fields will ignore this option.
@@ -229,7 +229,7 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenS
// Return value when a field name is not supported by the form
let unknown_field_result = if attrs.deny_unknown_fields {
quote!(::std::result::Result::Err(
- ::actix_multipart::MultipartError::UnknownField(field.name().unwrap().to_string())
+ ::actix_multipart::MultipartError::UnsupportedField(field.name().to_string())
))
} else {
quote!(::std::result::Result::Ok(()))
@@ -292,7 +292,7 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenS
limits: &'t mut ::actix_multipart::form::Limits,
state: &'t mut ::actix_multipart::form::State,
) -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = ::std::result::Result<(), ::actix_multipart::MultipartError>> + 't>> {
- match field.name().unwrap() {
+ match field.name() {
#handle_field_impl
_ => return ::std::boxed::Box::pin(::std::future::ready(#unknown_field_result)),
}
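For readers following the macro internals above, the container and field attributes in question are spelled on the deriving struct roughly as below. This is an illustrative form only; the field names and limit are invented, and the exact attribute grouping is an assumption:

```rust
use actix_multipart::form::{tempfile::TempFile, text::Text, MultipartForm};

// With `deny_unknown_fields`, a part whose name matches no struct field hits
// the unknown-field arm generated above; with `duplicate_field = "deny"`, a
// repeated `description` part produces a duplicate-field error instead of
// being silently ignored or replaced.
#[derive(MultipartForm)]
#[multipart(duplicate_field = "deny", deny_unknown_fields)]
struct UploadForm {
    #[multipart(limit = "5MB")]
    file: TempFile,
    description: Text<String>,
}
```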

View File

@@ -1,4 +1,4 @@
- #[rustversion::stable(1.72)] // MSRV
+ #[rustversion::stable(1.68)] // MSRV
#[test]
fn compile_macros() {
let t = trybuild::TestCases::new();

View File

@@ -2,29 +2,6 @@
## Unreleased
- Fix re-exported version of `actix-multipart-derive`.
## 0.7.1
- Expose `LimitExceeded` error type.
## 0.7.0
- Add `MultipartError::ContentTypeIncompatible` variant.
- Add `MultipartError::ContentDispositionNameMissing` variant.
- Add `Field::bytes()` method.
- Rename `MultipartError::{NoContentDisposition => ContentDispositionMissing}` variant.
- Rename `MultipartError::{NoContentType => ContentTypeMissing}` variant.
- Rename `MultipartError::{ParseContentType => ContentTypeParse}` variant.
- Rename `MultipartError::{Boundary => BoundaryMissing}` variant.
- Rename `MultipartError::{UnsupportedField => UnknownField}` variant.
- Remove top-level re-exports of `test` utilities.
## 0.6.2
- Add testing utilities under new module `test`.
- Minimum supported Rust version (MSRV) is now 1.72.
## 0.6.1
- Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.

View File

@@ -1,6 +1,6 @@
[package]
name = "actix-multipart"
- version = "0.7.1"
+ version = "0.6.1"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Jacob Halsey <jacob@jhalsey.com>",
@@ -16,31 +16,17 @@ edition = "2021"
rustdoc-args = ["--cfg", "docsrs"]
all-features = true
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_http::*",
"actix_multipart_derive::*",
"actix_utils::*",
"actix_web::*",
"bytes::*",
"futures_core::*",
"mime::*",
"serde_json::*",
"serde_plain::*",
"serde::*",
"tempfile::*",
]
[features]
default = ["tempfile", "derive"]
derive = ["actix-multipart-derive"]
tempfile = ["dep:tempfile", "tokio/fs"]
[dependencies]
- actix-multipart-derive = { version = "=0.7.0", optional = true }
+ actix-multipart-derive = { version = "=0.6.1", optional = true }
actix-utils = "3"
actix-web = { version = "4", default-features = false }
bytes = "1"
derive_more = "0.99.5"
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
@@ -49,7 +35,6 @@ local-waker = "0.1"
log = "0.4" log = "0.4"
memchr = "2.5" memchr = "2.5"
mime = "0.3" mime = "0.3"
rand = "0.8"
serde = "1" serde = "1"
serde_json = "1" serde_json = "1"
serde_plain = "1" serde_plain = "1"
@@ -61,17 +46,7 @@ actix-http = "3"
actix-multipart-rfc7578 = "0.10"
actix-rt = "2.2"
actix-test = "0.1"
actix-web = "4"
assert_matches = "1"
awc = "3"
env_logger = "0.11"
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
futures-test = "0.3"
multer = "3"
tokio = { version = "1.24.2", features = ["sync"] }
tokio-stream = "0.1"
[lints.rust]
future_incompatible = { level = "deny" }
rust_2018_idioms = { level = "deny" }
nonstandard_style = { level = "deny" }

View File

@@ -1,68 +1,17 @@
- # `actix-multipart`
+ # actix-multipart
- <!-- prettier-ignore-start -->
+ > Multipart form support for Actix Web.
[![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart)
- [![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.7.1)](https://docs.rs/actix-multipart/0.7.1)
+ [![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.6.1)](https://docs.rs/actix-multipart/0.6.1)
- ![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
+ ![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg)
<br />
- [![dependency status](https://deps.rs/crate/actix-multipart/0.7.1/status.svg)](https://deps.rs/crate/actix-multipart/0.7.1)
+ [![dependency status](https://deps.rs/crate/actix-multipart/0.6.1/status.svg)](https://deps.rs/crate/actix-multipart/0.6.1)
[![Download](https://img.shields.io/crates/d/actix-multipart.svg)](https://crates.io/crates/actix-multipart)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
- <!-- prettier-ignore-end -->
+ ## Documentation & Resources
- <!-- cargo-rdme start -->
+ - [API Documentation](https://docs.rs/actix-multipart)
+ - Minimum Supported Rust Version (MSRV): 1.68
Multipart form support for Actix Web.
## Examples
```rust
use actix_web::{post, App, HttpServer, Responder};
use actix_multipart::form::{json::Json as MPJson, tempfile::TempFile, MultipartForm};
use serde::Deserialize;
#[derive(Debug, Deserialize)]
struct Metadata {
name: String,
}
#[derive(Debug, MultipartForm)]
struct UploadForm {
#[multipart(limit = "100MB")]
file: TempFile,
json: MPJson<Metadata>,
}
#[post("/videos")]
pub async fn post_video(MultipartForm(form): MultipartForm<UploadForm>) -> impl Responder {
format!(
"Uploaded file {}, with size: {}",
form.json.name, form.file.size
)
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
HttpServer::new(move || App::new().service(post_video))
.bind(("127.0.0.1", 8080))?
.run()
.await
}
```
cURL request:
```sh
curl -v --request POST \
--url http://localhost:8080/videos \
-F 'json={"name": "Cargo.lock"};type=application/json' \
-F file=@./Cargo.lock
```
<!-- cargo-rdme end -->
[More available in the examples repo &rarr;](https://github.com/actix/examples/tree/master/forms/multipart)

View File

@@ -1,36 +0,0 @@
use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
use actix_web::{middleware::Logger, post, App, HttpServer, Responder};
use serde::Deserialize;
#[derive(Debug, Deserialize)]
struct Metadata {
name: String,
}
#[derive(Debug, MultipartForm)]
struct UploadForm {
#[multipart(limit = "100MB")]
file: TempFile,
json: MpJson<Metadata>,
}
#[post("/videos")]
async fn post_video(MultipartForm(form): MultipartForm<UploadForm>) -> impl Responder {
format!(
"Uploaded file {}, with size: {}\ntemporary file ({}) was deleted\n",
form.json.name,
form.file.size,
form.file.file.path().display(),
)
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
HttpServer::new(move || App::new().service(post_video).wrap(Logger::default()))
.workers(2)
.bind(("127.0.0.1", 8080))?
.run()
.await
}

View File

@@ -10,96 +10,78 @@ use derive_more::{Display, Error, From};
/// A set of errors that can occur during parsing multipart streams.
#[derive(Debug, Display, From, Error)]
#[non_exhaustive]
- pub enum Error {
+ pub enum MultipartError {
/// Could not find Content-Type header. /// Content-Disposition header is not found or is not equal to "form-data".
#[display(fmt = "Could not find Content-Type header")]
ContentTypeMissing,
/// Could not parse Content-Type header.
#[display(fmt = "Could not parse Content-Type header")]
ContentTypeParse,
/// Parsed Content-Type did not have "multipart" top-level media type.
/// ///
/// Also raised when extracting a [`MultipartForm`] from a request that does not have the /// According to [RFC 7578 §4.2](https://datatracker.ietf.org/doc/html/rfc7578#section-4.2) a
/// "multipart/form-data" media type. /// Content-Disposition header must always be present and equal to "form-data".
/// #[display(fmt = "No Content-Disposition `form-data` header")]
/// [`MultipartForm`]: struct@crate::form::MultipartForm NoContentDisposition,
#[display(fmt = "Parsed Content-Type did not have "multipart" top-level media type")]
ContentTypeIncompatible,
/// Multipart boundary is not found. /// Content-Type header is not found
#[display(fmt = "No Content-Type header found")]
NoContentType,
/// Can not parse Content-Type header
#[display(fmt = "Can not parse Content-Type header")]
ParseContentType,
/// Multipart boundary is not found
#[display(fmt = "Multipart boundary is not found")] #[display(fmt = "Multipart boundary is not found")]
BoundaryMissing, Boundary,
/// Content-Disposition header was not found or not of disposition type "form-data" when parsing /// Nested multipart is not supported
/// a "form-data" field.
///
/// As per [RFC 7578 §4.2], a "multipart/form-data" field's Content-Disposition header must
/// always be present and have a disposition type of "form-data".
///
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
#[display(fmt = "Content-Disposition header was not found when parsing a \"form-data\" field")]
ContentDispositionMissing,
/// Content-Disposition name parameter was not found when parsing a "form-data" field.
///
/// As per [RFC 7578 §4.2], a "multipart/form-data" field's Content-Disposition header must
/// always include a "name" parameter.
///
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
#[display(fmt = "Content-Disposition header was not found when parsing a \"form-data\" field")]
ContentDispositionNameMissing,
/// Nested multipart is not supported.
#[display(fmt = "Nested multipart is not supported")] #[display(fmt = "Nested multipart is not supported")]
Nested, Nested,
/// Multipart stream is incomplete. /// Multipart stream is incomplete
#[display(fmt = "Multipart stream is incomplete")] #[display(fmt = "Multipart stream is incomplete")]
Incomplete, Incomplete,
/// Field parsing failed. /// Error during field parsing
#[display(fmt = "Error during field parsing")] #[display(fmt = "{}", _0)]
Parse(ParseError), Parse(ParseError),
/// HTTP payload error. /// Payload error
#[display(fmt = "Payload error")] #[display(fmt = "{}", _0)]
Payload(PayloadError), Payload(PayloadError),
/// Stream is not consumed. /// Not consumed
#[display(fmt = "Stream is not consumed")] #[display(fmt = "Multipart stream is not consumed")]
NotConsumed, NotConsumed,
/// Form field handler raised error. /// An error from a field handler in a form
#[display(fmt = "An error occurred processing field: {name}")] #[display(
fmt = "An error occurred processing field `{}`: {}",
field_name,
source
)]
Field { Field {
name: String, field_name: String,
source: actix_web::Error, source: actix_web::Error,
}, },
/// Duplicate field found (for structure that opted-in to denying duplicate fields). /// Duplicate field
#[display(fmt = "Duplicate field found: {_0}")] #[display(fmt = "Duplicate field found for: `{}`", _0)]
#[from(ignore)] #[from(ignore)]
DuplicateField(#[error(not(source))] String), DuplicateField(#[error(not(source))] String),
/// Required field is missing. /// Missing field
#[display(fmt = "Required field is missing: {_0}")] #[display(fmt = "Field with name `{}` is required", _0)]
#[from(ignore)] #[from(ignore)]
MissingField(#[error(not(source))] String), MissingField(#[error(not(source))] String),
/// Unknown field (for structure that opted-in to denying unknown fields). /// Unknown field
#[display(fmt = "Unknown field: {_0}")] #[display(fmt = "Unsupported field `{}`", _0)]
#[from(ignore)] #[from(ignore)]
UnknownField(#[error(not(source))] String), UnsupportedField(#[error(not(source))] String),
} }
- /// Return `BadRequest` for `MultipartError`.
+ /// Return `BadRequest` for `MultipartError`
- impl ResponseError for Error {
+ impl ResponseError for MultipartError {
fn status_code(&self) -> StatusCode {
match &self {
- Error::Field { source, .. } => source.as_response_error().status_code(),
+ MultipartError::Field { source, .. } => source.as_response_error().status_code(),
- Error::ContentTypeIncompatible => StatusCode::UNSUPPORTED_MEDIA_TYPE,
_ => StatusCode::BAD_REQUEST,
}
}
@@ -111,7 +93,7 @@ mod tests {
#[test]
fn test_multipart_error() {
- let resp = Error::BoundaryMissing.error_response();
+ let resp = MultipartError::Boundary.error_response();
assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
}
}
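Both sides of this hunk funnel most failures into a 400 response. A hedged handler-side sketch using the variant names from the left-hand (0.7-era) column, just to show where the per-variant status codes surface:

```rust
use actix_multipart::MultipartError;
use actix_web::{http::StatusCode, ResponseError as _};

// Illustrative only: mirrors the status_code() mapping above. Field-handler
// errors defer to their source, an incompatible top-level media type reports
// 415, and everything else reports 400.
fn describe(err: &MultipartError) -> (StatusCode, &'static str) {
    match err {
        MultipartError::Field { .. } => (err.status_code(), "a field handler failed"),
        MultipartError::ContentTypeIncompatible => {
            (err.status_code(), "expected a multipart/* request")
        }
        MultipartError::BoundaryMissing => (StatusCode::BAD_REQUEST, "missing boundary parameter"),
        _ => (StatusCode::BAD_REQUEST, "malformed multipart payload"),
    }
}
```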

View File

@@ -1,20 +1,21 @@
//! Multipart payload support
use actix_utils::future::{ready, Ready};
use actix_web::{dev::Payload, Error, FromRequest, HttpRequest};
- use crate::multipart::Multipart;
+ use crate::server::Multipart;
- /// Extract request's payload as multipart stream.
+ /// Get request's payload as multipart stream.
///
- /// Content-type: multipart/*;
+ /// Content-type: multipart/form-data;
///
/// # Examples
///
/// ```
- /// use actix_web::{web, HttpResponse};
+ /// use actix_web::{web, HttpResponse, Error};
/// use actix_multipart::Multipart;
/// use futures_util::StreamExt as _;
///
- /// async fn index(mut payload: Multipart) -> actix_web::Result<HttpResponse> {
+ /// async fn index(mut payload: Multipart) -> Result<HttpResponse, Error> {
/// // iterate over multipart stream
/// while let Some(item) = payload.next().await {
/// let mut field = item?;
@@ -25,7 +26,7 @@ use crate::multipart::Multipart;
/// }
/// }
///
- /// Ok(HttpResponse::Ok().finish())
+ /// Ok(HttpResponse::Ok().into())
/// }
/// ```
impl FromRequest for Multipart {
@@ -34,6 +35,9 @@ impl FromRequest for Multipart {
#[inline]
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
- ready(Ok(Multipart::from_req(req, payload)))
+ ready(Ok(match Multipart::boundary(req.headers()) {
+ Ok(boundary) => Multipart::from_boundary(boundary, payload.take()),
+ Err(err) => Multipart::from_error(err),
+ }))
}
}

View File

@@ -1,493 +0,0 @@
use std::{
cell::RefCell,
cmp, fmt,
future::poll_fn,
mem,
pin::Pin,
rc::Rc,
task::{ready, Context, Poll},
};
use actix_web::{
error::PayloadError,
http::header::{self, ContentDisposition, HeaderMap},
web::{Bytes, BytesMut},
};
use derive_more::{Display, Error};
use futures_core::Stream;
use mime::Mime;
use crate::{
error::Error,
payload::{PayloadBuffer, PayloadRef},
safety::Safety,
};
/// Error type returned from [`Field::bytes()`] when field data is larger than limit.
#[derive(Debug, Display, Error)]
#[display(fmt = "size limit exceeded while collecting field data")]
#[non_exhaustive]
pub struct LimitExceeded;
/// A single field in a multipart stream.
pub struct Field {
/// Field's Content-Type.
content_type: Option<Mime>,
/// Field's Content-Disposition.
content_disposition: Option<ContentDisposition>,
/// Form field name.
///
/// A non-optional storage for form field names to avoid unwraps in `form` module. Will be an
/// empty string in non-form contexts.
///
// INVARIANT: always non-empty when request content-type is multipart/form-data.
pub(crate) form_field_name: String,
/// Field's header map.
headers: HeaderMap,
safety: Safety,
inner: Rc<RefCell<InnerField>>,
}
impl Field {
pub(crate) fn new(
content_type: Option<Mime>,
content_disposition: Option<ContentDisposition>,
form_field_name: Option<String>,
headers: HeaderMap,
safety: Safety,
inner: Rc<RefCell<InnerField>>,
) -> Self {
Field {
content_type,
content_disposition,
form_field_name: form_field_name.unwrap_or_default(),
headers,
inner,
safety,
}
}
/// Returns a reference to the field's header map.
pub fn headers(&self) -> &HeaderMap {
&self.headers
}
/// Returns a reference to the field's content (mime) type, if it is supplied by the client.
///
/// According to [RFC 7578](https://www.rfc-editor.org/rfc/rfc7578#section-4.4), if it is not
/// present, it should default to "text/plain". Note it is the responsibility of the client to
/// provide the appropriate content type, there is no attempt to validate this by the server.
pub fn content_type(&self) -> Option<&Mime> {
self.content_type.as_ref()
}
/// Returns this field's parsed Content-Disposition header, if set.
///
/// # Validation
///
/// Per [RFC 7578 §4.2], the parts of a multipart/form-data payload MUST contain a
/// Content-Disposition header field where the disposition type is `form-data` and MUST also
/// contain an additional parameter of `name` with its value being the original field name from
/// the form. This requirement is enforced during extraction for multipart/form-data requests,
/// but not other kinds of multipart requests (such as multipart/related).
///
/// As such, it is safe to `.unwrap()` calls `.content_disposition()` if you've verified.
///
/// The [`name()`](Self::name) method is also provided as a convenience for obtaining the
/// aforementioned name parameter.
///
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
pub fn content_disposition(&self) -> Option<&ContentDisposition> {
self.content_disposition.as_ref()
}
/// Returns the field's name, if set.
///
/// See [`content_disposition()`](Self::content_disposition) regarding guarantees on presence of
/// the "name" field.
pub fn name(&self) -> Option<&str> {
self.content_disposition()?.get_name()
}
/// Collects the raw field data, up to `limit` bytes.
///
/// # Errors
///
/// Any errors produced by the data stream are returned as `Ok(Err(Error))` immediately.
///
/// If the buffered data size would exceed `limit`, an `Err(LimitExceeded)` is returned. Note
/// that, in this case, the full data stream is exhausted before returning the error so that
/// subsequent fields can still be read. To better defend against malicious/infinite requests,
/// it is advisable to also put a timeout on this call.
pub async fn bytes(&mut self, limit: usize) -> Result<Result<Bytes, Error>, LimitExceeded> {
/// Sensible default (2kB) for initial, bounded allocation when collecting body bytes.
const INITIAL_ALLOC_BYTES: usize = 2 * 1024;
let mut exceeded_limit = false;
let mut buf = BytesMut::with_capacity(INITIAL_ALLOC_BYTES);
let mut field = Pin::new(self);
match poll_fn(|cx| loop {
match ready!(field.as_mut().poll_next(cx)) {
// if already over limit, discard chunk to advance multipart request
Some(Ok(_chunk)) if exceeded_limit => {}
// if limit is exceeded set flag to true and continue
Some(Ok(chunk)) if buf.len() + chunk.len() > limit => {
exceeded_limit = true;
// eagerly de-allocate field data buffer
let _ = mem::take(&mut buf);
}
Some(Ok(chunk)) => buf.extend_from_slice(&chunk),
None => return Poll::Ready(Ok(())),
Some(Err(err)) => return Poll::Ready(Err(err)),
}
})
.await
{
// propagate error returned from body poll
Err(err) => Ok(Err(err)),
// limit was exceeded while reading body
Ok(()) if exceeded_limit => Err(LimitExceeded),
// otherwise return body buffer
Ok(()) => Ok(Ok(buf.freeze())),
}
}
}
impl Stream for Field {
type Item = Result<Bytes, Error>;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let this = self.get_mut();
let mut inner = this.inner.borrow_mut();
if let Some(mut buffer) = inner
.payload
.as_ref()
.expect("Field should not be polled after completion")
.get_mut(&this.safety)
{
// check safety and poll read payload to buffer.
buffer.poll_stream(cx)?;
} else if !this.safety.is_clean() {
// safety violation
return Poll::Ready(Some(Err(Error::NotConsumed)));
} else {
return Poll::Pending;
}
inner.poll(&this.safety)
}
}
impl fmt::Debug for Field {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if let Some(ct) = &self.content_type {
writeln!(f, "\nField: {}", ct)?;
} else {
writeln!(f, "\nField:")?;
}
writeln!(f, " boundary: {}", self.inner.borrow().boundary)?;
writeln!(f, " headers:")?;
for (key, val) in self.headers.iter() {
writeln!(f, " {:?}: {:?}", key, val)?;
}
Ok(())
}
}
pub(crate) struct InnerField {
/// Payload is initialized as Some and is `take`n when the field stream finishes.
payload: Option<PayloadRef>,
boundary: String,
eof: bool,
length: Option<u64>,
}
impl InnerField {
pub(crate) fn new_in_rc(
payload: PayloadRef,
boundary: String,
headers: &HeaderMap,
) -> Result<Rc<RefCell<InnerField>>, PayloadError> {
Self::new(payload, boundary, headers).map(|this| Rc::new(RefCell::new(this)))
}
pub(crate) fn new(
payload: PayloadRef,
boundary: String,
headers: &HeaderMap,
) -> Result<InnerField, PayloadError> {
let len = if let Some(len) = headers.get(&header::CONTENT_LENGTH) {
match len.to_str().ok().and_then(|len| len.parse::<u64>().ok()) {
Some(len) => Some(len),
None => return Err(PayloadError::Incomplete(None)),
}
} else {
None
};
Ok(InnerField {
boundary,
payload: Some(payload),
eof: false,
length: len,
})
}
/// Reads body part content chunk of the specified size.
///
/// The body part must has `Content-Length` header with proper value.
pub(crate) fn read_len(
payload: &mut PayloadBuffer,
size: &mut u64,
) -> Poll<Option<Result<Bytes, Error>>> {
if *size == 0 {
Poll::Ready(None)
} else {
match payload.read_max(*size)? {
Some(mut chunk) => {
let len = cmp::min(chunk.len() as u64, *size);
*size -= len;
let ch = chunk.split_to(len as usize);
if !chunk.is_empty() {
payload.unprocessed(chunk);
}
Poll::Ready(Some(Ok(ch)))
}
None => {
if payload.eof && (*size != 0) {
Poll::Ready(Some(Err(Error::Incomplete)))
} else {
Poll::Pending
}
}
}
}
}
/// Reads content chunk of body part with unknown length.
///
/// The `Content-Length` header for body part is not necessary.
pub(crate) fn read_stream(
payload: &mut PayloadBuffer,
boundary: &str,
) -> Poll<Option<Result<Bytes, Error>>> {
let mut pos = 0;
let len = payload.buf.len();
if len == 0 {
return if payload.eof {
Poll::Ready(Some(Err(Error::Incomplete)))
} else {
Poll::Pending
};
}
// check boundary
if len > 4 && payload.buf[0] == b'\r' {
let b_len = if &payload.buf[..2] == b"\r\n" && &payload.buf[2..4] == b"--" {
Some(4)
} else if &payload.buf[1..3] == b"--" {
Some(3)
} else {
None
};
if let Some(b_len) = b_len {
let b_size = boundary.len() + b_len;
if len < b_size {
return Poll::Pending;
} else if &payload.buf[b_len..b_size] == boundary.as_bytes() {
// found boundary
return Poll::Ready(None);
}
}
}
loop {
return if let Some(idx) = memchr::memmem::find(&payload.buf[pos..], b"\r") {
let cur = pos + idx;
// check if we have enough data for boundary detection
if cur + 4 > len {
if cur > 0 {
Poll::Ready(Some(Ok(payload.buf.split_to(cur).freeze())))
} else {
Poll::Pending
}
} else {
// check boundary
if (&payload.buf[cur..cur + 2] == b"\r\n"
&& &payload.buf[cur + 2..cur + 4] == b"--")
|| (&payload.buf[cur..=cur] == b"\r"
&& &payload.buf[cur + 1..cur + 3] == b"--")
{
if cur != 0 {
// return buffer
Poll::Ready(Some(Ok(payload.buf.split_to(cur).freeze())))
} else {
pos = cur + 1;
continue;
}
} else {
// not boundary
pos = cur + 1;
continue;
}
}
} else {
Poll::Ready(Some(Ok(payload.buf.split().freeze())))
};
}
}
pub(crate) fn poll(&mut self, safety: &Safety) -> Poll<Option<Result<Bytes, Error>>> {
if self.payload.is_none() {
return Poll::Ready(None);
}
let result = if let Some(mut payload) = self
.payload
.as_ref()
.expect("Field should not be polled after completion")
.get_mut(safety)
{
if !self.eof {
let res = if let Some(ref mut len) = self.length {
InnerField::read_len(&mut payload, len)
} else {
InnerField::read_stream(&mut payload, &self.boundary)
};
match res {
Poll::Pending => return Poll::Pending,
Poll::Ready(Some(Ok(bytes))) => return Poll::Ready(Some(Ok(bytes))),
Poll::Ready(Some(Err(err))) => return Poll::Ready(Some(Err(err))),
Poll::Ready(None) => self.eof = true,
}
}
match payload.readline() {
Ok(None) => Poll::Pending,
Ok(Some(line)) => {
if line.as_ref() != b"\r\n" {
log::warn!("multipart field did not read all the data or it is malformed");
}
Poll::Ready(None)
}
Err(err) => Poll::Ready(Some(Err(err))),
}
} else {
Poll::Pending
};
if let Poll::Ready(None) = result {
// drop payload buffer and make future un-poll-able
let _ = self.payload.take();
}
result
}
}
#[cfg(test)]
mod tests {
use futures_util::{stream, StreamExt as _};
use super::*;
use crate::Multipart;
// TODO: use test utility when multi-file support is introduced
fn create_double_request_with_header() -> (Bytes, HeaderMap) {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
\r\n\
one+one+one\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
\r\n\
two+two+two\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0--\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
(bytes, headers)
}
#[actix_rt::test]
async fn bytes_unlimited() {
let (body, headers) = create_double_request_with_header();
let mut multipart = Multipart::new(&headers, stream::iter([Ok(body)]));
let field = multipart
.next()
.await
.expect("multipart should have two fields")
.expect("multipart body should be well formatted")
.bytes(usize::MAX)
.await
.expect("field data should not be size limited")
.expect("reading field data should not error");
assert_eq!(field, "one+one+one");
let field = multipart
.next()
.await
.expect("multipart should have two fields")
.expect("multipart body should be well formatted")
.bytes(usize::MAX)
.await
.expect("field data should not be size limited")
.expect("reading field data should not error");
assert_eq!(field, "two+two+two");
}
#[actix_rt::test]
async fn bytes_limited() {
let (body, headers) = create_double_request_with_header();
let mut multipart = Multipart::new(&headers, stream::iter([Ok(body)]));
multipart
.next()
.await
.expect("multipart should have two fields")
.expect("multipart body should be well formatted")
.bytes(8) // smaller than data size
.await
.expect_err("field data should be size limited");
// next field still readable
let field = multipart
.next()
.await
.expect("multipart should have two fields")
.expect("multipart body should be well formatted")
.bytes(usize::MAX)
.await
.expect("field data should not be size limited")
.expect("reading field data should not error");
assert_eq!(field, "two+two+two");
}
}
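The removed `field.rs` above is where `Field::bytes(limit)` lives; a short, hedged handler sketch of that API (the 1 MiB limit is chosen arbitrarily):

```rust
use actix_multipart::Multipart;
use actix_web::HttpResponse;
use futures_util::StreamExt as _;

// Each field is buffered up to 1 MiB. Oversized fields surface the
// LimitExceeded error, but the stream is still drained so that the
// following fields remain readable, as the tests above demonstrate.
async fn index(mut payload: Multipart) -> actix_web::Result<HttpResponse> {
    while let Some(item) = payload.next().await {
        let mut field = item?;
        match field.bytes(1024 * 1024).await {
            Ok(Ok(data)) => println!("field {:?}: {} bytes", field.name(), data.len()),
            Ok(Err(err)) => return Err(err.into()),
            Err(_limit_exceeded) => println!("field {:?} was too large", field.name()),
        }
    }
    Ok(HttpResponse::Ok().finish())
}
```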

View File

@@ -1,6 +1,7 @@
//! Reads a field into memory. //! Reads a field into memory.
use actix_web::{web::BytesMut, HttpRequest}; use actix_web::HttpRequest;
use bytes::BytesMut;
use futures_core::future::LocalBoxFuture; use futures_core::future::LocalBoxFuture;
use futures_util::TryStreamExt as _; use futures_util::TryStreamExt as _;
use mime::Mime; use mime::Mime;
@@ -14,7 +15,7 @@ use crate::{
#[derive(Debug)] #[derive(Debug)]
pub struct Bytes { pub struct Bytes {
/// The data. /// The data.
pub data: actix_web::web::Bytes, pub data: bytes::Bytes,
/// The value of the `Content-Type` header. /// The value of the `Content-Type` header.
pub content_type: Option<Mime>, pub content_type: Option<Mime>,
@@ -40,9 +41,8 @@ impl<'t> FieldReader<'t> for Bytes {
content_type: field.content_type().map(ToOwned::to_owned), content_type: field.content_type().map(ToOwned::to_owned),
file_name: field file_name: field
.content_disposition() .content_disposition()
.expect("multipart form fields should have a content-disposition header")
.get_filename() .get_filename()
.map(ToOwned::to_owned), .map(str::to_owned),
}) })
}) })
} }
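For orientation, a minimal sketch of how this `Bytes` reader is typically plugged into a derived form; the struct and field names are illustrative and not taken from this diff:

```rust
use actix_multipart::form::{bytes::Bytes, MultipartForm};

#[derive(MultipartForm)]
struct Upload {
    // the whole field is buffered in memory; size is bounded by the form's configured limits
    file: Bytes,
}
```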

View File

@@ -32,6 +32,7 @@ where
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future { fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future {
Box::pin(async move { Box::pin(async move {
let config = JsonConfig::from_req(req); let config = JsonConfig::from_req(req);
let field_name = field.name().to_owned();
if config.validate_content_type { if config.validate_content_type {
let valid = if let Some(mime) = field.content_type() { let valid = if let Some(mime) = field.content_type() {
@@ -42,19 +43,17 @@ where
if !valid { if !valid {
return Err(MultipartError::Field { return Err(MultipartError::Field {
name: field.form_field_name, field_name,
source: config.map_error(req, JsonFieldError::ContentType), source: config.map_error(req, JsonFieldError::ContentType),
}); });
} }
} }
let form_field_name = field.form_field_name.clone();
let bytes = Bytes::read_field(req, field, limits).await?; let bytes = Bytes::read_field(req, field, limits).await?;
Ok(Json(serde_json::from_slice(bytes.data.as_ref()).map_err( Ok(Json(serde_json::from_slice(bytes.data.as_ref()).map_err(
|err| MultipartError::Field { |err| MultipartError::Field {
name: form_field_name, field_name,
source: config.map_error(req, JsonFieldError::Deserialize(err)), source: config.map_error(req, JsonFieldError::Deserialize(err)),
}, },
)?)) )?))
@@ -132,12 +131,14 @@ impl Default for JsonConfig {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::collections::HashMap; use std::{collections::HashMap, io::Cursor};
use actix_web::{http::StatusCode, web, web::Bytes, App, HttpResponse, Responder}; use actix_multipart_rfc7578::client::multipart;
use actix_web::{http::StatusCode, web, App, HttpResponse, Responder};
use crate::form::{ use crate::form::{
json::{Json, JsonConfig}, json::{Json, JsonConfig},
tests::send_form,
MultipartForm, MultipartForm,
}; };
@@ -154,8 +155,6 @@ mod tests {
HttpResponse::Ok().finish() HttpResponse::Ok().finish()
} }
const TEST_JSON: &str = r#"{"key1": "value1", "key2": "value2"}"#;
#[actix_rt::test] #[actix_rt::test]
async fn test_json_without_content_type() { async fn test_json_without_content_type() {
let srv = actix_test::start(|| { let srv = actix_test::start(|| {
@@ -164,16 +163,10 @@ mod tests {
.app_data(JsonConfig::default().validate_content_type(false)) .app_data(JsonConfig::default().validate_content_type(false))
}); });
let (body, headers) = crate::test::create_form_data_payload_and_headers( let mut form = multipart::Form::default();
"json", form.add_text("json", "{\"key1\": \"value1\", \"key2\": \"value2\"}");
None, let response = send_form(&srv, form, "/").await;
None, assert_eq!(response.status(), StatusCode::OK);
Bytes::from_static(TEST_JSON.as_bytes()),
);
let mut req = srv.post("/");
*req.headers_mut() = headers;
let res = req.send_body(body).await.unwrap();
assert_eq!(res.status(), StatusCode::OK);
} }
#[actix_rt::test] #[actix_rt::test]
@@ -185,27 +178,17 @@ mod tests {
}); });
// Deny because wrong content type // Deny because wrong content type
let (body, headers) = crate::test::create_form_data_payload_and_headers( let bytes = Cursor::new("{\"key1\": \"value1\", \"key2\": \"value2\"}");
"json", let mut form = multipart::Form::default();
None, form.add_reader_file_with_mime("json", bytes, "", mime::APPLICATION_OCTET_STREAM);
Some(mime::APPLICATION_OCTET_STREAM), let response = send_form(&srv, form, "/").await;
Bytes::from_static(TEST_JSON.as_bytes()), assert_eq!(response.status(), StatusCode::BAD_REQUEST);
);
let mut req = srv.post("/");
*req.headers_mut() = headers;
let res = req.send_body(body).await.unwrap();
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
// Allow because correct content type // Allow because correct content type
let (body, headers) = crate::test::create_form_data_payload_and_headers( let bytes = Cursor::new("{\"key1\": \"value1\", \"key2\": \"value2\"}");
"json", let mut form = multipart::Form::default();
None, form.add_reader_file_with_mime("json", bytes, "", mime::APPLICATION_JSON);
Some(mime::APPLICATION_JSON), let response = send_form(&srv, form, "/").await;
Bytes::from_static(TEST_JSON.as_bytes()), assert_eq!(response.status(), StatusCode::OK);
);
let mut req = srv.post("/");
*req.headers_mut() = headers;
let res = req.send_body(body).await.unwrap();
assert_eq!(res.status(), StatusCode::OK);
} }
} }

View File

@@ -33,14 +33,6 @@ pub trait FieldReader<'t>: Sized + Any {
type Future: Future<Output = Result<Self, MultipartError>>; type Future: Future<Output = Result<Self, MultipartError>>;
/// The form will call this function to handle the field. /// The form will call this function to handle the field.
///
/// # Panics
///
/// When reading the `field` payload using its `Stream` implementation, polling (manually or via
/// `next()`/`try_next()`) may panic after the payload is exhausted. If this is a problem for
/// your implementation of this method, you should [`fuse()`] the `Field` first.
///
/// [`fuse()`]: futures_util::stream::StreamExt::fuse()
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future; fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future;
} }
@@ -80,13 +72,13 @@ where
state: &'t mut State, state: &'t mut State,
duplicate_field: DuplicateField, duplicate_field: DuplicateField,
) -> Self::Future { ) -> Self::Future {
if state.contains_key(&field.form_field_name) { if state.contains_key(field.name()) {
match duplicate_field { match duplicate_field {
DuplicateField::Ignore => return Box::pin(ready(Ok(()))), DuplicateField::Ignore => return Box::pin(ready(Ok(()))),
DuplicateField::Deny => { DuplicateField::Deny => {
return Box::pin(ready(Err(MultipartError::DuplicateField( return Box::pin(ready(Err(MultipartError::DuplicateField(
field.form_field_name, field.name().to_owned(),
)))) ))))
} }
@@ -95,7 +87,7 @@ where
} }
Box::pin(async move { Box::pin(async move {
let field_name = field.form_field_name.clone(); let field_name = field.name().to_owned();
let t = T::read_field(req, field, limits).await?; let t = T::read_field(req, field, limits).await?;
state.insert(field_name, Box::new(t)); state.insert(field_name, Box::new(t));
Ok(()) Ok(())
@@ -123,8 +115,10 @@ where
Box::pin(async move { Box::pin(async move {
// Note: Vec GroupReader always allows duplicates // Note: Vec GroupReader always allows duplicates
let field_name = field.name().to_owned();
let vec = state let vec = state
.entry(field.form_field_name.clone()) .entry(field_name)
.or_insert_with(|| Box::<Vec<T>>::default()) .or_insert_with(|| Box::<Vec<T>>::default())
.downcast_mut::<Vec<T>>() .downcast_mut::<Vec<T>>()
.unwrap(); .unwrap();
@@ -157,13 +151,13 @@ where
state: &'t mut State, state: &'t mut State,
duplicate_field: DuplicateField, duplicate_field: DuplicateField,
) -> Self::Future { ) -> Self::Future {
if state.contains_key(&field.form_field_name) { if state.contains_key(field.name()) {
match duplicate_field { match duplicate_field {
DuplicateField::Ignore => return Box::pin(ready(Ok(()))), DuplicateField::Ignore => return Box::pin(ready(Ok(()))),
DuplicateField::Deny => { DuplicateField::Deny => {
return Box::pin(ready(Err(MultipartError::DuplicateField( return Box::pin(ready(Err(MultipartError::DuplicateField(
field.form_field_name, field.name().to_owned(),
)))) ))))
} }
@@ -172,7 +166,7 @@ where
} }
Box::pin(async move { Box::pin(async move {
let field_name = field.form_field_name.clone(); let field_name = field.name().to_owned();
let t = T::read_field(req, field, limits).await?; let t = T::read_field(req, field, limits).await?;
state.insert(field_name, Box::new(t)); state.insert(field_name, Box::new(t));
Ok(()) Ok(())
@@ -279,9 +273,6 @@ impl Limits {
/// [`MultipartCollect`] trait. You should use the [`macro@MultipartForm`] macro to derive this /// [`MultipartCollect`] trait. You should use the [`macro@MultipartForm`] macro to derive this
/// for your struct. /// for your struct.
/// ///
/// Note that this extractor rejects requests with any other Content-Type such as `multipart/mixed`,
/// `multipart/related`, or non-multipart media types.
///
/// Add a [`MultipartFormConfig`] to your app data to configure extraction. /// Add a [`MultipartFormConfig`] to your app data to configure extraction.
#[derive(Deref, DerefMut)] #[derive(Deref, DerefMut)]
pub struct MultipartForm<T: MultipartCollect>(pub T); pub struct MultipartForm<T: MultipartCollect>(pub T);
@@ -295,24 +286,14 @@ impl<T: MultipartCollect> MultipartForm<T> {
impl<T> FromRequest for MultipartForm<T> impl<T> FromRequest for MultipartForm<T>
where where
T: MultipartCollect + 'static, T: MultipartCollect,
{ {
type Error = Error; type Error = Error;
type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>; type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>;
#[inline] #[inline]
fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future { fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future {
let mut multipart = Multipart::from_req(req, payload); let mut payload = Multipart::new(req.headers(), payload.take());
let content_type = match multipart.content_type_or_bail() {
Ok(content_type) => content_type,
Err(err) => return Box::pin(ready(Err(err.into()))),
};
if content_type.subtype() != mime::FORM_DATA {
// this extractor only supports multipart/form-data
return Box::pin(ready(Err(MultipartError::ContentTypeIncompatible.into())));
};
let config = MultipartFormConfig::from_req(req); let config = MultipartFormConfig::from_req(req);
let mut limits = Limits::new(config.total_limit, config.memory_limit); let mut limits = Limits::new(config.total_limit, config.memory_limit);
@@ -324,29 +305,21 @@ where
Box::pin( Box::pin(
async move { async move {
let mut state = State::default(); let mut state = State::default();
// We need to ensure field limits are shared for all instances of this field name
// ensure limits are shared for all fields with this name
let mut field_limits = HashMap::<String, Option<usize>>::new(); let mut field_limits = HashMap::<String, Option<usize>>::new();
while let Some(field) = multipart.try_next().await? { while let Some(field) = payload.try_next().await? {
debug_assert!(
!field.form_field_name.is_empty(),
"multipart form fields should have names",
);
// Retrieve the limit for this field // Retrieve the limit for this field
let entry = field_limits let entry = field_limits
.entry(field.form_field_name.clone()) .entry(field.name().to_owned())
.or_insert_with(|| T::limit(&field.form_field_name)); .or_insert_with(|| T::limit(field.name()));
limits.field_limit_remaining = entry.to_owned();
limits.field_limit_remaining.clone_from(entry);
T::handle_field(&req, field, &mut limits, &mut state).await?; T::handle_field(&req, field, &mut limits, &mut state).await?;
// Update the stored limit // Update the stored limit
*entry = limits.field_limit_remaining; *entry = limits.field_limit_remaining;
} }
let inner = T::from_state(state)?; let inner = T::from_state(state)?;
Ok(MultipartForm(inner)) Ok(MultipartForm(inner))
} }
@@ -422,20 +395,11 @@ mod tests {
use actix_http::encoding::Decoder; use actix_http::encoding::Decoder;
use actix_multipart_rfc7578::client::multipart; use actix_multipart_rfc7578::client::multipart;
use actix_test::TestServer; use actix_test::TestServer;
use actix_web::{ use actix_web::{dev::Payload, http::StatusCode, web, App, HttpResponse, Responder};
dev::Payload, http::StatusCode, web, App, HttpRequest, HttpResponse, Resource, Responder,
};
use awc::{Client, ClientResponse}; use awc::{Client, ClientResponse};
use futures_core::future::LocalBoxFuture;
use futures_util::TryStreamExt as _;
use super::MultipartForm; use super::MultipartForm;
use crate::{ use crate::form::{bytes::Bytes, tempfile::TempFile, text::Text, MultipartFormConfig};
form::{
bytes::Bytes, tempfile::TempFile, text::Text, FieldReader, Limits, MultipartFormConfig,
},
Field, MultipartError,
};
pub async fn send_form( pub async fn send_form(
srv: &TestServer, srv: &TestServer,
@@ -769,84 +733,4 @@ mod tests {
let response = send_form(&srv, form, "/").await; let response = send_form(&srv, form, "/").await;
assert_eq!(response.status(), StatusCode::BAD_REQUEST); assert_eq!(response.status(), StatusCode::BAD_REQUEST);
} }
#[actix_rt::test]
async fn non_multipart_form_data() {
#[derive(MultipartForm)]
struct TestNonMultipartFormData {
#[allow(unused)]
#[multipart(limit = "30B")]
foo: Text<String>,
}
async fn non_multipart_form_data_route(
_form: MultipartForm<TestNonMultipartFormData>,
) -> String {
unreachable!("request is sent with multipart/mixed");
}
let srv = actix_test::start(|| {
App::new().route("/", web::post().to(non_multipart_form_data_route))
});
let mut form = multipart::Form::default();
form.add_text("foo", "foo");
// mangle content-type, keeping the boundary
let ct = form.content_type().replacen("/form-data", "/mixed", 1);
let res = Client::default()
.post(srv.url("/"))
.content_type(ct)
.send_body(multipart::Body::from(form))
.await
.unwrap();
assert_eq!(res.status(), StatusCode::UNSUPPORTED_MEDIA_TYPE);
}
#[should_panic(expected = "called `Result::unwrap()` on an `Err` value: Connect(Disconnected)")]
#[actix_web::test]
async fn field_try_next_panic() {
#[derive(Debug)]
struct NullSink;
impl<'t> FieldReader<'t> for NullSink {
type Future = LocalBoxFuture<'t, Result<Self, MultipartError>>;
fn read_field(
_: &'t HttpRequest,
mut field: Field,
_limits: &'t mut Limits,
) -> Self::Future {
Box::pin(async move {
// exhaust field stream
while let Some(_chunk) = field.try_next().await? {}
// poll again, crash
let _post = field.try_next().await;
Ok(Self)
})
}
}
#[allow(dead_code)]
#[derive(MultipartForm)]
struct NullSinkForm {
foo: NullSink,
}
async fn null_sink(_form: MultipartForm<NullSinkForm>) -> impl Responder {
"unreachable"
}
let srv = actix_test::start(|| App::new().service(Resource::new("/").post(null_sink)));
let mut form = multipart::Form::default();
form.add_text("foo", "data is not important to this test");
// panics with Err(Connect(Disconnected)) due to form NullSink panic
let _res = send_form(&srv, form, "/").await;
}
} }
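As the extractor docs above note, extraction is configured through app data; a minimal sketch of registering a `MultipartFormConfig` (the limit value and route are illustrative):

```rust
use actix_multipart::form::MultipartFormConfig;
use actix_web::{web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // cap the combined size of all fields at 10 MiB (value chosen for the example)
            .app_data(MultipartFormConfig::default().total_limit(10 * 1024 * 1024))
            .route("/upload", web::post().to(|| async { HttpResponse::Ok().finish() }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```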

View File

@@ -42,36 +42,38 @@ impl<'t> FieldReader<'t> for TempFile {
fn read_field(req: &'t HttpRequest, mut field: Field, limits: &'t mut Limits) -> Self::Future { fn read_field(req: &'t HttpRequest, mut field: Field, limits: &'t mut Limits) -> Self::Future {
Box::pin(async move { Box::pin(async move {
let config = TempFileConfig::from_req(req); let config = TempFileConfig::from_req(req);
let field_name = field.name().to_owned();
let mut size = 0; let mut size = 0;
let file = config.create_tempfile().map_err(|err| { let file = config
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err)) .create_tempfile()
})?; .map_err(|err| config.map_error(req, &field_name, TempFileError::FileIo(err)))?;
let mut file_async = tokio::fs::File::from_std(file.reopen().map_err(|err| { let mut file_async =
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err)) tokio::fs::File::from_std(file.reopen().map_err(|err| {
})?); config.map_error(req, &field_name, TempFileError::FileIo(err))
})?);
while let Some(chunk) = field.try_next().await? { while let Some(chunk) = field.try_next().await? {
limits.try_consume_limits(chunk.len(), false)?; limits.try_consume_limits(chunk.len(), false)?;
size += chunk.len(); size += chunk.len();
file_async.write_all(chunk.as_ref()).await.map_err(|err| { file_async.write_all(chunk.as_ref()).await.map_err(|err| {
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err)) config.map_error(req, &field_name, TempFileError::FileIo(err))
})?; })?;
} }
file_async.flush().await.map_err(|err| { file_async
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err)) .flush()
})?; .await
.map_err(|err| config.map_error(req, &field_name, TempFileError::FileIo(err)))?;
Ok(TempFile { Ok(TempFile {
file, file,
content_type: field.content_type().map(ToOwned::to_owned), content_type: field.content_type().map(ToOwned::to_owned),
file_name: field file_name: field
.content_disposition() .content_disposition()
.expect("multipart form fields should have a content-disposition header")
.get_filename() .get_filename()
.map(ToOwned::to_owned), .map(str::to_owned),
size, size,
}) })
}) })
@@ -135,7 +137,7 @@ impl TempFileConfig {
}; };
MultipartError::Field { MultipartError::Field {
name: field_name.to_owned(), field_name: field_name.to_owned(),
source, source,
} }
} }

View File

@@ -36,6 +36,7 @@ where
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future { fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future {
Box::pin(async move { Box::pin(async move {
let config = TextConfig::from_req(req); let config = TextConfig::from_req(req);
let field_name = field.name().to_owned();
if config.validate_content_type { if config.validate_content_type {
let valid = if let Some(mime) = field.content_type() { let valid = if let Some(mime) = field.content_type() {
@@ -48,24 +49,22 @@ where
if !valid { if !valid {
return Err(MultipartError::Field { return Err(MultipartError::Field {
name: field.form_field_name, field_name,
source: config.map_error(req, TextError::ContentType), source: config.map_error(req, TextError::ContentType),
}); });
} }
} }
let form_field_name = field.form_field_name.clone();
let bytes = Bytes::read_field(req, field, limits).await?; let bytes = Bytes::read_field(req, field, limits).await?;
let text = str::from_utf8(&bytes.data).map_err(|err| MultipartError::Field { let text = str::from_utf8(&bytes.data).map_err(|err| MultipartError::Field {
name: form_field_name.clone(), field_name: field_name.clone(),
source: config.map_error(req, TextError::Utf8Error(err)), source: config.map_error(req, TextError::Utf8Error(err)),
})?; })?;
Ok(Text(serde_plain::from_str(text).map_err(|err| { Ok(Text(serde_plain::from_str(text).map_err(|err| {
MultipartError::Field { MultipartError::Field {
name: form_field_name, field_name,
source: config.map_error(req, TextError::Deserialize(err)), source: config.map_error(req, TextError::Deserialize(err)),
} }
})?)) })?))
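Likewise, a minimal sketch of the `Text` reader in use, mirroring the flow above; the struct, field name, and target type are illustrative:

```rust
use actix_multipart::form::{text::Text, MultipartForm};

#[derive(MultipartForm)]
struct OrderForm {
    // raw field bytes are validated as UTF-8, then deserialized via serde_plain
    quantity: Text<u32>,
}
```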

View File

@@ -1,51 +1,8 @@
//! Multipart form support for Actix Web. //! Multipart form support for Actix Web.
//!
//! # Examples
//!
//! ```no_run
//! use actix_web::{post, App, HttpServer, Responder};
//!
//! use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
//! use serde::Deserialize;
//!
//! #[derive(Debug, Deserialize)]
//! struct Metadata {
//! name: String,
//! }
//!
//! #[derive(Debug, MultipartForm)]
//! struct UploadForm {
//! #[multipart(limit = "100MB")]
//! file: TempFile,
//! json: MpJson<Metadata>,
//! }
//!
//! #[post("/videos")]
//! pub async fn post_video(MultipartForm(form): MultipartForm<UploadForm>) -> impl Responder {
//! format!(
//! "Uploaded file {}, with size: {}",
//! form.json.name, form.file.size
//! )
//! }
//!
//! #[actix_web::main]
//! async fn main() -> std::io::Result<()> {
//! HttpServer::new(move || App::new().service(post_video))
//! .bind(("127.0.0.1", 8080))?
//! .run()
//! .await
//! }
//! ```
//!
//! cURL request:
//!
//! ```sh
//! curl -v --request POST \
//! --url http://localhost:8080/videos \
//! -F 'json={"name": "Cargo.lock"};type=application/json' \
//! -F file=@./Cargo.lock
//! ```
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![allow(clippy::borrow_interior_mutable_const)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")] #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")] #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))] #![cfg_attr(docsrs, feature(doc_auto_cfg))]
@@ -56,15 +13,11 @@ extern crate self as actix_multipart;
mod error; mod error;
mod extractor; mod extractor;
pub(crate) mod field; mod server;
pub mod form; pub mod form;
mod multipart;
pub(crate) mod payload;
pub(crate) mod safety;
pub mod test;
pub use self::{ pub use self::{
error::Error as MultipartError, error::MultipartError,
field::{Field, LimitExceeded}, server::{Field, Multipart},
multipart::Multipart,
}; };

View File

@@ -1,977 +0,0 @@
//! Multipart response payload support.
use std::{
cell::RefCell,
pin::Pin,
rc::Rc,
task::{Context, Poll},
};
use actix_web::{
dev,
error::{ParseError, PayloadError},
http::header::{self, ContentDisposition, HeaderMap, HeaderName, HeaderValue},
web::Bytes,
HttpRequest,
};
use futures_core::stream::Stream;
use mime::Mime;
use crate::{
error::Error,
field::InnerField,
payload::{PayloadBuffer, PayloadRef},
safety::Safety,
Field,
};
const MAX_HEADERS: usize = 32;
/// The server-side implementation of `multipart/form-data` requests.
///
/// This will parse the incoming stream into `Field` instances via its `Stream` implementation.
/// Nested multipart streams are not supported and produce an error when encountered.
pub struct Multipart {
flow: Flow,
safety: Safety,
}
enum Flow {
InFlight(Inner),
/// Error container is Some until an error is returned out of the flow.
Error(Option<Error>),
}
impl Multipart {
/// Creates multipart instance from parts.
pub fn new<S>(headers: &HeaderMap, stream: S) -> Self
where
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
{
match Self::find_ct_and_boundary(headers) {
Ok((ct, boundary)) => Self::from_ct_and_boundary(ct, boundary, stream),
Err(err) => Self::from_error(err),
}
}
/// Creates multipart instance from parts.
pub(crate) fn from_req(req: &HttpRequest, payload: &mut dev::Payload) -> Self {
match Self::find_ct_and_boundary(req.headers()) {
Ok((ct, boundary)) => Self::from_ct_and_boundary(ct, boundary, payload.take()),
Err(err) => Self::from_error(err),
}
}
/// Extract Content-Type and boundary info from headers.
pub(crate) fn find_ct_and_boundary(headers: &HeaderMap) -> Result<(Mime, String), Error> {
let content_type = headers
.get(&header::CONTENT_TYPE)
.ok_or(Error::ContentTypeMissing)?
.to_str()
.ok()
.and_then(|content_type| content_type.parse::<Mime>().ok())
.ok_or(Error::ContentTypeParse)?;
if content_type.type_() != mime::MULTIPART {
return Err(Error::ContentTypeIncompatible);
}
let boundary = content_type
.get_param(mime::BOUNDARY)
.ok_or(Error::BoundaryMissing)?
.as_str()
.to_owned();
Ok((content_type, boundary))
}
/// Constructs a new multipart reader from given Content-Type, boundary, and stream.
pub(crate) fn from_ct_and_boundary<S>(ct: Mime, boundary: String, stream: S) -> Multipart
where
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
{
Multipart {
safety: Safety::new(),
flow: Flow::InFlight(Inner {
payload: PayloadRef::new(PayloadBuffer::new(stream)),
content_type: ct,
boundary,
state: State::FirstBoundary,
item: Item::None,
}),
}
}
/// Constructs a new multipart reader from given `MultipartError`.
pub(crate) fn from_error(err: Error) -> Multipart {
Multipart {
flow: Flow::Error(Some(err)),
safety: Safety::new(),
}
}
/// Returns the request's parsed Content-Type, or returns the stored error.
pub(crate) fn content_type_or_bail(&mut self) -> Result<mime::Mime, Error> {
match self.flow {
Flow::InFlight(ref inner) => Ok(inner.content_type.clone()),
Flow::Error(ref mut err) => Err(err
.take()
.expect("error should not be taken after it was returned")),
}
}
}
impl Stream for Multipart {
type Item = Result<Field, Error>;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let this = self.get_mut();
match this.flow {
Flow::InFlight(ref mut inner) => {
if let Some(mut buffer) = inner.payload.get_mut(&this.safety) {
// check safety and poll read payload to buffer.
buffer.poll_stream(cx)?;
} else if !this.safety.is_clean() {
// safety violation
return Poll::Ready(Some(Err(Error::NotConsumed)));
} else {
return Poll::Pending;
}
inner.poll(&this.safety, cx)
}
Flow::Error(ref mut err) => Poll::Ready(Some(Err(err
.take()
.expect("Multipart polled after finish")))),
}
}
}
#[derive(PartialEq, Debug)]
enum State {
/// Skip data until first boundary.
FirstBoundary,
/// Reading boundary.
Boundary,
/// Reading Headers.
Headers,
/// Stream EOF.
Eof,
}
enum Item {
None,
Field(Rc<RefCell<InnerField>>),
}
struct Inner {
/// Request's payload stream & buffer.
payload: PayloadRef,
/// Request's Content-Type.
///
/// Guaranteed to have "multipart" top-level media type, i.e., `multipart/*`.
content_type: Mime,
/// Field boundary.
boundary: String,
state: State,
item: Item,
}
impl Inner {
fn read_field_headers(payload: &mut PayloadBuffer) -> Result<Option<HeaderMap>, Error> {
match payload.read_until(b"\r\n\r\n")? {
None => {
if payload.eof {
Err(Error::Incomplete)
} else {
Ok(None)
}
}
Some(bytes) => {
let mut hdrs = [httparse::EMPTY_HEADER; MAX_HEADERS];
match httparse::parse_headers(&bytes, &mut hdrs).map_err(ParseError::from)? {
httparse::Status::Complete((_, hdrs)) => {
// convert headers
let mut headers = HeaderMap::with_capacity(hdrs.len());
for h in hdrs {
let name =
HeaderName::try_from(h.name).map_err(|_| ParseError::Header)?;
let value =
HeaderValue::try_from(h.value).map_err(|_| ParseError::Header)?;
headers.append(name, value);
}
Ok(Some(headers))
}
httparse::Status::Partial => Err(ParseError::Header.into()),
}
}
}
}
/// Reads a field boundary from the payload buffer (and discards it).
///
/// Reads "in-between" and "final" boundaries. E.g. for boundary = "foo":
///
/// ```plain
/// --foo <-- in-between fields
/// --foo-- <-- end of request body, should be followed by EOF
/// ```
///
/// Returns:
///
/// - `Ok(Some(true))` - final field boundary read (EOF)
/// - `Ok(Some(false))` - field boundary read
/// - `Ok(None)` - boundary not found, more data needs reading
/// - `Err(BoundaryMissing)` - multipart boundary is missing
fn read_boundary(payload: &mut PayloadBuffer, boundary: &str) -> Result<Option<bool>, Error> {
// TODO: need to read epilogue
let chunk = match payload.readline_or_eof()? {
// TODO: this might be okay as a let Some() else return Ok(None)
None => return Ok(payload.eof.then_some(true)),
Some(chunk) => chunk,
};
const BOUNDARY_MARKER: &[u8] = b"--";
const LINE_BREAK: &[u8] = b"\r\n";
let boundary_len = boundary.len();
if chunk.len() < boundary_len + 2 + 2
|| !chunk.starts_with(BOUNDARY_MARKER)
|| &chunk[2..boundary_len + 2] != boundary.as_bytes()
{
return Err(Error::BoundaryMissing);
}
// chunk facts:
// - long enough to contain boundary + 2 markers or 1 marker and line-break
// - starts with boundary marker
// - chunk contains correct boundary
if &chunk[boundary_len + 2..] == LINE_BREAK {
// boundary is followed by line-break, indicating more fields to come
return Ok(Some(false));
}
// boundary is followed by marker
if &chunk[boundary_len + 2..boundary_len + 4] == BOUNDARY_MARKER
&& (
// chunk is exactly boundary len + 2 markers
chunk.len() == boundary_len + 2 + 2
// final boundary is allowed to end with a line-break
|| &chunk[boundary_len + 4..] == LINE_BREAK
)
{
return Ok(Some(true));
}
Err(Error::BoundaryMissing)
}
fn skip_until_boundary(
payload: &mut PayloadBuffer,
boundary: &str,
) -> Result<Option<bool>, Error> {
let mut eof = false;
loop {
match payload.readline()? {
Some(chunk) => {
if chunk.is_empty() {
return Err(Error::BoundaryMissing);
}
if chunk.len() < boundary.len() {
continue;
}
if &chunk[..2] == b"--" && &chunk[2..chunk.len() - 2] == boundary.as_bytes() {
break;
} else {
if chunk.len() < boundary.len() + 2 {
continue;
}
let b: &[u8] = boundary.as_ref();
if &chunk[..boundary.len()] == b
&& &chunk[boundary.len()..boundary.len() + 2] == b"--"
{
eof = true;
break;
}
}
}
None => {
return if payload.eof {
Err(Error::Incomplete)
} else {
Ok(None)
};
}
}
}
Ok(Some(eof))
}
fn poll(&mut self, safety: &Safety, cx: &Context<'_>) -> Poll<Option<Result<Field, Error>>> {
if self.state == State::Eof {
Poll::Ready(None)
} else {
// release field
loop {
// Nested multipart streams of fields have to be consumed
// before switching to the next one
if safety.current() {
let stop = match self.item {
Item::Field(ref mut field) => match field.borrow_mut().poll(safety) {
Poll::Pending => return Poll::Pending,
Poll::Ready(Some(Ok(_))) => continue,
Poll::Ready(Some(Err(err))) => return Poll::Ready(Some(Err(err))),
Poll::Ready(None) => true,
},
Item::None => false,
};
if stop {
self.item = Item::None;
}
if let Item::None = self.item {
break;
}
}
}
let field_headers = if let Some(mut payload) = self.payload.get_mut(safety) {
match self.state {
// read until first boundary
State::FirstBoundary => {
match Inner::skip_until_boundary(&mut payload, &self.boundary)? {
None => return Poll::Pending,
Some(eof) => {
if eof {
self.state = State::Eof;
return Poll::Ready(None);
} else {
self.state = State::Headers;
}
}
}
}
// read boundary
State::Boundary => match Inner::read_boundary(&mut payload, &self.boundary)? {
None => return Poll::Pending,
Some(eof) => {
if eof {
self.state = State::Eof;
return Poll::Ready(None);
} else {
self.state = State::Headers;
}
}
},
_ => {}
}
// read field headers for next field
if self.state == State::Headers {
if let Some(headers) = Inner::read_field_headers(&mut payload)? {
self.state = State::Boundary;
headers
} else {
return Poll::Pending;
}
} else {
unreachable!()
}
} else {
log::debug!("NotReady: field is in flight");
return Poll::Pending;
};
let field_content_disposition = field_headers
.get(&header::CONTENT_DISPOSITION)
.and_then(|cd| ContentDisposition::from_raw(cd).ok())
.filter(|content_disposition| {
matches!(
content_disposition.disposition,
header::DispositionType::FormData,
)
});
let form_field_name = if self.content_type.subtype() == mime::FORM_DATA {
// According to RFC 7578 §4.2, which relates to "multipart/form-data" requests
// specifically, fields must have a Content-Disposition header, its disposition
// type must be set as "form-data", and it must have a name parameter.
let Some(cd) = &field_content_disposition else {
return Poll::Ready(Some(Err(Error::ContentDispositionMissing)));
};
let Some(field_name) = cd.get_name() else {
return Poll::Ready(Some(Err(Error::ContentDispositionNameMissing)));
};
Some(field_name.to_owned())
} else {
None
};
// TODO: check out other multipart/* RFCs for specific requirements
let field_content_type: Option<Mime> = field_headers
.get(&header::CONTENT_TYPE)
.and_then(|ct| ct.to_str().ok())
.and_then(|ct| ct.parse().ok());
self.state = State::Boundary;
// nested multipart stream is not supported
if let Some(mime) = &field_content_type {
if mime.type_() == mime::MULTIPART {
return Poll::Ready(Some(Err(Error::Nested)));
}
}
let field_inner =
InnerField::new_in_rc(self.payload.clone(), self.boundary.clone(), &field_headers)?;
self.item = Item::Field(Rc::clone(&field_inner));
Poll::Ready(Some(Ok(Field::new(
field_content_type,
field_content_disposition,
form_field_name,
field_headers,
safety.clone(cx),
field_inner,
))))
}
}
}
impl Drop for Inner {
fn drop(&mut self) {
// `Item::Field` has to be dropped first because of `Safety`.
self.item = Item::None;
}
}
#[cfg(test)]
mod tests {
use std::time::Duration;
use actix_http::h1;
use actix_web::{
http::header::{DispositionParam, DispositionType},
rt,
test::TestRequest,
web::{BufMut as _, BytesMut},
FromRequest,
};
use assert_matches::assert_matches;
use futures_test::stream::StreamTestExt as _;
use futures_util::{future::lazy, stream, StreamExt as _};
use tokio::sync::mpsc;
use tokio_stream::wrappers::UnboundedReceiverStream;
use super::*;
const BOUNDARY: &str = "abbc761f78ff4d7cb7573b5a23f96ef0";
#[actix_rt::test]
async fn test_boundary() {
let headers = HeaderMap::new();
match Multipart::find_ct_and_boundary(&headers) {
Err(Error::ContentTypeMissing) => {}
_ => unreachable!("should not happen"),
}
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static("test"),
);
match Multipart::find_ct_and_boundary(&headers) {
Err(Error::ContentTypeParse) => {}
_ => unreachable!("should not happen"),
}
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static("multipart/mixed"),
);
match Multipart::find_ct_and_boundary(&headers) {
Err(Error::BoundaryMissing) => {}
_ => unreachable!("should not happen"),
}
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"5c02368e880e436dab70ed54e1c58209\"",
),
);
assert_eq!(
Multipart::find_ct_and_boundary(&headers).unwrap().1,
"5c02368e880e436dab70ed54e1c58209",
);
}
fn create_stream() -> (
mpsc::UnboundedSender<Result<Bytes, PayloadError>>,
impl Stream<Item = Result<Bytes, PayloadError>>,
) {
let (tx, rx) = mpsc::unbounded_channel();
(
tx,
UnboundedReceiverStream::new(rx).map(|res| res.map_err(|_| panic!())),
)
}
fn create_simple_request_with_header() -> (Bytes, HeaderMap) {
let (body, headers) = crate::test::create_form_data_payload_and_headers_with_boundary(
BOUNDARY,
"file",
Some("fn.txt".to_owned()),
Some(mime::TEXT_PLAIN_UTF_8),
Bytes::from_static(b"data"),
);
let mut buf = BytesMut::with_capacity(body.len() + 14);
// add junk before the form to check that data before the first boundary is skipped
buf.put("testasdadsad\r\n".as_bytes());
buf.put(body);
(buf.freeze(), headers)
}
// TODO: use test utility when multi-file support is introduced
fn create_double_request_with_header() -> (Bytes, HeaderMap) {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
data\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0--\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
(bytes, headers)
}
#[actix_rt::test]
async fn test_multipart_no_end_crlf() {
let (sender, payload) = create_stream();
let (mut bytes, headers) = create_double_request_with_header();
let bytes_stripped = bytes.split_to(bytes.len()); // strip crlf
sender.send(Ok(bytes_stripped)).unwrap();
drop(sender); // eof
let mut multipart = Multipart::new(&headers, payload);
match multipart.next().await.unwrap() {
Ok(_) => {}
_ => unreachable!(),
}
match multipart.next().await.unwrap() {
Ok(_) => {}
_ => unreachable!(),
}
match multipart.next().await {
None => {}
_ => unreachable!(),
}
}
#[actix_rt::test]
async fn test_multipart() {
let (sender, payload) = create_stream();
let (bytes, headers) = create_double_request_with_header();
sender.send(Ok(bytes)).unwrap();
let mut multipart = Multipart::new(&headers, payload);
match multipart.next().await {
Some(Ok(mut field)) => {
let cd = field.content_disposition().unwrap();
assert_eq!(cd.disposition, DispositionType::FormData);
assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
match field.next().await.unwrap() {
Ok(chunk) => assert_eq!(chunk, "test"),
_ => unreachable!(),
}
match field.next().await {
None => {}
_ => unreachable!(),
}
}
_ => unreachable!(),
}
match multipart.next().await.unwrap() {
Ok(mut field) => {
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
match field.next().await {
Some(Ok(chunk)) => assert_eq!(chunk, "data"),
_ => unreachable!(),
}
match field.next().await {
None => {}
_ => unreachable!(),
}
}
_ => unreachable!(),
}
match multipart.next().await {
None => {}
_ => unreachable!(),
}
}
// Loops, collecting all bytes until end-of-field
async fn get_whole_field(field: &mut Field) -> BytesMut {
let mut b = BytesMut::new();
loop {
match field.next().await {
Some(Ok(chunk)) => b.extend_from_slice(&chunk),
None => return b,
_ => unreachable!(),
}
}
}
#[actix_rt::test]
async fn test_stream() {
let (bytes, headers) = create_double_request_with_header();
let payload = stream::iter(bytes)
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
.interleave_pending();
let mut multipart = Multipart::new(&headers, payload);
match multipart.next().await.unwrap() {
Ok(mut field) => {
let cd = field.content_disposition().unwrap();
assert_eq!(cd.disposition, DispositionType::FormData);
assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
assert_eq!(get_whole_field(&mut field).await, "test");
}
_ => unreachable!(),
}
match multipart.next().await {
Some(Ok(mut field)) => {
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
assert_eq!(get_whole_field(&mut field).await, "data");
}
_ => unreachable!(),
}
match multipart.next().await {
None => {}
_ => unreachable!(),
}
}
#[actix_rt::test]
async fn test_basic() {
let (_, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(payload.buf.len(), 0);
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(None, payload.read_max(1).unwrap());
}
#[actix_rt::test]
async fn test_eof() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(None, payload.read_max(4).unwrap());
sender.feed_data(Bytes::from("data"));
sender.feed_eof();
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(Some(Bytes::from("data")), payload.read_max(4).unwrap());
assert_eq!(payload.buf.len(), 0);
assert!(payload.read_max(1).is_err());
assert!(payload.eof);
}
#[actix_rt::test]
async fn test_err() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(None, payload.read_max(1).unwrap());
sender.set_error(PayloadError::Incomplete(None));
lazy(|cx| payload.poll_stream(cx)).await.err().unwrap();
}
#[actix_rt::test]
async fn read_max() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
sender.feed_data(Bytes::from("line1"));
sender.feed_data(Bytes::from("line2"));
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(payload.buf.len(), 10);
assert_eq!(Some(Bytes::from("line1")), payload.read_max(5).unwrap());
assert_eq!(payload.buf.len(), 5);
assert_eq!(Some(Bytes::from("line2")), payload.read_max(5).unwrap());
assert_eq!(payload.buf.len(), 0);
}
#[actix_rt::test]
async fn read_exactly() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(None, payload.read_exact(2));
sender.feed_data(Bytes::from("line1"));
sender.feed_data(Bytes::from("line2"));
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(Some(Bytes::from_static(b"li")), payload.read_exact(2));
assert_eq!(payload.buf.len(), 8);
assert_eq!(Some(Bytes::from_static(b"ne1l")), payload.read_exact(4));
assert_eq!(payload.buf.len(), 4);
}
#[actix_rt::test]
async fn read_until() {
let (mut sender, payload) = h1::Payload::create(false);
let mut payload = PayloadBuffer::new(payload);
assert_eq!(None, payload.read_until(b"ne").unwrap());
sender.feed_data(Bytes::from("line1"));
sender.feed_data(Bytes::from("line2"));
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
assert_eq!(
Some(Bytes::from("line")),
payload.read_until(b"ne").unwrap()
);
assert_eq!(payload.buf.len(), 6);
assert_eq!(
Some(Bytes::from("1line2")),
payload.read_until(b"2").unwrap()
);
assert_eq!(payload.buf.len(), 0);
}
#[actix_rt::test]
async fn test_multipart_from_error() {
let err = Error::ContentTypeMissing;
let mut multipart = Multipart::from_error(err);
assert!(multipart.next().await.unwrap().is_err())
}
#[actix_rt::test]
async fn test_multipart_from_boundary() {
let (_, payload) = create_stream();
let (_, headers) = create_simple_request_with_header();
let (ct, boundary) = Multipart::find_ct_and_boundary(&headers).unwrap();
let _ = Multipart::from_ct_and_boundary(ct, boundary, payload);
}
#[actix_rt::test]
async fn test_multipart_payload_consumption() {
// with sample payload and HttpRequest with no headers
let (_, inner_payload) = h1::Payload::create(false);
let mut payload = actix_web::dev::Payload::from(inner_payload);
let req = TestRequest::default().to_http_request();
// multipart should generate an error
let mut mp = Multipart::from_request(&req, &mut payload).await.unwrap();
assert!(mp.next().await.unwrap().is_err());
// and should not consume the payload
match payload {
actix_web::dev::Payload::H1 { .. } => {} //expected
_ => unreachable!(),
}
}
#[actix_rt::test]
async fn no_content_disposition_form_data() {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
Content-Length: 4\r\n\
\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/form-data; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
let payload = stream::iter(bytes)
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
.interleave_pending();
let mut multipart = Multipart::new(&headers, payload);
let res = multipart.next().await.unwrap();
assert_matches!(
res.expect_err(
"according to RFC 7578, form-data fields require a content-disposition header"
),
Error::ContentDispositionMissing
);
}
#[actix_rt::test]
async fn no_content_disposition_non_form_data() {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
Content-Length: 4\r\n\
\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
let payload = stream::iter(bytes)
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
.interleave_pending();
let mut multipart = Multipart::new(&headers, payload);
let res = multipart.next().await.unwrap();
res.unwrap();
}
#[actix_rt::test]
async fn no_name_in_form_data_content_disposition() {
let bytes = Bytes::from(
"testasdadsad\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
Content-Disposition: form-data; filename=\"fn.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
Content-Length: 4\r\n\
\r\n\
test\r\n\
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static(
"multipart/form-data; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
),
);
let payload = stream::iter(bytes)
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
.interleave_pending();
let mut multipart = Multipart::new(&headers, payload);
let res = multipart.next().await.unwrap();
assert_matches!(
res.expect_err("according to RFC 7578, form-data fields require a name attribute"),
Error::ContentDispositionNameMissing
);
}
#[actix_rt::test]
async fn test_drop_multipart_dont_hang() {
let (sender, payload) = create_stream();
let (bytes, headers) = create_simple_request_with_header();
sender.send(Ok(bytes)).unwrap();
drop(sender); // eof
let mut multipart = Multipart::new(&headers, payload);
let mut field = multipart.next().await.unwrap().unwrap();
drop(multipart);
// should fail immediately
match field.next().await {
Some(Err(Error::NotConsumed)) => {}
_ => panic!(),
};
}
#[actix_rt::test]
async fn test_drop_field_awaken_multipart() {
let (sender, payload) = create_stream();
let (bytes, headers) = create_double_request_with_header();
sender.send(Ok(bytes)).unwrap();
drop(sender); // eof
let mut multipart = Multipart::new(&headers, payload);
let mut field = multipart.next().await.unwrap().unwrap();
let task = rt::spawn(async move {
rt::time::sleep(Duration::from_millis(500)).await;
assert_eq!(field.next().await.unwrap().unwrap(), "test");
drop(field);
});
// dropping field should awaken current task
let _ = multipart.next().await.unwrap().unwrap();
task.await.unwrap();
}
}

View File

@@ -1,147 +0,0 @@
use std::{
cell::{RefCell, RefMut},
cmp, mem,
pin::Pin,
rc::Rc,
task::{Context, Poll},
};
use actix_web::{
error::PayloadError,
web::{Bytes, BytesMut},
};
use futures_core::stream::{LocalBoxStream, Stream};
use crate::{error::Error, safety::Safety};
pub(crate) struct PayloadRef {
payload: Rc<RefCell<PayloadBuffer>>,
}
impl PayloadRef {
pub(crate) fn new(payload: PayloadBuffer) -> PayloadRef {
PayloadRef {
payload: Rc::new(RefCell::new(payload)),
}
}
pub(crate) fn get_mut(&self, safety: &Safety) -> Option<RefMut<'_, PayloadBuffer>> {
if safety.current() {
Some(self.payload.borrow_mut())
} else {
None
}
}
}
impl Clone for PayloadRef {
fn clone(&self) -> PayloadRef {
PayloadRef {
payload: Rc::clone(&self.payload),
}
}
}
/// Payload buffer.
pub(crate) struct PayloadBuffer {
pub(crate) stream: LocalBoxStream<'static, Result<Bytes, PayloadError>>,
pub(crate) buf: BytesMut,
/// EOF flag. If true, no more payload reads will be attempted.
pub(crate) eof: bool,
}
impl PayloadBuffer {
/// Constructs new payload buffer.
pub(crate) fn new<S>(stream: S) -> Self
where
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
{
PayloadBuffer {
stream: Box::pin(stream),
buf: BytesMut::with_capacity(1_024), // pre-allocate 1KiB
eof: false,
}
}
pub(crate) fn poll_stream(&mut self, cx: &mut Context<'_>) -> Result<(), PayloadError> {
loop {
match Pin::new(&mut self.stream).poll_next(cx) {
Poll::Ready(Some(Ok(data))) => {
self.buf.extend_from_slice(&data);
// try to read more data
continue;
}
Poll::Ready(Some(Err(err))) => return Err(err),
Poll::Ready(None) => {
self.eof = true;
return Ok(());
}
Poll::Pending => return Ok(()),
}
}
}
/// Reads exact number of bytes.
#[cfg(test)]
pub(crate) fn read_exact(&mut self, size: usize) -> Option<Bytes> {
if size <= self.buf.len() {
Some(self.buf.split_to(size).freeze())
} else {
None
}
}
pub(crate) fn read_max(&mut self, size: u64) -> Result<Option<Bytes>, Error> {
if !self.buf.is_empty() {
let size = cmp::min(self.buf.len() as u64, size) as usize;
Ok(Some(self.buf.split_to(size).freeze()))
} else if self.eof {
Err(Error::Incomplete)
} else {
Ok(None)
}
}
/// Reads until specified ending.
///
/// Returns:
///
/// - `Ok(Some(chunk))` - `needle` is found, with chunk ending after needle
/// - `Err(Incomplete)` - `needle` is not found and we're at EOF
/// - `Ok(None)` - `needle` is not found otherwise
pub(crate) fn read_until(&mut self, needle: &[u8]) -> Result<Option<Bytes>, Error> {
match memchr::memmem::find(&self.buf, needle) {
// buffer exhausted and EOF without finding needle
None if self.eof => Err(Error::Incomplete),
// needle not yet found
None => Ok(None),
// needle found, split chunk out of buf
Some(idx) => Ok(Some(self.buf.split_to(idx + needle.len()).freeze())),
}
}
/// Reads bytes until new line delimiter.
#[inline]
pub(crate) fn readline(&mut self) -> Result<Option<Bytes>, Error> {
self.read_until(b"\n")
}
/// Reads bytes until new line delimiter or until EOF.
#[inline]
pub(crate) fn readline_or_eof(&mut self) -> Result<Option<Bytes>, Error> {
match self.readline() {
Err(Error::Incomplete) if self.eof => Ok(Some(self.buf.split().freeze())),
line => line,
}
}
/// Puts unprocessed data back to the buffer.
pub(crate) fn unprocessed(&mut self, data: Bytes) {
// TODO: use BytesMut::from when it's released, see https://github.com/tokio-rs/bytes/pull/710
let buf = BytesMut::from(&data[..]);
let buf = mem::replace(&mut self.buf, buf);
self.buf.extend_from_slice(&buf);
}
}
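The needle-splitting approach used by `read_until` above can be shown with a small self-contained sketch (not part of the removed file); the helper name `split_until` is made up:

```rust
use bytes::{Bytes, BytesMut};

/// Splits off everything up to and including `needle`, if it is present in `buf`.
fn split_until(buf: &mut BytesMut, needle: &[u8]) -> Option<Bytes> {
    let idx = memchr::memmem::find(&buf[..], needle)?;
    Some(buf.split_to(idx + needle.len()).freeze())
}

fn main() {
    let mut buf = BytesMut::from(&b"line1\nline2"[..]);
    assert_eq!(split_until(&mut buf, b"\n").as_deref(), Some(&b"line1\n"[..]));
    assert_eq!(split_until(&mut buf, b"\n"), None); // "line2" has no newline yet
    assert_eq!(&buf[..], &b"line2"[..]);
}
```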

View File

@@ -1,60 +0,0 @@
use std::{cell::Cell, marker::PhantomData, rc::Rc, task};
use local_waker::LocalWaker;
/// Counter. It tracks the number of clones of the payload and gives access to the payload only
/// to the top-most owner.
///
/// - When dropped, the parent task is awakened. This is to support the case where `Field` is
///   dropped in a different task than `Multipart`.
/// - Assumes that parent owners don't move to different tasks; only the top-most owner is allowed
///   to.
/// - If dropped while not the top-most owner, the `clean` flag is set to false.
#[derive(Debug)]
pub(crate) struct Safety {
task: LocalWaker,
level: usize,
payload: Rc<PhantomData<bool>>,
clean: Rc<Cell<bool>>,
}
impl Safety {
pub(crate) fn new() -> Safety {
let payload = Rc::new(PhantomData);
Safety {
task: LocalWaker::new(),
level: Rc::strong_count(&payload),
clean: Rc::new(Cell::new(true)),
payload,
}
}
pub(crate) fn current(&self) -> bool {
Rc::strong_count(&self.payload) == self.level && self.clean.get()
}
pub(crate) fn is_clean(&self) -> bool {
self.clean.get()
}
pub(crate) fn clone(&self, cx: &task::Context<'_>) -> Safety {
let payload = Rc::clone(&self.payload);
let s = Safety {
task: LocalWaker::new(),
level: Rc::strong_count(&payload),
clean: self.clean.clone(),
payload,
};
s.task.register(cx.waker());
s
}
}
impl Drop for Safety {
fn drop(&mut self) {
if Rc::strong_count(&self.payload) != self.level {
// Multipart dropped leaving a Field
self.clean.set(false);
}
self.task.wake();
}
}
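The ownership tracking above hinges on `Rc::strong_count`. The following standalone sketch (illustrative, not from the removed file) shows the same idea of detecting whether any clone of a marker still exists besides the original owner:

```rust
use std::rc::Rc;

fn main() {
    let marker = Rc::new(());
    let level = Rc::strong_count(&marker); // 1: we are the top-most (and only) owner

    let clone = Rc::clone(&marker);
    // another owner exists, so the original is no longer "current"
    assert_ne!(Rc::strong_count(&marker), level);

    drop(clone);
    // back to being the only owner
    assert_eq!(Rc::strong_count(&marker), level);
}
```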

File diff suppressed because it is too large

View File

@@ -1,220 +0,0 @@
//! Multipart testing utilities.
use actix_web::{
http::header::{self, HeaderMap},
web::{BufMut as _, Bytes, BytesMut},
};
use mime::Mime;
use rand::{
distributions::{Alphanumeric, DistString as _},
thread_rng,
};
const CRLF: &[u8] = b"\r\n";
const CRLF_CRLF: &[u8] = b"\r\n\r\n";
const HYPHENS: &[u8] = b"--";
const BOUNDARY_PREFIX: &str = "------------------------";
/// Constructs a `multipart/form-data` payload from bytes and metadata.
///
/// Returned header map can be extended or merged with existing headers.
///
/// Multipart boundary used is a random alphanumeric string.
///
/// # Examples
///
/// ```
/// use actix_multipart::test::create_form_data_payload_and_headers;
/// use actix_web::{test::TestRequest, web::Bytes};
/// use memchr::memmem::find;
///
/// let (body, headers) = create_form_data_payload_and_headers(
/// "foo",
/// Some("lorem.txt".to_owned()),
/// Some(mime::TEXT_PLAIN_UTF_8),
/// Bytes::from_static(b"Lorem ipsum."),
/// );
///
/// assert!(find(&body, b"foo").is_some());
/// assert!(find(&body, b"lorem.txt").is_some());
/// assert!(find(&body, b"text/plain; charset=utf-8").is_some());
/// assert!(find(&body, b"Lorem ipsum.").is_some());
///
/// let req = TestRequest::default();
///
/// // merge header map into existing test request and set multipart body
/// let req = headers
/// .into_iter()
/// .fold(req, |req, hdr| req.insert_header(hdr))
/// .set_payload(body)
/// .to_http_request();
///
/// assert!(
/// req.headers()
/// .get("content-type")
/// .unwrap()
/// .to_str()
/// .unwrap()
/// .starts_with("multipart/form-data; boundary=\"")
/// );
/// ```
pub fn create_form_data_payload_and_headers(
name: &str,
filename: Option<String>,
content_type: Option<Mime>,
file: Bytes,
) -> (Bytes, HeaderMap) {
let boundary = Alphanumeric.sample_string(&mut thread_rng(), 32);
create_form_data_payload_and_headers_with_boundary(
&boundary,
name,
filename,
content_type,
file,
)
}
/// Constructs a `multipart/form-data` payload from bytes and metadata with a fixed boundary.
///
/// See [`create_form_data_payload_and_headers`] for more details.
pub fn create_form_data_payload_and_headers_with_boundary(
boundary: &str,
name: &str,
filename: Option<String>,
content_type: Option<Mime>,
file: Bytes,
) -> (Bytes, HeaderMap) {
let mut buf = BytesMut::with_capacity(file.len() + 128);
let boundary_str = [BOUNDARY_PREFIX, boundary].concat();
let boundary = boundary_str.as_bytes();
buf.put(HYPHENS);
buf.put(boundary);
buf.put(CRLF);
buf.put(format!("Content-Disposition: form-data; name=\"{name}\"").as_bytes());
if let Some(filename) = filename {
buf.put(format!("; filename=\"{filename}\"").as_bytes());
}
buf.put(CRLF);
if let Some(ct) = content_type {
buf.put(format!("Content-Type: {ct}").as_bytes());
buf.put(CRLF);
}
buf.put(format!("Content-Length: {}", file.len()).as_bytes());
buf.put(CRLF_CRLF);
buf.put(file);
buf.put(CRLF);
buf.put(HYPHENS);
buf.put(boundary);
buf.put(HYPHENS);
buf.put(CRLF);
let mut headers = HeaderMap::new();
headers.insert(
header::CONTENT_TYPE,
format!("multipart/form-data; boundary=\"{boundary_str}\"")
.parse()
.unwrap(),
);
(buf.freeze(), headers)
}
#[cfg(test)]
mod tests {
use std::convert::Infallible;
use futures_util::stream;
use super::*;
fn find_boundary(headers: &HeaderMap) -> String {
headers
.get("content-type")
.unwrap()
.to_str()
.unwrap()
.parse::<mime::Mime>()
.unwrap()
.get_param(mime::BOUNDARY)
.unwrap()
.as_str()
.to_owned()
}
#[test]
fn wire_format() {
let (pl, headers) = create_form_data_payload_and_headers_with_boundary(
"qWeRtYuIoP",
"foo",
None,
None,
Bytes::from_static(b"Lorem ipsum dolor\nsit ame."),
);
assert_eq!(
find_boundary(&headers),
"------------------------qWeRtYuIoP",
);
assert_eq!(
std::str::from_utf8(&pl).unwrap(),
"--------------------------qWeRtYuIoP\r\n\
Content-Disposition: form-data; name=\"foo\"\r\n\
Content-Length: 26\r\n\
\r\n\
Lorem ipsum dolor\n\
sit ame.\r\n\
--------------------------qWeRtYuIoP--\r\n",
);
let (pl, _headers) = create_form_data_payload_and_headers_with_boundary(
"qWeRtYuIoP",
"foo",
Some("Lorem.txt".to_owned()),
Some(mime::TEXT_PLAIN_UTF_8),
Bytes::from_static(b"Lorem ipsum dolor\nsit ame."),
);
assert_eq!(
std::str::from_utf8(&pl).unwrap(),
"--------------------------qWeRtYuIoP\r\n\
Content-Disposition: form-data; name=\"foo\"; filename=\"Lorem.txt\"\r\n\
Content-Type: text/plain; charset=utf-8\r\n\
Content-Length: 26\r\n\
\r\n\
Lorem ipsum dolor\n\
sit ame.\r\n\
--------------------------qWeRtYuIoP--\r\n",
);
}
/// Test using an external library to prevent the two-wrongs-make-a-right class of errors.
#[actix_web::test]
async fn ecosystem_compat() {
let (pl, headers) = create_form_data_payload_and_headers(
"foo",
None,
None,
Bytes::from_static(b"Lorem ipsum dolor\nsit ame."),
);
let boundary = find_boundary(&headers);
let pl = stream::once(async { Ok::<_, Infallible>(pl) });
let mut form = multer::Multipart::new(pl, boundary);
let field = form.next_field().await.unwrap().unwrap();
assert_eq!(field.name().unwrap(), "foo");
assert_eq!(field.file_name(), None);
assert_eq!(field.content_type(), None);
assert!(field.bytes().await.unwrap().starts_with(b"Lorem"));
}
}

View File

@@ -2,13 +2,6 @@
## Unreleased ## Unreleased
## 0.5.3
- Add `unicode` crate feature (on-by-default) to switch between `regex` and `regex-lite` as a trade-off between full unicode support and binary size.
- Minimum supported Rust version (MSRV) is now 1.72.
## 0.5.2
- Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency. - Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.
## 0.5.1 ## 0.5.1

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "actix-router" name = "actix-router"
version = "0.5.3" version = "0.5.1"
authors = [ authors = [
"Nikolay Kim <fafhrd91@gmail.com>", "Nikolay Kim <fafhrd91@gmail.com>",
"Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>", "Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>",
@@ -12,23 +12,17 @@ repository = "https://github.com/actix/actix-web"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
edition = "2021" edition = "2021"
[package.metadata.cargo_check_external_types] [lib]
allowed_external_types = [ name = "actix_router"
"http::*", path = "src/lib.rs"
"serde::*",
]
[features] [features]
default = ["http", "unicode"] default = ["http"]
http = ["dep:http"]
unicode = ["dep:regex"]
[dependencies] [dependencies]
bytestring = ">=0.1.5, <2" bytestring = ">=0.1.5, <2"
cfg-if = "1"
http = { version = "0.2.7", optional = true } http = { version = "0.2.7", optional = true }
regex = { version = "1.5", optional = true } regex = "1.5"
regex-lite = "0.1"
serde = "1" serde = "1"
tracing = { version = "0.1.30", default-features = false, features = ["log"] } tracing = { version = "0.1.30", default-features = false, features = ["log"] }
@@ -41,7 +35,6 @@ percent-encoding = "2.1"
[[bench]] [[bench]]
name = "router" name = "router"
harness = false harness = false
required-features = ["unicode"]
[[bench]] [[bench]]
name = "quoter" name = "quoter"


@@ -1,18 +1,14 @@
# `actix-router` # `actix-router`
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-router?label=latest)](https://crates.io/crates/actix-router) [![crates.io](https://img.shields.io/crates/v/actix-router?label=latest)](https://crates.io/crates/actix-router)
[![Documentation](https://docs.rs/actix-router/badge.svg?version=0.5.3)](https://docs.rs/actix-router/0.5.3) [![Documentation](https://docs.rs/actix-router/badge.svg?version=0.5.1)](https://docs.rs/actix-router/0.5.1)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg) ![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-router.svg) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-router.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-router/0.5.3/status.svg)](https://deps.rs/crate/actix-router/0.5.3) [![dependency status](https://deps.rs/crate/actix-router/0.5.1/status.svg)](https://deps.rs/crate/actix-router/0.5.1)
[![Download](https://img.shields.io/crates/d/actix-router.svg)](https://crates.io/crates/actix-router) [![Download](https://img.shields.io/crates/d/actix-router.svg)](https://crates.io/crates/actix-router)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end -->
<!-- cargo-rdme start --> <!-- cargo-rdme start -->
Resource path matching and router. Resource path matching and router.


@@ -500,10 +500,10 @@ impl<'de> de::VariantAccess<'de> for UnitVariant {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use serde::Deserialize; use serde::{de, Deserialize};
use super::*; use super::*;
use crate::{router::Router, ResourceDef}; use crate::{path::Path, router::Router, ResourceDef};
#[derive(Deserialize)] #[derive(Deserialize)]
struct MyStruct { struct MyStruct {


@@ -10,7 +10,6 @@ mod de;
mod path; mod path;
mod pattern; mod pattern;
mod quoter; mod quoter;
mod regex_set;
mod resource; mod resource;
mod resource_path; mod resource_path;
mod router; mod router;


@@ -154,11 +154,15 @@ impl<T: ResourcePath> Path<T> {
None None
} }
/// Returns matched parameter by name. /// Get matched parameter by name.
/// ///
/// If keyed parameter is not available empty string is used as default value. /// If keyed parameter is not available empty string is used as default value.
pub fn query(&self, key: &str) -> &str { pub fn query(&self, key: &str) -> &str {
self.get(key).unwrap_or_default() if let Some(s) = self.get(key) {
s
} else {
""
}
} }
/// Return iterator to items in parameter container. /// Return iterator to items in parameter container.
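A small sketch of the two accessors touched above, showing `query` falling back to an empty string where `get` returns `None`. It is written against the public actix-router 0.5 API; the route and parameter names are arbitrary.

```rust
use actix_router::{Path, ResourceDef};

fn main() {
    let resource = ResourceDef::new("/user/{id}");
    let mut path = Path::new("/user/42");
    assert!(resource.capture_match_info(&mut path));

    // `get` exposes the missing-parameter case explicitly...
    assert_eq!(path.get("id"), Some("42"));
    assert_eq!(path.get("missing"), None);

    // ...while `query` defaults to an empty string
    assert_eq!(path.query("id"), "42");
    assert_eq!(path.query("missing"), "");
}
```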


@@ -1,66 +0,0 @@
//! Abstraction over `regex` and `regex-lite` depending on whether we have `unicode` crate feature
//! enabled.
use cfg_if::cfg_if;
#[cfg(feature = "unicode")]
pub(crate) use regex::{escape, Regex};
#[cfg(not(feature = "unicode"))]
pub(crate) use regex_lite::{escape, Regex};
#[cfg(feature = "unicode")]
#[derive(Debug, Clone)]
pub(crate) struct RegexSet(regex::RegexSet);
#[cfg(not(feature = "unicode"))]
#[derive(Debug, Clone)]
pub(crate) struct RegexSet(Vec<regex_lite::Regex>);
impl RegexSet {
/// Create a new regex set.
///
/// # Panics
///
/// Panics if any path patterns are malformed.
pub(crate) fn new(re_set: Vec<String>) -> Self {
cfg_if! {
if #[cfg(feature = "unicode")] {
Self(regex::RegexSet::new(re_set).unwrap())
} else {
Self(re_set.iter().map(|re| Regex::new(re).unwrap()).collect())
}
}
}
/// Create a new empty regex set.
pub(crate) fn empty() -> Self {
cfg_if! {
if #[cfg(feature = "unicode")] {
Self(regex::RegexSet::empty())
} else {
Self(Vec::new())
}
}
}
/// Returns true if regex set matches `path`.
pub(crate) fn is_match(&self, path: &str) -> bool {
cfg_if! {
if #[cfg(feature = "unicode")] {
self.0.is_match(path)
} else {
self.0.iter().any(|re| re.is_match(path))
}
}
}
/// Returns index within `path` of first match.
pub(crate) fn first_match_idx(&self, path: &str) -> Option<usize> {
cfg_if! {
if #[cfg(feature = "unicode")] {
self.0.matches(path).into_iter().next()
} else {
Some(self.0.iter().enumerate().find(|(_, re)| re.is_match(path))?.0)
}
}
}
}


@@ -5,13 +5,10 @@ use std::{
mem, mem,
}; };
use regex::{escape, Regex, RegexSet};
use tracing::error; use tracing::error;
use crate::{ use crate::{path::PathItem, IntoPatterns, Patterns, Resource, ResourcePath};
path::PathItem,
regex_set::{escape, Regex, RegexSet},
IntoPatterns, Patterns, Resource, ResourcePath,
};
const MAX_DYNAMIC_SEGMENTS: usize = 16; const MAX_DYNAMIC_SEGMENTS: usize = 16;
@@ -236,7 +233,7 @@ enum PatternSegment {
Var(String), Var(String),
} }
#[derive(Debug, Clone)] #[derive(Clone, Debug)]
#[allow(clippy::large_enum_variant)] #[allow(clippy::large_enum_variant)]
enum PatternType { enum PatternType {
/// Single constant/literal segment. /// Single constant/literal segment.
@@ -606,7 +603,7 @@ impl ResourceDef {
PatternType::Dynamic(re, _) => Some(re.captures(path)?[1].len()), PatternType::Dynamic(re, _) => Some(re.captures(path)?[1].len()),
PatternType::DynamicSet(re, params) => { PatternType::DynamicSet(re, params) => {
let idx = re.first_match_idx(path)?; let idx = re.matches(path).into_iter().next()?;
let (ref pattern, _) = params[idx]; let (ref pattern, _) = params[idx];
Some(pattern.captures(path)?[1].len()) Some(pattern.captures(path)?[1].len())
} }
@@ -709,7 +706,7 @@ impl ResourceDef {
PatternType::DynamicSet(re, params) => { PatternType::DynamicSet(re, params) => {
let path = path.unprocessed(); let path = path.unprocessed();
let (pattern, names) = match re.first_match_idx(path) { let (pattern, names) = match re.matches(path).into_iter().next() {
Some(idx) => &params[idx], Some(idx) => &params[idx],
_ => return false, _ => return false,
}; };
@@ -873,7 +870,7 @@ impl ResourceDef {
} }
} }
let pattern_re_set = RegexSet::new(re_set); let pattern_re_set = RegexSet::new(re_set).unwrap();
let segments = segments.unwrap_or_default(); let segments = segments.unwrap_or_default();
( (


@@ -2,21 +2,6 @@
## Unreleased ## Unreleased
## 0.1.5
- Add `TestServerConfig::listen_address()` method.
## 0.1.4
- Add `TestServerConfig::rustls_0_23()` method for Rustls v0.23 support behind new `rustls-0_23` crate feature.
- Add `TestServerConfig::disable_redirects()` method.
- Various types from `awc`, such as `ClientRequest` and `ClientResponse`, are now re-exported.
- Minimum supported Rust version (MSRV) is now 1.72.
## 0.1.3
- Add `TestServerConfig::rustls_0_22()` method for Rustls v0.22 support behind new `rustls-0_22` crate feature.
## 0.1.2 ## 0.1.2
- Add `TestServerConfig::rustls_021()` method for Rustls v0.21 support behind new `rustls-0_21` crate feature. - Add `TestServerConfig::rustls_021()` method for Rustls v0.21 support behind new `rustls-0_21` crate feature.
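A minimal sketch of how the new configuration knobs could be combined in an integration test. The handler and route are placeholders; the rest follows the `actix_test::config()` / `start_with` pattern this crate already documents.

```rust
use actix_web::{web, App, HttpResponse};

async fn index() -> HttpResponse {
    HttpResponse::Ok().body("hello")
}

#[actix_rt::test]
async fn configured_test_server() {
    let srv = actix_test::start_with(
        actix_test::config()
            // the default address; pass e.g. "0.0.0.0" to expose the server
            // beyond loopback
            .listen_address("127.0.0.1")
            // keep redirect responses unfollowed so they can be asserted on
            .disable_redirects(),
        || App::new().route("/", web::get().to(index)),
    );

    let res = srv.get("/").send().await.unwrap();
    assert!(res.status().is_success());
}
```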


@@ -1,6 +1,6 @@
[package] [package]
name = "actix-test" name = "actix-test"
version = "0.1.5" version = "0.1.2"
authors = [ authors = [
"Nikolay Kim <fafhrd91@gmail.com>", "Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>", "Rob Ede <robjtede@icloud.com>",
@@ -18,22 +18,6 @@ categories = [
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
edition = "2021" edition = "2021"
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_codec::*",
"actix_http_test::*",
"actix_http::*",
"actix_service::*",
"actix_web::*",
"awc::*",
"bytes::*",
"futures_core::*",
"http::*",
"openssl::*",
"rustls::*",
"tokio::*",
]
[features] [features]
default = [] default = []
@@ -43,23 +27,19 @@ rustls = ["rustls-0_20"]
rustls-0_20 = ["tls-rustls-0_20", "actix-http/rustls-0_20", "awc/rustls-0_20"] rustls-0_20 = ["tls-rustls-0_20", "actix-http/rustls-0_20", "awc/rustls-0_20"]
# TLS via Rustls v0.21 # TLS via Rustls v0.21
rustls-0_21 = ["tls-rustls-0_21", "actix-http/rustls-0_21", "awc/rustls-0_21"] rustls-0_21 = ["tls-rustls-0_21", "actix-http/rustls-0_21", "awc/rustls-0_21"]
# TLS via Rustls v0.22
rustls-0_22 = ["tls-rustls-0_22", "actix-http/rustls-0_22", "awc/rustls-0_22-webpki-roots"]
# TLS via Rustls v0.23
rustls-0_23 = ["tls-rustls-0_23", "actix-http/rustls-0_23", "awc/rustls-0_23-webpki-roots"]
# TLS via OpenSSL # TLS via OpenSSL
openssl = ["tls-openssl", "actix-http/openssl", "awc/openssl"] openssl = ["tls-openssl", "actix-http/openssl", "awc/openssl"]
[dependencies] [dependencies]
actix-codec = "0.5" actix-codec = "0.5"
actix-http = "3.7" actix-http = "3"
actix-http-test = "3" actix-http-test = "3"
actix-rt = "2.1" actix-rt = "2.1"
actix-service = "2" actix-service = "2"
actix-utils = "3" actix-utils = "3"
actix-web = { version = "4.6", default-features = false, features = ["cookies"] } actix-web = { version = "4", default-features = false, features = ["cookies"] }
awc = { version = "3.5", default-features = false, features = ["cookies"] } awc = { version = "3", default-features = false, features = ["cookies"] }
futures-core = { version = "0.3.17", default-features = false, features = ["std"] } futures-core = { version = "0.3.17", default-features = false, features = ["std"] }
futures-util = { version = "0.3.17", default-features = false, features = [] } futures-util = { version = "0.3.17", default-features = false, features = [] }
@@ -70,6 +50,4 @@ serde_urlencoded = "0.7"
tls-openssl = { package = "openssl", version = "0.10.55", optional = true } tls-openssl = { package = "openssl", version = "0.10.55", optional = true }
tls-rustls-0_20 = { package = "rustls", version = "0.20", optional = true } tls-rustls-0_20 = { package = "rustls", version = "0.20", optional = true }
tls-rustls-0_21 = { package = "rustls", version = "0.21", optional = true } tls-rustls-0_21 = { package = "rustls", version = "0.21", optional = true }
tls-rustls-0_22 = { package = "rustls", version = "0.22", optional = true }
tls-rustls-0_23 = { package = "rustls", version = "0.23", default-features = false, optional = true }
tokio = { version = "1.24.2", features = ["sync"] } tokio = { version = "1.24.2", features = ["sync"] }


@@ -1,45 +0,0 @@
# `actix-test`
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-test?label=latest)](https://crates.io/crates/actix-test)
[![Documentation](https://docs.rs/actix-test/badge.svg?version=0.1.5)](https://docs.rs/actix-test/0.1.5)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-test.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-test/0.1.5/status.svg)](https://deps.rs/crate/actix-test/0.1.5)
[![Download](https://img.shields.io/crates/d/actix-test.svg)](https://crates.io/crates/actix-test)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end -->
<!-- cargo-rdme start -->
Integration testing tools for Actix Web applications.
The main integration testing tool is [`TestServer`]. It spawns a real HTTP server on an unused port and provides methods that use a real HTTP client. Therefore, it is much closer to real-world cases than using `init_service`, which skips HTTP encoding and decoding.
## Examples
```rust
use actix_web::{get, web, test, App, HttpResponse, Error, Responder};
#[get("/")]
async fn my_handler() -> Result<impl Responder, Error> {
Ok(HttpResponse::Ok())
}
#[actix_rt::test]
async fn test_example() {
let srv = actix_test::start(||
App::new().service(my_handler)
);
let req = srv.get("/");
let res = req.send().await.unwrap();
assert!(res.status().is_success());
}
```
<!-- cargo-rdme end -->


@@ -5,7 +5,6 @@
//! real-world cases than using `init_service`, which skips HTTP encoding and decoding. //! real-world cases than using `init_service`, which skips HTTP encoding and decoding.
//! //!
//! # Examples //! # Examples
//!
//! ``` //! ```
//! use actix_web::{get, web, test, App, HttpResponse, Error, Responder}; //! use actix_web::{get, web, test, App, HttpResponse, Error, Responder};
//! //!
@@ -53,7 +52,7 @@ use actix_web::{
rt::{self, System}, rt::{self, System},
web, Error, web, Error,
}; };
pub use awc::{error::PayloadError, Client, ClientRequest, ClientResponse, Connector}; use awc::{error::PayloadError, Client, ClientRequest, ClientResponse, Connector};
use futures_core::Stream; use futures_core::Stream;
use tokio::sync::mpsc; use tokio::sync::mpsc;
@@ -144,18 +143,12 @@ where
StreamType::Rustls020(_) => true, StreamType::Rustls020(_) => true,
#[cfg(feature = "rustls-0_21")] #[cfg(feature = "rustls-0_21")]
StreamType::Rustls021(_) => true, StreamType::Rustls021(_) => true,
#[cfg(feature = "rustls-0_22")]
StreamType::Rustls022(_) => true,
#[cfg(feature = "rustls-0_23")]
StreamType::Rustls023(_) => true,
}; };
let client_cfg = cfg.clone();
// run server in separate orphaned thread // run server in separate orphaned thread
thread::spawn(move || { thread::spawn(move || {
rt::System::new().block_on(async move { rt::System::new().block_on(async move {
let tcp = net::TcpListener::bind((cfg.listen_address.clone(), cfg.port)).unwrap(); let tcp = net::TcpListener::bind(("127.0.0.1", cfg.port)).unwrap();
let local_addr = tcp.local_addr().unwrap(); let local_addr = tcp.local_addr().unwrap();
let factory = factory.clone(); let factory = factory.clone();
let srv_cfg = cfg.clone(); let srv_cfg = cfg.clone();
@@ -334,90 +327,6 @@ where
.rustls_021(config.clone()) .rustls_021(config.clone())
}), }),
}, },
#[cfg(feature = "rustls-0_22")]
StreamType::Rustls022(config) => match cfg.tp {
HttpVer::Http1 => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory()
.into_factory()
.map_err(|err| err.into().error_response());
HttpService::build()
.client_request_timeout(timeout)
.h1(map_config(fac, move |_| app_cfg.clone()))
.rustls_0_22(config.clone())
}),
HttpVer::Http2 => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory()
.into_factory()
.map_err(|err| err.into().error_response());
HttpService::build()
.client_request_timeout(timeout)
.h2(map_config(fac, move |_| app_cfg.clone()))
.rustls_0_22(config.clone())
}),
HttpVer::Both => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory()
.into_factory()
.map_err(|err| err.into().error_response());
HttpService::build()
.client_request_timeout(timeout)
.finish(map_config(fac, move |_| app_cfg.clone()))
.rustls_0_22(config.clone())
}),
},
#[cfg(feature = "rustls-0_23")]
StreamType::Rustls023(config) => match cfg.tp {
HttpVer::Http1 => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory()
.into_factory()
.map_err(|err| err.into().error_response());
HttpService::build()
.client_request_timeout(timeout)
.h1(map_config(fac, move |_| app_cfg.clone()))
.rustls_0_23(config.clone())
}),
HttpVer::Http2 => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory()
.into_factory()
.map_err(|err| err.into().error_response());
HttpService::build()
.client_request_timeout(timeout)
.h2(map_config(fac, move |_| app_cfg.clone()))
.rustls_0_23(config.clone())
}),
HttpVer::Both => builder.listen("test", tcp, move || {
let app_cfg =
AppConfig::__priv_test_new(false, local_addr.to_string(), local_addr);
let fac = factory()
.into_factory()
.map_err(|err| err.into().error_response());
HttpService::build()
.client_request_timeout(timeout)
.finish(map_config(fac, move |_| app_cfg.clone()))
.rustls_0_23(config.clone())
}),
},
} }
.expect("test server could not be created"); .expect("test server could not be created");
@@ -463,13 +372,7 @@ where
} }
}; };
let mut client_builder = Client::builder().connector(connector); Client::builder().connector(connector).finish()
if client_cfg.disable_redirects {
client_builder = client_builder.disable_redirects();
}
client_builder.finish()
}; };
TestServer { TestServer {
@@ -489,7 +392,6 @@ enum HttpVer {
Both, Both,
} }
#[allow(clippy::large_enum_variant)]
#[derive(Clone)] #[derive(Clone)]
enum StreamType { enum StreamType {
Tcp, Tcp,
@@ -499,10 +401,6 @@ enum StreamType {
Rustls020(tls_rustls_0_20::ServerConfig), Rustls020(tls_rustls_0_20::ServerConfig),
#[cfg(feature = "rustls-0_21")] #[cfg(feature = "rustls-0_21")]
Rustls021(tls_rustls_0_21::ServerConfig), Rustls021(tls_rustls_0_21::ServerConfig),
#[cfg(feature = "rustls-0_22")]
Rustls022(tls_rustls_0_22::ServerConfig),
#[cfg(feature = "rustls-0_23")]
Rustls023(tls_rustls_0_23::ServerConfig),
} }
/// Create default test server config. /// Create default test server config.
@@ -515,10 +413,8 @@ pub struct TestServerConfig {
tp: HttpVer, tp: HttpVer,
stream: StreamType, stream: StreamType,
client_request_timeout: Duration, client_request_timeout: Duration,
listen_address: String,
port: u16, port: u16,
workers: usize, workers: usize,
disable_redirects: bool,
} }
impl Default for TestServerConfig { impl Default for TestServerConfig {
@@ -528,96 +424,56 @@ impl Default for TestServerConfig {
} }
impl TestServerConfig { impl TestServerConfig {
/// Constructs default server configuration. /// Create default server configuration
pub(crate) fn new() -> TestServerConfig { pub(crate) fn new() -> TestServerConfig {
TestServerConfig { TestServerConfig {
tp: HttpVer::Both, tp: HttpVer::Both,
stream: StreamType::Tcp, stream: StreamType::Tcp,
client_request_timeout: Duration::from_secs(5), client_request_timeout: Duration::from_secs(5),
listen_address: "127.0.0.1".to_string(),
port: 0, port: 0,
workers: 1, workers: 1,
disable_redirects: false,
} }
} }
/// Accepts HTTP/1.1 only. /// Accept HTTP/1.1 only.
pub fn h1(mut self) -> Self { pub fn h1(mut self) -> Self {
self.tp = HttpVer::Http1; self.tp = HttpVer::Http1;
self self
} }
/// Accepts HTTP/2 only. /// Accept HTTP/2 only.
pub fn h2(mut self) -> Self { pub fn h2(mut self) -> Self {
self.tp = HttpVer::Http2; self.tp = HttpVer::Http2;
self self
} }
/// Accepts secure connections via OpenSSL. /// Accept secure connections via OpenSSL.
#[cfg(feature = "openssl")] #[cfg(feature = "openssl")]
pub fn openssl(mut self, acceptor: openssl::ssl::SslAcceptor) -> Self { pub fn openssl(mut self, acceptor: openssl::ssl::SslAcceptor) -> Self {
self.stream = StreamType::Openssl(acceptor); self.stream = StreamType::Openssl(acceptor);
self self
} }
#[doc(hidden)] /// Accept secure connections via Rustls.
#[deprecated(note = "Renamed to `rustls_0_20()`.")]
#[cfg(feature = "rustls-0_20")] #[cfg(feature = "rustls-0_20")]
pub fn rustls(mut self, config: tls_rustls_0_20::ServerConfig) -> Self { pub fn rustls(mut self, config: tls_rustls_0_20::ServerConfig) -> Self {
self.stream = StreamType::Rustls020(config); self.stream = StreamType::Rustls020(config);
self self
} }
/// Accepts secure connections via Rustls v0.20. /// Accept secure connections via Rustls.
#[cfg(feature = "rustls-0_20")]
pub fn rustls_0_20(mut self, config: tls_rustls_0_20::ServerConfig) -> Self {
self.stream = StreamType::Rustls020(config);
self
}
#[doc(hidden)]
#[deprecated(note = "Renamed to `rustls_0_21()`.")]
#[cfg(feature = "rustls-0_21")] #[cfg(feature = "rustls-0_21")]
pub fn rustls_021(mut self, config: tls_rustls_0_21::ServerConfig) -> Self { pub fn rustls_021(mut self, config: tls_rustls_0_21::ServerConfig) -> Self {
self.stream = StreamType::Rustls021(config); self.stream = StreamType::Rustls021(config);
self self
} }
/// Accepts secure connections via Rustls v0.21. /// Set client timeout for first request.
#[cfg(feature = "rustls-0_21")]
pub fn rustls_0_21(mut self, config: tls_rustls_0_21::ServerConfig) -> Self {
self.stream = StreamType::Rustls021(config);
self
}
/// Accepts secure connections via Rustls v0.22.
#[cfg(feature = "rustls-0_22")]
pub fn rustls_0_22(mut self, config: tls_rustls_0_22::ServerConfig) -> Self {
self.stream = StreamType::Rustls022(config);
self
}
/// Accepts secure connections via Rustls v0.23.
#[cfg(feature = "rustls-0_23")]
pub fn rustls_0_23(mut self, config: tls_rustls_0_23::ServerConfig) -> Self {
self.stream = StreamType::Rustls023(config);
self
}
/// Sets client timeout for first request.
pub fn client_request_timeout(mut self, dur: Duration) -> Self { pub fn client_request_timeout(mut self, dur: Duration) -> Self {
self.client_request_timeout = dur; self.client_request_timeout = dur;
self self
} }
/// Sets the address the server will listen on.
///
/// By default, only listens on `127.0.0.1`.
pub fn listen_address(mut self, addr: impl Into<String>) -> Self {
self.listen_address = addr.into();
self
}
/// Sets test server port. /// Sets test server port.
/// ///
/// By default, a random free port is determined by the OS. /// By default, a random free port is determined by the OS.
@@ -633,15 +489,6 @@ impl TestServerConfig {
self.workers = workers; self.workers = workers;
self self
} }
/// Instruct the client to not follow redirects.
///
/// By default, the client will follow up to 10 consecutive redirects
/// before giving up.
pub fn disable_redirects(mut self) -> Self {
self.disable_redirects = true;
self
}
} }
/// A basic HTTP server controller that simplifies the process of writing integration tests for /// A basic HTTP server controller that simplifies the process of writing integration tests for
@@ -668,9 +515,9 @@ impl TestServer {
let scheme = if self.tls { "https" } else { "http" }; let scheme = if self.tls { "https" } else { "http" };
if uri.starts_with('/') { if uri.starts_with('/') {
format!("{}://{}{}", scheme, self.addr, uri) format!("{}://localhost:{}{}", scheme, self.addr.port(), uri)
} else { } else {
format!("{}://{}/{}", scheme, self.addr, uri) format!("{}://localhost:{}/{}", scheme, self.addr.port(), uri)
} }
} }


@@ -2,11 +2,6 @@
## Unreleased ## Unreleased
- Take the encoded buffer when yielding bytes in the response stream rather than splitting the buffer, reducing memory use
- Minimum supported Rust version (MSRV) is now 1.72.
## 4.3.0
- Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency. - Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.
## 4.2.0 ## 4.2.0
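The entry above concerns how the websocket stream yields its encoded buffer. A standalone sketch of the difference between `BytesMut::split()` and `std::mem::take`, using only the `bytes` crate and independent of actix, is:

```rust
use bytes::{BufMut as _, BytesMut};

fn main() {
    // `split()` moves the filled bytes out, but both handles keep referring to the
    // same backing allocation, so its capacity stays pinned while either is alive
    let mut buf = BytesMut::with_capacity(4096);
    buf.put_slice(b"frame");
    let frozen = buf.split().freeze();
    assert_eq!(&frozen[..], b"frame");
    assert!(buf.capacity() > 0); // remaining capacity of the shared allocation

    // `mem::take` swaps in a fresh, empty `BytesMut`, so the old allocation can be
    // released as soon as the frozen bytes are dropped
    let mut buf = BytesMut::with_capacity(4096);
    buf.put_slice(b"frame");
    let frozen = std::mem::take(&mut buf).freeze();
    assert_eq!(&frozen[..], b"frame");
    assert_eq!(buf.capacity(), 0);
}
```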


@@ -1,6 +1,6 @@
[package] [package]
name = "actix-web-actors" name = "actix-web-actors"
version = "4.3.0" version = "4.2.0"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Actix actors support for Actix Web" description = "Actix actors support for Actix Web"
keywords = ["actix", "http", "web", "framework", "async"] keywords = ["actix", "http", "web", "framework", "async"]
@@ -9,15 +9,9 @@ repository = "https://github.com/actix/actix-web"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
edition = "2021" edition = "2021"
[package.metadata.cargo_check_external_types] [lib]
allowed_external_types = [ name = "actix_web_actors"
"actix::*", path = "src/lib.rs"
"actix_http::*",
"actix_web::*",
"bytes::*",
"bytestring::*",
"futures_core::*",
]
[dependencies] [dependencies]
actix = { version = ">=0.12, <0.14", default-features = false } actix = { version = ">=0.12, <0.14", default-features = false }
@@ -38,6 +32,6 @@ actix-test = "0.1"
awc = { version = "3", default-features = false } awc = { version = "3", default-features = false }
actix-web = { version = "4", features = ["macros"] } actix-web = { version = "4", features = ["macros"] }
env_logger = "0.11" env_logger = "0.10"
futures-util = { version = "0.3.17", default-features = false, features = ["std"] } futures-util = { version = "0.3.17", default-features = false, features = ["std"] }
mime = "0.3" mime = "0.3"


@@ -1,16 +1,17 @@
# `actix-web-actors` # actix-web-actors
> Actix actors support for Actix Web. > Actix actors support for Actix Web.
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors) [![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors)
[![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.3.0)](https://docs.rs/actix-web-actors/4.3.0) [![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.2.0)](https://docs.rs/actix-web-actors/4.2.0)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg) ![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg)
![License](https://img.shields.io/crates/l/actix-web-actors.svg) ![License](https://img.shields.io/crates/l/actix-web-actors.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-web-actors/4.3.0/status.svg)](https://deps.rs/crate/actix-web-actors/4.3.0) [![dependency status](https://deps.rs/crate/actix-web-actors/4.2.0/status.svg)](https://deps.rs/crate/actix-web-actors/4.2.0)
[![Download](https://img.shields.io/crates/d/actix-web-actors.svg)](https://crates.io/crates/actix-web-actors) [![Download](https://img.shields.io/crates/d/actix-web-actors.svg)](https://crates.io/crates/actix-web-actors)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end --> ## Documentation & Resources
- [API Documentation](https://docs.rs/actix-web-actors)
- Minimum Supported Rust Version (MSRV): 1.68


@@ -248,11 +248,13 @@ where
mod tests { mod tests {
use std::time::Duration; use std::time::Duration;
use actix::Actor;
use actix_web::{ use actix_web::{
http::StatusCode, http::StatusCode,
test::{call_service, init_service, read_body, TestRequest}, test::{call_service, init_service, read_body, TestRequest},
web, App, HttpResponse, web, App, HttpResponse,
}; };
use bytes::Bytes;
use super::*; use super::*;


@@ -710,7 +710,7 @@ where
} }
if !this.buf.is_empty() { if !this.buf.is_empty() {
Poll::Ready(Some(Ok(std::mem::take(&mut this.buf).freeze()))) Poll::Ready(Some(Ok(this.buf.split().freeze())))
} else if this.fut.alive() && !this.closed { } else if this.fut.alive() && !this.closed {
Poll::Pending Poll::Pending
} else { } else {
@@ -817,7 +817,10 @@ where
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use actix_web::test::TestRequest; use actix_web::{
http::{header, Method},
test::TestRequest,
};
use super::*; use super::*;


@@ -2,13 +2,6 @@
## Unreleased ## Unreleased
## 4.3.0
- Add `#[scope]` macro.
- Add `compat-routing-macros-force-pub` crate feature which, on-by-default, which when disabled causes handlers to inherit their attached function's visibility.
- Prevent inclusion of default `actix-router` features.
- Minimum supported Rust version (MSRV) is now 1.72.
## 4.2.2 ## 4.2.2
- Fix regression when declaring `wrap` attribute using an expression. - Fix regression when declaring `wrap` attribute using an expression.
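A minimal sketch of what the visibility change means in practice: with the `compat-routing-macros-force-pub` feature disabled, the unit struct generated for `hello` would inherit the `pub(crate)` below instead of being forced to `pub`. The module layout and route are illustrative only.

```rust
use actix_web::{get, test, App, Responder};

mod handlers {
    use super::*;

    // once the compat feature is turned off, the generated `hello` route struct
    // takes this item's visibility and is no longer exported as `pub`
    #[get("/hello")]
    pub(crate) async fn hello() -> impl Responder {
        "Hello, world!"
    }
}

#[actix_web::test]
async fn hello_is_routable() {
    let app = test::init_service(App::new().service(handlers::hello)).await;
    let req = test::TestRequest::get().uri("/hello").to_request();
    let res = test::call_service(&app, req).await;
    assert!(res.status().is_success());
}
```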


@@ -1,26 +1,21 @@
[package] [package]
name = "actix-web-codegen" name = "actix-web-codegen"
version = "4.3.0" version = "4.2.2"
description = "Routing and runtime macros for Actix Web" description = "Routing and runtime macros for Actix Web"
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
authors = [ authors = [
"Nikolay Kim <fafhrd91@gmail.com>", "Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>", "Rob Ede <robjtede@icloud.com>",
] ]
homepage.workspace = true license = "MIT OR Apache-2.0"
repository.workspace = true edition = "2021"
license.workspace = true
edition.workspace = true
rust-version.workspace = true
[lib] [lib]
proc-macro = true proc-macro = true
[features]
default = ["compat-routing-macros-force-pub"]
compat-routing-macros-force-pub = []
[dependencies] [dependencies]
actix-router = { version = "0.5", default-features = false } actix-router = "0.5"
proc-macro2 = "1" proc-macro2 = "1"
quote = "1" quote = "1"
syn = { version = "2", features = ["full", "extra-traits"] } syn = { version = "2", features = ["full", "extra-traits"] }


@@ -1,19 +1,20 @@
# `actix-web-codegen` # actix-web-codegen
> Routing and runtime macros for Actix Web. > Routing and runtime macros for Actix Web.
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-web-codegen?label=latest)](https://crates.io/crates/actix-web-codegen) [![crates.io](https://img.shields.io/crates/v/actix-web-codegen?label=latest)](https://crates.io/crates/actix-web-codegen)
[![Documentation](https://docs.rs/actix-web-codegen/badge.svg?version=4.3.0)](https://docs.rs/actix-web-codegen/4.3.0) [![Documentation](https://docs.rs/actix-web-codegen/badge.svg?version=4.2.2)](https://docs.rs/actix-web-codegen/4.2.2)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg) ![Version](https://img.shields.io/badge/rustc-1.68+-ab6000.svg)
![License](https://img.shields.io/crates/l/actix-web-codegen.svg) ![License](https://img.shields.io/crates/l/actix-web-codegen.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-web-codegen/4.3.0/status.svg)](https://deps.rs/crate/actix-web-codegen/4.3.0) [![dependency status](https://deps.rs/crate/actix-web-codegen/4.2.2/status.svg)](https://deps.rs/crate/actix-web-codegen/4.2.2)
[![Download](https://img.shields.io/crates/d/actix-web-codegen.svg)](https://crates.io/crates/actix-web-codegen) [![Download](https://img.shields.io/crates/d/actix-web-codegen.svg)](https://crates.io/crates/actix-web-codegen)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end --> ## Documentation & Resources
- [API Documentation](https://docs.rs/actix-web-codegen)
- Minimum Supported Rust Version (MSRV): 1.68
## Compile Testing ## Compile Testing


@@ -83,7 +83,6 @@ use proc_macro::TokenStream;
use quote::quote; use quote::quote;
mod route; mod route;
mod scope;
/// Creates resource handler, allowing multiple HTTP method guards. /// Creates resource handler, allowing multiple HTTP method guards.
/// ///
@@ -198,43 +197,6 @@ method_macro!(Options, options);
method_macro!(Trace, trace); method_macro!(Trace, trace);
method_macro!(Patch, patch); method_macro!(Patch, patch);
/// Prepends a path prefix to all handlers using routing macros inside the attached module.
///
/// # Syntax
///
/// ```
/// # use actix_web_codegen::scope;
/// #[scope("/prefix")]
/// mod api {
/// // ...
/// }
/// ```
///
/// # Arguments
///
/// - `"/prefix"` - Raw literal string to be prefixed onto contained handlers' paths.
///
/// # Example
///
/// ```
/// # use actix_web_codegen::{scope, get};
/// # use actix_web::Responder;
/// #[scope("/api")]
/// mod api {
/// # use super::*;
/// #[get("/hello")]
/// pub async fn hello() -> impl Responder {
/// // this has path /api/hello
/// "Hello, world!"
/// }
/// }
/// # fn main() {}
/// ```
#[proc_macro_attribute]
pub fn scope(args: TokenStream, input: TokenStream) -> TokenStream {
scope::with_scope(args, input)
}
/// Marks async main function as the Actix Web system entry-point. /// Marks async main function as the Actix Web system entry-point.
/// ///
/// Note that Actix Web also works under `#[tokio::main]` since version 4.0. However, this macro is /// Note that Actix Web also works under `#[tokio::main]` since version 4.0. However, this macro is
@@ -278,15 +240,3 @@ pub fn test(_: TokenStream, item: TokenStream) -> TokenStream {
output.extend(item); output.extend(item);
output output
} }
/// Converts the error to a token stream and appends it to the original input.
///
/// Returning the original input in addition to the error is good for IDEs which can gracefully
/// recover and show more precise errors within the macro body.
///
/// See <https://github.com/rust-analyzer/rust-analyzer/issues/10468> for more info.
fn input_and_compile_error(mut item: TokenStream, err: syn::Error) -> TokenStream {
let compile_err = TokenStream::from(err.to_compile_error());
item.extend(compile_err);
item
}


@@ -6,12 +6,10 @@ use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::{quote, ToTokens, TokenStreamExt}; use quote::{quote, ToTokens, TokenStreamExt};
use syn::{punctuated::Punctuated, Ident, LitStr, Path, Token}; use syn::{punctuated::Punctuated, Ident, LitStr, Path, Token};
use crate::input_and_compile_error;
#[derive(Debug)] #[derive(Debug)]
pub struct RouteArgs { pub struct RouteArgs {
pub(crate) path: syn::LitStr, path: syn::LitStr,
pub(crate) options: Punctuated<syn::MetaNameValue, Token![,]>, options: Punctuated<syn::MetaNameValue, Token![,]>,
} }
impl syn::parse::Parse for RouteArgs { impl syn::parse::Parse for RouteArgs {
@@ -80,7 +78,7 @@ macro_rules! standard_method_type {
} }
} }
pub(crate) fn from_path(method: &Path) -> Result<Self, ()> { fn from_path(method: &Path) -> Result<Self, ()> {
match () { match () {
$(_ if method.is_ident(stringify!($lower)) => Ok(Self::$variant),)+ $(_ if method.is_ident(stringify!($lower)) => Ok(Self::$variant),)+
_ => Err(()), _ => Err(()),
@@ -413,13 +411,6 @@ impl ToTokens for Route {
doc_attributes, doc_attributes,
} = self; } = self;
#[allow(unused_variables)] // used when force-pub feature is disabled
let vis = &ast.vis;
// TODO(breaking): remove this force-pub forwards-compatibility feature
#[cfg(feature = "compat-routing-macros-force-pub")]
let vis = syn::Visibility::Public(<Token![pub]>::default());
let registrations: TokenStream2 = args let registrations: TokenStream2 = args
.iter() .iter()
.map(|args| { .map(|args| {
@@ -467,7 +458,7 @@ impl ToTokens for Route {
let stream = quote! { let stream = quote! {
#(#doc_attributes)* #(#doc_attributes)*
#[allow(non_camel_case_types, missing_docs)] #[allow(non_camel_case_types, missing_docs)]
#vis struct #name; pub struct #name;
impl ::actix_web::dev::HttpServiceFactory for #name { impl ::actix_web::dev::HttpServiceFactory for #name {
fn register(self, __config: &mut actix_web::dev::AppService) { fn register(self, __config: &mut actix_web::dev::AppService) {
@@ -551,3 +542,15 @@ pub(crate) fn with_methods(input: TokenStream) -> TokenStream {
Err(err) => input_and_compile_error(input, err), Err(err) => input_and_compile_error(input, err),
} }
} }
/// Converts the error to a token stream and appends it to the original input.
///
/// Returning the original input in addition to the error is good for IDEs which can gracefully
/// recover and show more precise errors within the macro body.
///
/// See <https://github.com/rust-analyzer/rust-analyzer/issues/10468> for more info.
fn input_and_compile_error(mut item: TokenStream, err: syn::Error) -> TokenStream {
let compile_err = TokenStream::from(err.to_compile_error());
item.extend(compile_err);
item
}


@@ -1,103 +0,0 @@
use proc_macro::TokenStream;
use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::{quote, ToTokens as _};
use crate::{
input_and_compile_error,
route::{MethodType, RouteArgs},
};
pub fn with_scope(args: TokenStream, input: TokenStream) -> TokenStream {
match with_scope_inner(args, input.clone()) {
Ok(stream) => stream,
Err(err) => input_and_compile_error(input, err),
}
}
fn with_scope_inner(args: TokenStream, input: TokenStream) -> syn::Result<TokenStream> {
if args.is_empty() {
return Err(syn::Error::new(
Span::call_site(),
"missing arguments for scope macro, expected: #[scope(\"/prefix\")]",
));
}
let scope_prefix = syn::parse::<syn::LitStr>(args.clone()).map_err(|err| {
syn::Error::new(
err.span(),
"argument to scope macro is not a string literal, expected: #[scope(\"/prefix\")]",
)
})?;
let scope_prefix_value = scope_prefix.value();
if scope_prefix_value.ends_with('/') {
// trailing slashes cause non-obvious problems
// it's better to point them out to developers rather than accept them silently
return Err(syn::Error::new(
scope_prefix.span(),
"scopes should not have trailing slashes; see https://docs.rs/actix-web/4/actix_web/struct.Scope.html#avoid-trailing-slashes",
));
}
let mut module = syn::parse::<syn::ItemMod>(input).map_err(|err| {
syn::Error::new(err.span(), "#[scope] macro must be attached to a module")
})?;
// modify any routing macros (method or route[s]) attached to
// functions by prefixing them with this scope macro's argument
if let Some((_, items)) = &mut module.content {
for item in items {
if let syn::Item::Fn(fun) = item {
fun.attrs = fun
.attrs
.iter()
.map(|attr| modify_attribute_with_scope(attr, &scope_prefix_value))
.collect();
}
}
}
Ok(module.to_token_stream().into())
}
/// Checks if the attribute is a method type and has a route path, then modifies it.
fn modify_attribute_with_scope(attr: &syn::Attribute, scope_path: &str) -> syn::Attribute {
match (attr.parse_args::<RouteArgs>(), attr.clone().meta) {
(Ok(route_args), syn::Meta::List(meta_list)) if has_allowed_methods_in_scope(attr) => {
let modified_path = format!("{}{}", scope_path, route_args.path.value());
let options_tokens: Vec<TokenStream2> = route_args
.options
.iter()
.map(|option| {
quote! { ,#option }
})
.collect();
let combined_options_tokens: TokenStream2 =
options_tokens
.into_iter()
.fold(TokenStream2::new(), |mut acc, ts| {
acc.extend(std::iter::once(ts));
acc
});
syn::Attribute {
meta: syn::Meta::List(syn::MetaList {
tokens: quote! { #modified_path #combined_options_tokens },
..meta_list.clone()
}),
..attr.clone()
}
}
_ => attr.clone(),
}
}
fn has_allowed_methods_in_scope(attr: &syn::Attribute) -> bool {
MethodType::from_path(attr.path()).is_ok()
|| attr.path().is_ident("route")
|| attr.path().is_ident("ROUTE")
}


@@ -1,200 +0,0 @@
use actix_web::{guard::GuardContext, http, http::header, web, App, HttpResponse, Responder};
use actix_web_codegen::{delete, get, post, route, routes, scope};
pub fn image_guard(ctx: &GuardContext) -> bool {
ctx.header::<header::Accept>()
.map(|h| h.preference() == "image/*")
.unwrap_or(false)
}
#[scope("/test")]
mod scope_module {
// ensure that imports can be brought into the scope
use super::*;
#[get("/test/guard", guard = "image_guard")]
pub async fn guard() -> impl Responder {
HttpResponse::Ok()
}
#[get("/test")]
pub async fn test() -> impl Responder {
HttpResponse::Ok().finish()
}
#[get("/twice-test/{value}")]
pub async fn twice(value: web::Path<String>) -> impl actix_web::Responder {
let int_value: i32 = value.parse().unwrap_or(0);
let doubled = int_value * 2;
HttpResponse::Ok().body(format!("Twice value: {}", doubled))
}
#[post("/test")]
pub async fn post() -> impl Responder {
HttpResponse::Ok().body("post works")
}
#[delete("/test")]
pub async fn delete() -> impl Responder {
"delete works"
}
#[route("/test", method = "PUT", method = "PATCH", method = "CUSTOM")]
pub async fn multiple_shared_path() -> impl Responder {
HttpResponse::Ok().finish()
}
#[routes]
#[head("/test1")]
#[connect("/test2")]
#[options("/test3")]
#[trace("/test4")]
pub async fn multiple_separate_paths() -> impl Responder {
HttpResponse::Ok().finish()
}
// test calling this from other mod scope with scope attribute...
pub fn mod_common(message: String) -> impl actix_web::Responder {
HttpResponse::Ok().body(message)
}
}
/// Scope doc string to check in cargo expand.
#[scope("/v1")]
mod mod_scope_v1 {
use super::*;
/// Route doc string to check in cargo expand.
#[get("/test")]
pub async fn test() -> impl Responder {
scope_module::mod_common("version1 works".to_string())
}
}
#[scope("/v2")]
mod mod_scope_v2 {
use super::*;
// check to make sure non-function tokens in the scope block are preserved...
enum TestEnum {
Works,
}
#[get("/test")]
pub async fn test() -> impl Responder {
// make sure this type still exists...
let test_enum = TestEnum::Works;
match test_enum {
TestEnum::Works => scope_module::mod_common("version2 works".to_string()),
}
}
}
#[actix_rt::test]
async fn scope_get_async() {
let srv = actix_test::start(|| App::new().service(scope_module::test));
let request = srv.request(http::Method::GET, srv.url("/test/test"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
}
#[actix_rt::test]
async fn scope_get_param_async() {
let srv = actix_test::start(|| App::new().service(scope_module::twice));
let request = srv.request(http::Method::GET, srv.url("/test/twice-test/4"));
let mut response = request.send().await.unwrap();
let body = response.body().await.unwrap();
let body_str = String::from_utf8(body.to_vec()).unwrap();
assert_eq!(body_str, "Twice value: 8");
}
#[actix_rt::test]
async fn scope_post_async() {
let srv = actix_test::start(|| App::new().service(scope_module::post));
let request = srv.request(http::Method::POST, srv.url("/test/test"));
let mut response = request.send().await.unwrap();
let body = response.body().await.unwrap();
let body_str = String::from_utf8(body.to_vec()).unwrap();
assert_eq!(body_str, "post works");
}
#[actix_rt::test]
async fn multiple_shared_path_async() {
let srv = actix_test::start(|| App::new().service(scope_module::multiple_shared_path));
let request = srv.request(http::Method::PUT, srv.url("/test/test"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
let request = srv.request(http::Method::PATCH, srv.url("/test/test"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
}
#[actix_rt::test]
async fn multiple_multi_path_async() {
let srv = actix_test::start(|| App::new().service(scope_module::multiple_separate_paths));
let request = srv.request(http::Method::HEAD, srv.url("/test/test1"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
let request = srv.request(http::Method::CONNECT, srv.url("/test/test2"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
let request = srv.request(http::Method::OPTIONS, srv.url("/test/test3"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
let request = srv.request(http::Method::TRACE, srv.url("/test/test4"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
}
#[actix_rt::test]
async fn scope_delete_async() {
let srv = actix_test::start(|| App::new().service(scope_module::delete));
let request = srv.request(http::Method::DELETE, srv.url("/test/test"));
let mut response = request.send().await.unwrap();
let body = response.body().await.unwrap();
let body_str = String::from_utf8(body.to_vec()).unwrap();
assert_eq!(body_str, "delete works");
}
#[actix_rt::test]
async fn scope_get_with_guard_async() {
let srv = actix_test::start(|| App::new().service(scope_module::guard));
let request = srv
.request(http::Method::GET, srv.url("/test/test/guard"))
.insert_header(("Accept", "image/*"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
}
#[actix_rt::test]
async fn scope_v1_v2_async() {
let srv = actix_test::start(|| {
App::new()
.service(mod_scope_v1::test)
.service(mod_scope_v2::test)
});
let request = srv.request(http::Method::GET, srv.url("/v1/test"));
let mut response = request.send().await.unwrap();
let body = response.body().await.unwrap();
let body_str = String::from_utf8(body.to_vec()).unwrap();
assert_eq!(body_str, "version1 works");
let request = srv.request(http::Method::GET, srv.url("/v2/test"));
let mut response = request.send().await.unwrap();
let body = response.body().await.unwrap();
let body_str = String::from_utf8(body.to_vec()).unwrap();
assert_eq!(body_str, "version2 works");
}


@@ -1,4 +1,4 @@
#[rustversion::stable(1.72)] // MSRV #[rustversion::stable(1.68)] // MSRV
#[test] #[test]
fn compile_macros() { fn compile_macros() {
let t = trybuild::TestCases::new(); let t = trybuild::TestCases::new();
@@ -18,11 +18,6 @@ fn compile_macros() {
t.compile_fail("tests/trybuild/routes-missing-method-fail.rs"); t.compile_fail("tests/trybuild/routes-missing-method-fail.rs");
t.compile_fail("tests/trybuild/routes-missing-args-fail.rs"); t.compile_fail("tests/trybuild/routes-missing-args-fail.rs");
t.compile_fail("tests/trybuild/scope-on-handler.rs");
t.compile_fail("tests/trybuild/scope-missing-args.rs");
t.compile_fail("tests/trybuild/scope-invalid-args.rs");
t.compile_fail("tests/trybuild/scope-trailing-slash.rs");
t.pass("tests/trybuild/docstring-ok.rs"); t.pass("tests/trybuild/docstring-ok.rs");
t.pass("tests/trybuild/test-runtime.rs"); t.pass("tests/trybuild/test-runtime.rs");


@@ -13,20 +13,17 @@ error[E0277]: the trait bound `fn() -> impl std::future::Future<Output = String>
| required by a bound introduced by this call | required by a bound introduced by this call
| |
= help: the following other types implement trait `HttpServiceFactory`: = help: the following other types implement trait `HttpServiceFactory`:
Resource<T>
actix_web::Scope<T>
Vec<T>
Redirect
(A,)
(A, B) (A, B)
(A, B, C) (A, B, C)
(A, B, C, D) (A, B, C, D)
(A, B, C, D, E)
(A, B, C, D, E, F)
(A, B, C, D, E, F, G)
(A, B, C, D, E, F, G, H)
(A, B, C, D, E, F, G, H, I)
and $N others and $N others
note: required by a bound in `App::<T>::service` note: required by a bound in `App::<T>::service`
--> $WORKSPACE/actix-web/src/app.rs --> $WORKSPACE/actix-web/src/app.rs
| |
| pub fn service<F>(mut self, factory: F) -> Self
| ------- required by a bound in this associated function
| where
| F: HttpServiceFactory + 'static, | F: HttpServiceFactory + 'static,
| ^^^^^^^^^^^^^^^^^^ required by this bound in `App::<T>::service` | ^^^^^^^^^^^^^^^^^^ required by this bound in `App::<T>::service`

View File

@@ -13,20 +13,17 @@ error[E0277]: the trait bound `fn() -> impl std::future::Future<Output = String>
| required by a bound introduced by this call | required by a bound introduced by this call
| |
= help: the following other types implement trait `HttpServiceFactory`: = help: the following other types implement trait `HttpServiceFactory`:
Resource<T>
actix_web::Scope<T>
Vec<T>
Redirect
(A,)
(A, B) (A, B)
(A, B, C) (A, B, C)
(A, B, C, D) (A, B, C, D)
(A, B, C, D, E)
(A, B, C, D, E, F)
(A, B, C, D, E, F, G)
(A, B, C, D, E, F, G, H)
(A, B, C, D, E, F, G, H, I)
and $N others and $N others
note: required by a bound in `App::<T>::service` note: required by a bound in `App::<T>::service`
--> $WORKSPACE/actix-web/src/app.rs --> $WORKSPACE/actix-web/src/app.rs
| |
| pub fn service<F>(mut self, factory: F) -> Self
| ------- required by a bound in this associated function
| where
| F: HttpServiceFactory + 'static, | F: HttpServiceFactory + 'static,
| ^^^^^^^^^^^^^^^^^^ required by this bound in `App::<T>::service` | ^^^^^^^^^^^^^^^^^^ required by this bound in `App::<T>::service`


@@ -20,7 +20,10 @@ error: custom attribute panicked
13 | #[get("/{}")] 13 | #[get("/{}")]
| ^^^^^^^^^^^^^ | ^^^^^^^^^^^^^
| |
= help: message: Wrong path pattern: "/{}" empty capture group names are not allowed = help: message: Wrong path pattern: "/{}" regex parse error:
((?s-m)^/(?P<>[^/]+))$
^
error: empty capture group name
error: custom attribute panicked error: custom attribute panicked
--> $DIR/route-malformed-path-fail.rs:23:1 --> $DIR/route-malformed-path-fail.rs:23:1

Some files were not shown because too many files have changed in this diff.