Mirror of https://github.com/fafhrd91/actix-web, synced 2025-07-03 09:36:36 +02:00.

Compare commits: error-resp ... multipart- (41 commits)
Commits (SHA1):

01d60f3315, 6ae131ce29, 5c9e6e7c1d, 611154beb2, 210c9a5eb3, 00c185f617, 7326707599, befb9c8196, 2136e07bdd, e189e4a3bf, 71cd3a31f9, 668b8e5745, 763c58445a, 0b193c7106, 4db4251b8f, 9f45be03e1, 4222f92bd3, d92a73eacd, c612b5ce94, cbb55ba27d, 643d64581a, 66905efd7b, c076e34b5d, 3ecaff5f5b, fa74ab3dfb, 188206a903, 0ce488e57a, 132b84d3b1, cc5030c542, cd301a6932, 4c4c279938, 0fd85bae2a, 9b3de1f1fe, 9553e7afff, d9579cf58a, 7a2313cc4b, 2ee92d778e, 59e42c1446, 53086a90a6, 7f529e35b2, 4908fd7dea
Deleted file (cargo alias definitions):

@ -1,10 +0,0 @@
[alias]
lint = "clippy --workspace --all-targets -- -Dclippy::todo"
lint-all = "clippy --workspace --all-features --all-targets -- -Dclippy::todo"

# lib checking
ci-check-min = "hack --workspace check --no-default-features"
ci-check-default = "hack --workspace check"
ci-check-default-tests = "check --workspace --tests"
ci-check-all-feature-powerset="hack --workspace --feature-powerset --depth=4 --skip=__compress,experimental-io-uring check"
ci-check-all-feature-powerset-linux="hack --workspace --feature-powerset --depth=4 --skip=__compress check"
.github/workflows/ci-post-merge.yml (vendored, 26 lines changed)
@ -44,20 +44,20 @@ jobs:
|
||||
echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV
|
||||
|
||||
- name: Install Rust (${{ matrix.version.name }})
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
|
||||
with:
|
||||
toolchain: ${{ matrix.version.version }}
|
||||
|
||||
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
|
||||
uses: taiki-e/install-action@v2.34.0
|
||||
uses: taiki-e/install-action@v2.41.7
|
||||
with:
|
||||
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
|
||||
|
||||
- name: check minimal
|
||||
run: cargo ci-check-min
|
||||
run: just check-min
|
||||
|
||||
- name: check default
|
||||
run: cargo ci-check-default
|
||||
run: just check-default
|
||||
|
||||
- name: tests
|
||||
timeout-minutes: 60
|
||||
@ -76,16 +76,16 @@ jobs:
|
||||
- name: Free Disk Space
|
||||
run: ./scripts/free-disk-space.sh
|
||||
|
||||
- name: Setup mold linker
|
||||
uses: rui314/setup-mold@v1
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
|
||||
|
||||
- name: Install cargo-hack
|
||||
uses: taiki-e/install-action@v2.34.0
|
||||
- name: Install just, cargo-hack
|
||||
uses: taiki-e/install-action@v2.41.7
|
||||
with:
|
||||
tool: cargo-hack
|
||||
tool: just,cargo-hack
|
||||
|
||||
- name: check feature combinations
|
||||
run: cargo ci-check-all-feature-powerset
|
||||
|
||||
- name: check feature combinations
|
||||
run: cargo ci-check-all-feature-powerset-linux
|
||||
- name: Check feature combinations
|
||||
run: just check-feature-combinations
|
||||
|
.github/workflows/ci.yml (vendored, 14 lines changed)
@ -59,12 +59,12 @@ jobs:
|
||||
uses: rui314/setup-mold@v1
|
||||
|
||||
- name: Install Rust (${{ matrix.version.name }})
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
|
||||
with:
|
||||
toolchain: ${{ matrix.version.version }}
|
||||
|
||||
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
|
||||
uses: taiki-e/install-action@v2.34.0
|
||||
uses: taiki-e/install-action@v2.41.7
|
||||
with:
|
||||
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
|
||||
|
||||
@ -73,10 +73,10 @@ jobs:
|
||||
run: just downgrade-for-msrv
|
||||
|
||||
- name: check minimal
|
||||
run: cargo ci-check-min
|
||||
run: just check-min
|
||||
|
||||
- name: check default
|
||||
run: cargo ci-check-default
|
||||
run: just check-default
|
||||
|
||||
- name: tests
|
||||
timeout-minutes: 60
|
||||
@ -92,7 +92,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
|
||||
with:
|
||||
toolchain: nightly
|
||||
|
||||
@ -108,12 +108,12 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust (nightly)
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
|
||||
with:
|
||||
toolchain: nightly
|
||||
|
||||
- name: Install just
|
||||
uses: taiki-e/install-action@v2.34.0
|
||||
uses: taiki-e/install-action@v2.41.7
|
||||
with:
|
||||
tool: just
|
||||
|
||||
|
.github/workflows/coverage.yml (vendored, 17 lines changed)
@ -17,21 +17,22 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
||||
- name: Install Rust (nightly)
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
|
||||
with:
|
||||
components: llvm-tools-preview
|
||||
toolchain: nightly
|
||||
components: llvm-tools
|
||||
|
||||
- name: Install just,cargo-llvm-cov
|
||||
uses: taiki-e/install-action@v2.34.0
|
||||
- name: Install just, cargo-llvm-cov, cargo-nextest
|
||||
uses: taiki-e/install-action@v2.41.7
|
||||
with:
|
||||
tool: just,cargo-llvm-cov
|
||||
tool: just,cargo-llvm-cov,cargo-nextest
|
||||
|
||||
- name: Generate code coverage
|
||||
run: cargo llvm-cov --workspace --all-features --codecov --output-path codecov.json
|
||||
run: just test-coverage-codecov
|
||||
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v4.4.1
|
||||
uses: codecov/codecov-action@v4.5.0
|
||||
with:
|
||||
files: codecov.json
|
||||
fail_ci_if_error: true
|
||||
|
.github/workflows/lint.yml (vendored, 35 lines changed)
@ -18,7 +18,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust (nightly)
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
|
||||
with:
|
||||
toolchain: nightly
|
||||
components: rustfmt
|
||||
@ -36,7 +36,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
|
||||
with:
|
||||
components: clippy
|
||||
|
||||
@ -55,7 +55,7 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust (nightly)
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
|
||||
with:
|
||||
toolchain: nightly
|
||||
components: rust-docs
|
||||
@ -65,6 +65,29 @@ jobs:
|
||||
RUSTDOCFLAGS: -D warnings
|
||||
run: cargo +nightly doc --no-deps --workspace --all-features
|
||||
|
||||
check-external-types:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust (nightly-2024-05-01)
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
|
||||
with:
|
||||
toolchain: nightly-2024-05-01
|
||||
|
||||
- name: Install just
|
||||
uses: taiki-e/install-action@v2.41.7
|
||||
with:
|
||||
tool: just
|
||||
|
||||
- name: Install cargo-check-external-types
|
||||
uses: taiki-e/cache-cargo-install-action@v2.0.1
|
||||
with:
|
||||
tool: cargo-check-external-types
|
||||
|
||||
- name: check external types
|
||||
run: just check-external-types-all +nightly-2024-05-01
|
||||
|
||||
public-api-diff:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
@ -76,13 +99,13 @@ jobs:
|
||||
- name: Checkout PR branch
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
|
||||
- name: Install Rust (nightly-2024-06-07)
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
|
||||
with:
|
||||
toolchain: nightly-2024-06-07
|
||||
|
||||
- name: Install cargo-public-api
|
||||
uses: taiki-e/install-action@v2.34.0
|
||||
uses: taiki-e/install-action@v2.41.7
|
||||
with:
|
||||
tool: cargo-public-api
|
||||
|
||||
|
@ -13,9 +13,14 @@ categories = ["asynchronous", "web-programming::http-server"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "actix_files"
|
||||
path = "src/lib.rs"
|
||||
[package.metadata.cargo_check_external_types]
|
||||
allowed_external_types = [
|
||||
"actix_http::*",
|
||||
"actix_service::*",
|
||||
"actix_web::*",
|
||||
"http::*",
|
||||
"mime::*",
|
||||
]
|
||||
|
||||
[features]
|
||||
experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]
|
||||
|
@ -307,11 +307,11 @@ mod tests {
|
||||
let resp = file.respond_to(&req);
|
||||
assert_eq!(
|
||||
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
||||
"application/javascript; charset=utf-8"
|
||||
"text/javascript",
|
||||
);
|
||||
assert_eq!(
|
||||
resp.headers().get(header::CONTENT_DISPOSITION).unwrap(),
|
||||
"inline; filename=\"test.js\""
|
||||
"inline; filename=\"test.js\"",
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -18,9 +18,17 @@ edition = "2021"
|
||||
[package.metadata.docs.rs]
|
||||
features = []
|
||||
|
||||
[lib]
|
||||
name = "actix_http_test"
|
||||
path = "src/lib.rs"
|
||||
[package.metadata.cargo_check_external_types]
|
||||
allowed_external_types = [
|
||||
"actix_codec::*",
|
||||
"actix_http::*",
|
||||
"actix_server::*",
|
||||
"awc::*",
|
||||
"bytes::*",
|
||||
"futures_core::*",
|
||||
"http::*",
|
||||
"tokio::*",
|
||||
]
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
@ -1,7 +1,5 @@
|
||||
# `actix-http-test`
|
||||
|
||||
> Various helpers for Actix applications to use during testing.
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
[](https://crates.io/crates/actix-http-test)
|
||||
@ -14,3 +12,9 @@
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
<!-- cargo-rdme start -->
|
||||
|
||||
Various helpers for Actix applications to use during testing.
|
||||
|
||||
<!-- cargo-rdme end -->
|
||||
|
@ -2,6 +2,8 @@

## Unreleased

## 3.8.0

### Added

- Add `error::InvalidStatusCode` re-export.
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-http"
|
||||
version = "3.7.0"
|
||||
version = "3.8.0"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Rob Ede <robjtede@icloud.com>",
|
||||
@ -34,51 +34,72 @@ features = [
|
||||
"compress-zstd",
|
||||
]
|
||||
|
||||
[lib]
|
||||
name = "actix_http"
|
||||
path = "src/lib.rs"
|
||||
[package.metadata.cargo_check_external_types]
|
||||
allowed_external_types = [
|
||||
"actix_codec::*",
|
||||
"actix_service::*",
|
||||
"actix_tls::*",
|
||||
"actix_utils::*",
|
||||
"bytes::*",
|
||||
"bytestring::*",
|
||||
"encoding_rs::*",
|
||||
"futures_core::*",
|
||||
"h2::*",
|
||||
"http::*",
|
||||
"httparse::*",
|
||||
"language_tags::*",
|
||||
"mime::*",
|
||||
"openssl::*",
|
||||
"rustls::*",
|
||||
"tokio_util::*",
|
||||
"tokio::*",
|
||||
]
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
# HTTP/2 protocol support
|
||||
http2 = ["h2"]
|
||||
http2 = ["dep:h2"]
|
||||
|
||||
# WebSocket protocol implementation
|
||||
ws = [
|
||||
"local-channel",
|
||||
"base64",
|
||||
"rand",
|
||||
"sha1",
|
||||
"dep:local-channel",
|
||||
"dep:base64",
|
||||
"dep:rand",
|
||||
"dep:sha1",
|
||||
]
|
||||
|
||||
# TLS via OpenSSL
|
||||
openssl = ["actix-tls/accept", "actix-tls/openssl"]
|
||||
openssl = ["__tls", "actix-tls/accept", "actix-tls/openssl"]
|
||||
|
||||
# TLS via Rustls v0.20
|
||||
rustls = ["rustls-0_20"]
|
||||
rustls = ["__tls", "rustls-0_20"]
|
||||
|
||||
# TLS via Rustls v0.20
|
||||
rustls-0_20 = ["actix-tls/accept", "actix-tls/rustls-0_20"]
|
||||
rustls-0_20 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_20"]
|
||||
|
||||
# TLS via Rustls v0.21
|
||||
rustls-0_21 = ["actix-tls/accept", "actix-tls/rustls-0_21"]
|
||||
rustls-0_21 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_21"]
|
||||
|
||||
# TLS via Rustls v0.22
|
||||
rustls-0_22 = ["actix-tls/accept", "actix-tls/rustls-0_22"]
|
||||
rustls-0_22 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_22"]
|
||||
|
||||
# TLS via Rustls v0.23
|
||||
rustls-0_23 = ["actix-tls/accept", "actix-tls/rustls-0_23"]
|
||||
rustls-0_23 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_23"]
|
||||
|
||||
# Compression codecs
|
||||
compress-brotli = ["__compress", "brotli"]
|
||||
compress-gzip = ["__compress", "flate2"]
|
||||
compress-zstd = ["__compress", "zstd"]
|
||||
compress-brotli = ["__compress", "dep:brotli"]
|
||||
compress-gzip = ["__compress", "dep:flate2"]
|
||||
compress-zstd = ["__compress", "dep:zstd"]
|
||||
|
||||
# Internal (PRIVATE!) features used to aid testing and checking feature status.
|
||||
# Don't rely on these whatsoever. They are semver-exempt and may disappear at anytime.
|
||||
__compress = []
|
||||
|
||||
# Internal (PRIVATE!) features used to aid checking feature status.
|
||||
# Don't rely on these whatsoever. They may disappear at anytime.
|
||||
__tls = []
|
||||
|
||||
[dependencies]
|
||||
actix-service = "2"
|
||||
actix-codec = "0.5"
|
||||
|
@ -5,11 +5,11 @@
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
[](https://crates.io/crates/actix-http)
|
||||
[](https://docs.rs/actix-http/3.7.0)
|
||||
[](https://docs.rs/actix-http/3.8.0)
|
||||

|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-http/3.7.0)
|
||||
[](https://deps.rs/crate/actix-http/3.8.0)
|
||||
[](https://crates.io/crates/actix-http)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
@ -6,10 +6,10 @@
|
||||
//! | ------------------- | ------------------------------------------- |
|
||||
//! | `http2` | HTTP/2 support via [h2]. |
|
||||
//! | `openssl` | TLS support via [OpenSSL]. |
|
||||
//! | `rustls` | TLS support via [rustls] 0.20. |
|
||||
//! | `rustls-0_21` | TLS support via [rustls] 0.21. |
|
||||
//! | `rustls-0_22` | TLS support via [rustls] 0.22. |
|
||||
//! | `rustls-0_23` | TLS support via [rustls] 0.23. |
|
||||
//! | `rustls-0_20` | TLS support via rustls 0.20. |
|
||||
//! | `rustls-0_21` | TLS support via rustls 0.21. |
|
||||
//! | `rustls-0_22` | TLS support via rustls 0.22. |
|
||||
//! | `rustls-0_23` | TLS support via [rustls] 0.23. |
|
||||
//! | `compress-brotli` | Payload compression support: Brotli. |
|
||||
//! | `compress-gzip` | Payload compression support: Deflate, Gzip. |
|
||||
//! | `compress-zstd` | Payload compression support: Zstd. |
|
||||
@ -61,13 +61,7 @@ pub mod ws;
|
||||
|
||||
#[allow(deprecated)]
|
||||
pub use self::payload::PayloadStream;
|
||||
#[cfg(any(
|
||||
feature = "openssl",
|
||||
feature = "rustls-0_20",
|
||||
feature = "rustls-0_21",
|
||||
feature = "rustls-0_22",
|
||||
feature = "rustls-0_23",
|
||||
))]
|
||||
#[cfg(feature = "__tls")]
|
||||
pub use self::service::TlsAcceptorConfig;
|
||||
pub use self::{
|
||||
builder::HttpServiceBuilder,
|
||||
|
@ -351,12 +351,9 @@ mod tests {
|
||||
assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/plain");
|
||||
|
||||
let resp = Response::build(StatusCode::OK)
|
||||
.content_type(mime::APPLICATION_JAVASCRIPT_UTF_8)
|
||||
.content_type(mime::TEXT_JAVASCRIPT)
|
||||
.body(Bytes::new());
|
||||
assert_eq!(
|
||||
resp.headers().get(CONTENT_TYPE).unwrap(),
|
||||
"application/javascript; charset=utf-8"
|
||||
);
|
||||
assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/javascript");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -241,25 +241,13 @@ where
|
||||
}
|
||||
|
||||
/// Configuration options used when accepting TLS connection.
|
||||
#[cfg(any(
|
||||
feature = "openssl",
|
||||
feature = "rustls-0_20",
|
||||
feature = "rustls-0_21",
|
||||
feature = "rustls-0_22",
|
||||
feature = "rustls-0_23",
|
||||
))]
|
||||
#[cfg(feature = "__tls")]
|
||||
#[derive(Debug, Default)]
|
||||
pub struct TlsAcceptorConfig {
|
||||
pub(crate) handshake_timeout: Option<std::time::Duration>,
|
||||
}
|
||||
|
||||
#[cfg(any(
|
||||
feature = "openssl",
|
||||
feature = "rustls-0_20",
|
||||
feature = "rustls-0_21",
|
||||
feature = "rustls-0_22",
|
||||
feature = "rustls-0_23",
|
||||
))]
|
||||
#[cfg(feature = "__tls")]
|
||||
impl TlsAcceptorConfig {
|
||||
/// Set TLS handshake timeout duration.
|
||||
pub fn handshake_timeout(self, dur: std::time::Duration) -> Self {
|
||||
|
@ -25,7 +25,7 @@ quote = "1"
|
||||
syn = "2"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-multipart = "0.6"
|
||||
actix-multipart = "0.7"
|
||||
actix-web = "4"
|
||||
rustversion = "1"
|
||||
trybuild = "1"
|
||||
|
@ -138,7 +138,7 @@ struct ParsedField<'t> {
|
||||
/// `#[multipart(duplicate_field = "<behavior>")]` attribute:
|
||||
///
|
||||
/// - "ignore": (default) Extra fields are ignored. I.e., the first one is persisted.
|
||||
/// - "deny": A `MultipartError::UnsupportedField` error response is returned.
|
||||
/// - "deny": A `MultipartError::UnknownField` error response is returned.
|
||||
/// - "replace": Each field is processed, but only the last one is persisted.
|
||||
///
|
||||
/// Note that `Vec` fields will ignore this option.
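The duplicate-field behaviors documented above ("ignore", "deny", "replace") are selected with a container attribute on the derived struct. A small hypothetical sketch (struct and field names invented, not part of this diff) of a form that denies duplicates:

```rust
use actix_multipart::form::{text::Text, MultipartForm};

// Hypothetical form illustrating the attribute described above; not part of the diff.
#[derive(MultipartForm)]
#[multipart(duplicate_field = "deny")]
struct StrictForm {
    // A repeated "title" part now produces a DuplicateField error response.
    title: Text<String>,

    // Vec fields ignore the attribute and always accept repeats.
    tags: Vec<Text<String>>,
}
```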
|
||||
@ -229,7 +229,7 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenS
|
||||
// Return value when a field name is not supported by the form
|
||||
let unknown_field_result = if attrs.deny_unknown_fields {
|
||||
quote!(::std::result::Result::Err(
|
||||
::actix_multipart::MultipartError::UnsupportedField(field.name().to_string())
|
||||
::actix_multipart::MultipartError::UnknownField(field.name().unwrap().to_string())
|
||||
))
|
||||
} else {
|
||||
quote!(::std::result::Result::Ok(()))
|
||||
@ -292,7 +292,7 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenS
|
||||
limits: &'t mut ::actix_multipart::form::Limits,
|
||||
state: &'t mut ::actix_multipart::form::State,
|
||||
) -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = ::std::result::Result<(), ::actix_multipart::MultipartError>> + 't>> {
|
||||
match field.name() {
|
||||
match field.name().unwrap() {
|
||||
#handle_field_impl
|
||||
_ => return ::std::boxed::Box::pin(::std::future::ready(#unknown_field_result)),
|
||||
}
|
||||
|
@ -2,6 +2,18 @@

## Unreleased

## 0.7.0

- Add `MultipartError::ContentTypeIncompatible` variant.
- Add `MultipartError::ContentDispositionNameMissing` variant.
- Add `Field::bytes()` method.
- Rename `MultipartError::{NoContentDisposition => ContentDispositionMissing}` variant.
- Rename `MultipartError::{NoContentType => ContentTypeMissing}` variant.
- Rename `MultipartError::{ParseContentType => ContentTypeParse}` variant.
- Rename `MultipartError::{Boundary => BoundaryMissing}` variant.
- Rename `MultipartError::{UnsupportedField => UnknownField}` variant.
- Remove top-level re-exports of `test` utilities.

## 0.6.2

- Add testing utilities under new module `test`.
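Since the 0.7.0 entries above rename several public variants, downstream matches need updating. A hypothetical before/after sketch (not part of the diff), assuming the error enum continues to be exposed as `actix_multipart::MultipartError`:

```rust
use actix_multipart::MultipartError;

// Old 0.6.x variant names shown in the trailing comments.
fn describe(err: &MultipartError) -> &'static str {
    match err {
        MultipartError::ContentTypeMissing => "missing Content-Type",               // was NoContentType
        MultipartError::ContentTypeParse => "could not parse Content-Type",         // was ParseContentType
        MultipartError::ContentDispositionMissing => "missing Content-Disposition", // was NoContentDisposition
        MultipartError::BoundaryMissing => "missing multipart boundary",            // was Boundary
        MultipartError::UnknownField(_) => "unknown field",                         // was UnsupportedField
        _ => "other multipart error", // the enum is #[non_exhaustive]
    }
}
```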
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-multipart"
|
||||
version = "0.6.2"
|
||||
version = "0.7.0"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Jacob Halsey <jacob@jhalsey.com>",
|
||||
@ -16,6 +16,21 @@ edition = "2021"
|
||||
rustdoc-args = ["--cfg", "docsrs"]
|
||||
all-features = true
|
||||
|
||||
[package.metadata.cargo_check_external_types]
|
||||
allowed_external_types = [
|
||||
"actix_http::*",
|
||||
"actix_multipart_derive::*",
|
||||
"actix_utils::*",
|
||||
"actix_web::*",
|
||||
"bytes::*",
|
||||
"futures_core::*",
|
||||
"mime::*",
|
||||
"serde_json::*",
|
||||
"serde_plain::*",
|
||||
"serde::*",
|
||||
"tempfile::*",
|
||||
]
|
||||
|
||||
[features]
|
||||
default = ["tempfile", "derive"]
|
||||
derive = ["actix-multipart-derive"]
|
||||
@ -26,7 +41,6 @@ actix-multipart-derive = { version = "=0.6.1", optional = true }
|
||||
actix-utils = "3"
|
||||
actix-web = { version = "4", default-features = false }
|
||||
|
||||
bytes = "1"
|
||||
derive_more = "0.99.5"
|
||||
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||
@ -48,8 +62,16 @@ actix-multipart-rfc7578 = "0.10"
|
||||
actix-rt = "2.2"
|
||||
actix-test = "0.1"
|
||||
actix-web = "4"
|
||||
assert_matches = "1"
|
||||
awc = "3"
|
||||
env_logger = "0.11"
|
||||
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||
futures-test = "0.3"
|
||||
multer = "3"
|
||||
tokio = { version = "1.24.2", features = ["sync"] }
|
||||
tokio-stream = "0.1"
|
||||
|
||||
[lints.rust]
|
||||
future_incompatible = { level = "deny" }
|
||||
rust_2018_idioms = { level = "deny" }
|
||||
nonstandard_style = { level = "deny" }
|
||||
|
@ -1,32 +1,23 @@
|
||||
# `actix-multipart`
|
||||
|
||||
> Multipart form support for Actix Web.
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
[](https://crates.io/crates/actix-multipart)
|
||||
[](https://docs.rs/actix-multipart/0.6.2)
|
||||
[](https://docs.rs/actix-multipart/0.7.0)
|
||||

|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-multipart/0.6.2)
|
||||
[](https://deps.rs/crate/actix-multipart/0.7.0)
|
||||
[](https://crates.io/crates/actix-multipart)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
## Example
|
||||
<!-- cargo-rdme start -->
|
||||
|
||||
Dependencies:
|
||||
Multipart form support for Actix Web.
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
actix-multipart = "0.6"
|
||||
actix-web = "4.5"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
```
|
||||
|
||||
Code:
|
||||
## Examples
|
||||
|
||||
```rust
|
||||
use actix_web::{post, App, HttpServer, Responder};
|
||||
@ -63,15 +54,15 @@ async fn main() -> std::io::Result<()> {
|
||||
}
|
||||
```
|
||||
|
||||
Curl request :
|
||||
cURL request:
|
||||
|
||||
```bash
|
||||
```sh
|
||||
curl -v --request POST \
|
||||
--url http://localhost:8080/videos \
|
||||
-F 'json={"name": "Cargo.lock"};type=application/json' \
|
||||
-F file=@./Cargo.lock
|
||||
```
|
||||
|
||||
### Examples
|
||||
<!-- cargo-rdme end -->
|
||||
|
||||
https://github.com/actix/examples/tree/master/forms/multipart
|
||||
[More available in the examples repo →](https://github.com/actix/examples/tree/master/forms/multipart)
|
||||
|
actix-multipart/examples/form.rs (new file, 36 lines)
@ -0,0 +1,36 @@
|
||||
use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
|
||||
use actix_web::{middleware::Logger, post, App, HttpServer, Responder};
|
||||
use serde::Deserialize;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Metadata {
|
||||
name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, MultipartForm)]
|
||||
struct UploadForm {
|
||||
#[multipart(limit = "100MB")]
|
||||
file: TempFile,
|
||||
json: MpJson<Metadata>,
|
||||
}
|
||||
|
||||
#[post("/videos")]
|
||||
async fn post_video(MultipartForm(form): MultipartForm<UploadForm>) -> impl Responder {
|
||||
format!(
|
||||
"Uploaded file {}, with size: {}\ntemporary file ({}) was deleted\n",
|
||||
form.json.name,
|
||||
form.file.size,
|
||||
form.file.file.path().display(),
|
||||
)
|
||||
}
|
||||
|
||||
#[actix_web::main]
|
||||
async fn main() -> std::io::Result<()> {
|
||||
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
||||
|
||||
HttpServer::new(move || App::new().service(post_video).wrap(Logger::default()))
|
||||
.workers(2)
|
||||
.bind(("127.0.0.1", 8080))?
|
||||
.run()
|
||||
.await
|
||||
}
|
@ -10,78 +10,96 @@ use derive_more::{Display, Error, From};
|
||||
/// A set of errors that can occur during parsing multipart streams.
|
||||
#[derive(Debug, Display, From, Error)]
|
||||
#[non_exhaustive]
|
||||
pub enum MultipartError {
|
||||
/// Content-Disposition header is not found or is not equal to "form-data".
|
||||
pub enum Error {
|
||||
/// Could not find Content-Type header.
|
||||
#[display(fmt = "Could not find Content-Type header")]
|
||||
ContentTypeMissing,
|
||||
|
||||
/// Could not parse Content-Type header.
|
||||
#[display(fmt = "Could not parse Content-Type header")]
|
||||
ContentTypeParse,
|
||||
|
||||
/// Parsed Content-Type did not have "multipart" top-level media type.
|
||||
///
|
||||
/// According to [RFC 7578 §4.2](https://datatracker.ietf.org/doc/html/rfc7578#section-4.2) a
|
||||
/// Content-Disposition header must always be present and equal to "form-data".
|
||||
#[display(fmt = "No Content-Disposition `form-data` header")]
|
||||
NoContentDisposition,
|
||||
/// Also raised when extracting a [`MultipartForm`] from a request that does not have the
|
||||
/// "multipart/form-data" media type.
|
||||
///
|
||||
/// [`MultipartForm`]: struct@crate::form::MultipartForm
|
||||
#[display(fmt = "Parsed Content-Type did not have "multipart" top-level media type")]
|
||||
ContentTypeIncompatible,
|
||||
|
||||
/// Content-Type header is not found
|
||||
#[display(fmt = "No Content-Type header found")]
|
||||
NoContentType,
|
||||
|
||||
/// Can not parse Content-Type header
|
||||
#[display(fmt = "Can not parse Content-Type header")]
|
||||
ParseContentType,
|
||||
|
||||
/// Multipart boundary is not found
|
||||
/// Multipart boundary is not found.
|
||||
#[display(fmt = "Multipart boundary is not found")]
|
||||
Boundary,
|
||||
BoundaryMissing,
|
||||
|
||||
/// Nested multipart is not supported
|
||||
/// Content-Disposition header was not found or not of disposition type "form-data" when parsing
|
||||
/// a "form-data" field.
|
||||
///
|
||||
/// As per [RFC 7578 §4.2], a "multipart/form-data" field's Content-Disposition header must
|
||||
/// always be present and have a disposition type of "form-data".
|
||||
///
|
||||
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
|
||||
#[display(fmt = "Content-Disposition header was not found when parsing a \"form-data\" field")]
|
||||
ContentDispositionMissing,
|
||||
|
||||
/// Content-Disposition name parameter was not found when parsing a "form-data" field.
|
||||
///
|
||||
/// As per [RFC 7578 §4.2], a "multipart/form-data" field's Content-Disposition header must
|
||||
/// always include a "name" parameter.
|
||||
///
|
||||
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
|
||||
#[display(fmt = "Content-Disposition header was not found when parsing a \"form-data\" field")]
|
||||
ContentDispositionNameMissing,
|
||||
|
||||
/// Nested multipart is not supported.
|
||||
#[display(fmt = "Nested multipart is not supported")]
|
||||
Nested,
|
||||
|
||||
/// Multipart stream is incomplete
|
||||
/// Multipart stream is incomplete.
|
||||
#[display(fmt = "Multipart stream is incomplete")]
|
||||
Incomplete,
|
||||
|
||||
/// Error during field parsing
|
||||
#[display(fmt = "{}", _0)]
|
||||
/// Field parsing failed.
|
||||
#[display(fmt = "Error during field parsing")]
|
||||
Parse(ParseError),
|
||||
|
||||
/// Payload error
|
||||
#[display(fmt = "{}", _0)]
|
||||
/// HTTP payload error.
|
||||
#[display(fmt = "Payload error")]
|
||||
Payload(PayloadError),
|
||||
|
||||
/// Not consumed
|
||||
#[display(fmt = "Multipart stream is not consumed")]
|
||||
/// Stream is not consumed.
|
||||
#[display(fmt = "Stream is not consumed")]
|
||||
NotConsumed,
|
||||
|
||||
/// An error from a field handler in a form
|
||||
#[display(
|
||||
fmt = "An error occurred processing field `{}`: {}",
|
||||
field_name,
|
||||
source
|
||||
)]
|
||||
/// Form field handler raised error.
|
||||
#[display(fmt = "An error occurred processing field: {name}")]
|
||||
Field {
|
||||
field_name: String,
|
||||
name: String,
|
||||
source: actix_web::Error,
|
||||
},
|
||||
|
||||
/// Duplicate field
|
||||
#[display(fmt = "Duplicate field found for: `{}`", _0)]
|
||||
/// Duplicate field found (for structure that opted-in to denying duplicate fields).
|
||||
#[display(fmt = "Duplicate field found: {_0}")]
|
||||
#[from(ignore)]
|
||||
DuplicateField(#[error(not(source))] String),
|
||||
|
||||
/// Missing field
|
||||
#[display(fmt = "Field with name `{}` is required", _0)]
|
||||
/// Required field is missing.
|
||||
#[display(fmt = "Required field is missing: {_0}")]
|
||||
#[from(ignore)]
|
||||
MissingField(#[error(not(source))] String),
|
||||
|
||||
/// Unknown field
|
||||
#[display(fmt = "Unsupported field `{}`", _0)]
|
||||
/// Unknown field (for structure that opted-in to denying unknown fields).
|
||||
#[display(fmt = "Unknown field: {_0}")]
|
||||
#[from(ignore)]
|
||||
UnsupportedField(#[error(not(source))] String),
|
||||
UnknownField(#[error(not(source))] String),
|
||||
}
|
||||
|
||||
/// Return `BadRequest` for `MultipartError`
|
||||
impl ResponseError for MultipartError {
|
||||
/// Return `BadRequest` for `MultipartError`.
|
||||
impl ResponseError for Error {
|
||||
fn status_code(&self) -> StatusCode {
|
||||
match &self {
|
||||
MultipartError::Field { source, .. } => source.as_response_error().status_code(),
|
||||
Error::Field { source, .. } => source.as_response_error().status_code(),
|
||||
Error::ContentTypeIncompatible => StatusCode::UNSUPPORTED_MEDIA_TYPE,
|
||||
_ => StatusCode::BAD_REQUEST,
|
||||
}
|
||||
}
|
||||
@ -93,7 +111,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_multipart_error() {
|
||||
let resp = MultipartError::Boundary.error_response();
|
||||
let resp = Error::BoundaryMissing.error_response();
|
||||
assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
|
||||
}
|
||||
}
|
||||
|
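Per the `ResponseError` impl above, `ContentTypeIncompatible` now maps to a 415 response while the remaining variants keep returning 400. A small illustrative check (not from the diff; assumes the enum stays re-exported as `actix_multipart::MultipartError`):

```rust
use actix_multipart::MultipartError;
use actix_web::{http::StatusCode, ResponseError as _};

// Status codes produced by the ResponseError impl shown above.
fn status_mapping_demo() {
    assert_eq!(
        MultipartError::ContentTypeIncompatible.status_code(),
        StatusCode::UNSUPPORTED_MEDIA_TYPE
    );
    assert_eq!(
        MultipartError::BoundaryMissing.status_code(),
        StatusCode::BAD_REQUEST
    );
}
```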
@ -1,21 +1,20 @@
|
||||
//! Multipart payload support
|
||||
|
||||
use actix_utils::future::{ready, Ready};
|
||||
use actix_web::{dev::Payload, Error, FromRequest, HttpRequest};
|
||||
|
||||
use crate::server::Multipart;
|
||||
use crate::multipart::Multipart;
|
||||
|
||||
/// Get request's payload as multipart stream.
|
||||
/// Extract request's payload as multipart stream.
|
||||
///
|
||||
/// Content-type: multipart/form-data;
|
||||
/// Content-type: multipart/*;
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use actix_web::{web, HttpResponse, Error};
|
||||
/// use actix_web::{web, HttpResponse};
|
||||
/// use actix_multipart::Multipart;
|
||||
/// use futures_util::StreamExt as _;
|
||||
///
|
||||
/// async fn index(mut payload: Multipart) -> Result<HttpResponse, Error> {
|
||||
/// async fn index(mut payload: Multipart) -> actix_web::Result<HttpResponse> {
|
||||
/// // iterate over multipart stream
|
||||
/// while let Some(item) = payload.next().await {
|
||||
/// let mut field = item?;
|
||||
@ -26,7 +25,7 @@ use crate::server::Multipart;
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// Ok(HttpResponse::Ok().into())
|
||||
/// Ok(HttpResponse::Ok().finish())
|
||||
/// }
|
||||
/// ```
|
||||
impl FromRequest for Multipart {
|
||||
@ -35,9 +34,6 @@ impl FromRequest for Multipart {
|
||||
|
||||
#[inline]
|
||||
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
|
||||
ready(Ok(match Multipart::boundary(req.headers()) {
|
||||
Ok(boundary) => Multipart::from_boundary(boundary, payload.take()),
|
||||
Err(err) => Multipart::from_error(err),
|
||||
}))
|
||||
ready(Ok(Multipart::from_req(req, payload)))
|
||||
}
|
||||
}
|
||||
|
actix-multipart/src/field.rs (new file, 490 lines)
@ -0,0 +1,490 @@
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
cmp, fmt, mem,
|
||||
pin::Pin,
|
||||
rc::Rc,
|
||||
task::{ready, Context, Poll},
|
||||
};
|
||||
|
||||
use actix_utils::future::poll_fn;
|
||||
use actix_web::{
|
||||
error::PayloadError,
|
||||
http::header::{self, ContentDisposition, HeaderMap},
|
||||
web::{Bytes, BytesMut},
|
||||
};
|
||||
use derive_more::{Display, Error};
|
||||
use futures_core::Stream;
|
||||
use mime::Mime;
|
||||
|
||||
use crate::{
|
||||
error::Error,
|
||||
payload::{PayloadBuffer, PayloadRef},
|
||||
safety::Safety,
|
||||
};
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "limit exceeded")]
|
||||
pub struct LimitExceeded;
|
||||
|
||||
/// A single field in a multipart stream.
|
||||
pub struct Field {
|
||||
/// Field's Content-Type.
|
||||
content_type: Option<Mime>,
|
||||
|
||||
/// Field's Content-Disposition.
|
||||
content_disposition: Option<ContentDisposition>,
|
||||
|
||||
/// Form field name.
|
||||
///
|
||||
/// A non-optional storage for form field names to avoid unwraps in `form` module. Will be an
|
||||
/// empty string in non-form contexts.
|
||||
///
|
||||
// INVARIANT: always non-empty when request content-type is multipart/form-data.
|
||||
pub(crate) form_field_name: String,
|
||||
|
||||
/// Field's header map.
|
||||
headers: HeaderMap,
|
||||
|
||||
safety: Safety,
|
||||
inner: Rc<RefCell<InnerField>>,
|
||||
}
|
||||
|
||||
impl Field {
|
||||
pub(crate) fn new(
|
||||
content_type: Option<Mime>,
|
||||
content_disposition: Option<ContentDisposition>,
|
||||
form_field_name: Option<String>,
|
||||
headers: HeaderMap,
|
||||
safety: Safety,
|
||||
inner: Rc<RefCell<InnerField>>,
|
||||
) -> Self {
|
||||
Field {
|
||||
content_type,
|
||||
content_disposition,
|
||||
form_field_name: form_field_name.unwrap_or_default(),
|
||||
headers,
|
||||
inner,
|
||||
safety,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a reference to the field's header map.
|
||||
pub fn headers(&self) -> &HeaderMap {
|
||||
&self.headers
|
||||
}
|
||||
|
||||
/// Returns a reference to the field's content (mime) type, if it is supplied by the client.
|
||||
///
|
||||
/// According to [RFC 7578](https://www.rfc-editor.org/rfc/rfc7578#section-4.4), if it is not
|
||||
/// present, it should default to "text/plain". Note it is the responsibility of the client to
|
||||
/// provide the appropriate content type, there is no attempt to validate this by the server.
|
||||
pub fn content_type(&self) -> Option<&Mime> {
|
||||
self.content_type.as_ref()
|
||||
}
|
||||
|
||||
/// Returns this field's parsed Content-Disposition header, if set.
|
||||
///
|
||||
/// # Validation
|
||||
///
|
||||
/// Per [RFC 7578 §4.2], the parts of a multipart/form-data payload MUST contain a
|
||||
/// Content-Disposition header field where the disposition type is `form-data` and MUST also
|
||||
/// contain an additional parameter of `name` with its value being the original field name from
|
||||
/// the form. This requirement is enforced during extraction for multipart/form-data requests,
|
||||
/// but not other kinds of multipart requests (such as multipart/related).
|
||||
///
|
||||
/// As such, it is safe to `.unwrap()` calls to `.content_disposition()` once you have verified the request is multipart/form-data.
|
||||
///
|
||||
/// The [`name()`](Self::name) method is also provided as a convenience for obtaining the
|
||||
/// aforementioned name parameter.
|
||||
///
|
||||
/// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
|
||||
pub fn content_disposition(&self) -> Option<&ContentDisposition> {
|
||||
self.content_disposition.as_ref()
|
||||
}
|
||||
|
||||
/// Returns the field's name, if set.
|
||||
///
|
||||
/// See [`content_disposition()`](Self::content_disposition) regarding guarantees on presence of
|
||||
/// the "name" field.
|
||||
pub fn name(&self) -> Option<&str> {
|
||||
self.content_disposition()?.get_name()
|
||||
}
|
||||
|
||||
/// Collects the raw field data, up to `limit` bytes.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Any errors produced by the data stream are returned as `Ok(Err(Error))` immediately.
|
||||
///
|
||||
/// If the buffered data size would exceed `limit`, an `Err(LimitExceeded)` is returned. Note
|
||||
/// that, in this case, the full data stream is exhausted before returning the error so that
|
||||
/// subsequent fields can still be read. To better defend against malicious/infinite requests,
|
||||
/// it is advisable to also put a timeout on this call.
|
||||
pub async fn bytes(&mut self, limit: usize) -> Result<Result<Bytes, Error>, LimitExceeded> {
|
||||
/// Sensible default (2kB) for initial, bounded allocation when collecting body bytes.
|
||||
const INITIAL_ALLOC_BYTES: usize = 2 * 1024;
|
||||
|
||||
let mut exceeded_limit = false;
|
||||
let mut buf = BytesMut::with_capacity(INITIAL_ALLOC_BYTES);
|
||||
|
||||
let mut field = Pin::new(self);
|
||||
|
||||
match poll_fn(|cx| loop {
|
||||
match ready!(field.as_mut().poll_next(cx)) {
|
||||
// if already over limit, discard chunk to advance multipart request
|
||||
Some(Ok(_chunk)) if exceeded_limit => {}
|
||||
|
||||
// if limit is exceeded set flag to true and continue
|
||||
Some(Ok(chunk)) if buf.len() + chunk.len() > limit => {
|
||||
exceeded_limit = true;
|
||||
// eagerly de-allocate field data buffer
|
||||
let _ = mem::take(&mut buf);
|
||||
}
|
||||
|
||||
Some(Ok(chunk)) => buf.extend_from_slice(&chunk),
|
||||
|
||||
None => return Poll::Ready(Ok(())),
|
||||
Some(Err(err)) => return Poll::Ready(Err(err)),
|
||||
}
|
||||
})
|
||||
.await
|
||||
{
|
||||
// propagate error returned from body poll
|
||||
Err(err) => Ok(Err(err)),
|
||||
|
||||
// limit was exceeded while reading body
|
||||
Ok(()) if exceeded_limit => Err(LimitExceeded),
|
||||
|
||||
// otherwise return body buffer
|
||||
Ok(()) => Ok(Ok(buf.freeze())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Stream for Field {
|
||||
type Item = Result<Bytes, Error>;
|
||||
|
||||
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||
let this = self.get_mut();
|
||||
let mut inner = this.inner.borrow_mut();
|
||||
|
||||
if let Some(mut buffer) = inner
|
||||
.payload
|
||||
.as_ref()
|
||||
.expect("Field should not be polled after completion")
|
||||
.get_mut(&this.safety)
|
||||
{
|
||||
// check safety and poll read payload to buffer.
|
||||
buffer.poll_stream(cx)?;
|
||||
} else if !this.safety.is_clean() {
|
||||
// safety violation
|
||||
return Poll::Ready(Some(Err(Error::NotConsumed)));
|
||||
} else {
|
||||
return Poll::Pending;
|
||||
}
|
||||
|
||||
inner.poll(&this.safety)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Field {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if let Some(ct) = &self.content_type {
|
||||
writeln!(f, "\nField: {}", ct)?;
|
||||
} else {
|
||||
writeln!(f, "\nField:")?;
|
||||
}
|
||||
writeln!(f, " boundary: {}", self.inner.borrow().boundary)?;
|
||||
writeln!(f, " headers:")?;
|
||||
for (key, val) in self.headers.iter() {
|
||||
writeln!(f, " {:?}: {:?}", key, val)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct InnerField {
|
||||
/// Payload is initialized as Some and is `take`n when the field stream finishes.
|
||||
payload: Option<PayloadRef>,
|
||||
boundary: String,
|
||||
eof: bool,
|
||||
length: Option<u64>,
|
||||
}
|
||||
|
||||
impl InnerField {
|
||||
pub(crate) fn new_in_rc(
|
||||
payload: PayloadRef,
|
||||
boundary: String,
|
||||
headers: &HeaderMap,
|
||||
) -> Result<Rc<RefCell<InnerField>>, PayloadError> {
|
||||
Self::new(payload, boundary, headers).map(|this| Rc::new(RefCell::new(this)))
|
||||
}
|
||||
|
||||
pub(crate) fn new(
|
||||
payload: PayloadRef,
|
||||
boundary: String,
|
||||
headers: &HeaderMap,
|
||||
) -> Result<InnerField, PayloadError> {
|
||||
let len = if let Some(len) = headers.get(&header::CONTENT_LENGTH) {
|
||||
match len.to_str().ok().and_then(|len| len.parse::<u64>().ok()) {
|
||||
Some(len) => Some(len),
|
||||
None => return Err(PayloadError::Incomplete(None)),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(InnerField {
|
||||
boundary,
|
||||
payload: Some(payload),
|
||||
eof: false,
|
||||
length: len,
|
||||
})
|
||||
}
|
||||
|
||||
/// Reads body part content chunk of the specified size.
|
||||
///
|
||||
/// The body part must have a `Content-Length` header with a proper value.
|
||||
pub(crate) fn read_len(
|
||||
payload: &mut PayloadBuffer,
|
||||
size: &mut u64,
|
||||
) -> Poll<Option<Result<Bytes, Error>>> {
|
||||
if *size == 0 {
|
||||
Poll::Ready(None)
|
||||
} else {
|
||||
match payload.read_max(*size)? {
|
||||
Some(mut chunk) => {
|
||||
let len = cmp::min(chunk.len() as u64, *size);
|
||||
*size -= len;
|
||||
let ch = chunk.split_to(len as usize);
|
||||
if !chunk.is_empty() {
|
||||
payload.unprocessed(chunk);
|
||||
}
|
||||
Poll::Ready(Some(Ok(ch)))
|
||||
}
|
||||
None => {
|
||||
if payload.eof && (*size != 0) {
|
||||
Poll::Ready(Some(Err(Error::Incomplete)))
|
||||
} else {
|
||||
Poll::Pending
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Reads content chunk of body part with unknown length.
|
||||
///
|
||||
/// The `Content-Length` header for body part is not necessary.
|
||||
pub(crate) fn read_stream(
|
||||
payload: &mut PayloadBuffer,
|
||||
boundary: &str,
|
||||
) -> Poll<Option<Result<Bytes, Error>>> {
|
||||
let mut pos = 0;
|
||||
|
||||
let len = payload.buf.len();
|
||||
if len == 0 {
|
||||
return if payload.eof {
|
||||
Poll::Ready(Some(Err(Error::Incomplete)))
|
||||
} else {
|
||||
Poll::Pending
|
||||
};
|
||||
}
|
||||
|
||||
// check boundary
|
||||
if len > 4 && payload.buf[0] == b'\r' {
|
||||
let b_len = if &payload.buf[..2] == b"\r\n" && &payload.buf[2..4] == b"--" {
|
||||
Some(4)
|
||||
} else if &payload.buf[1..3] == b"--" {
|
||||
Some(3)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Some(b_len) = b_len {
|
||||
let b_size = boundary.len() + b_len;
|
||||
if len < b_size {
|
||||
return Poll::Pending;
|
||||
} else if &payload.buf[b_len..b_size] == boundary.as_bytes() {
|
||||
// found boundary
|
||||
return Poll::Ready(None);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
loop {
|
||||
return if let Some(idx) = memchr::memmem::find(&payload.buf[pos..], b"\r") {
|
||||
let cur = pos + idx;
|
||||
|
||||
// check if we have enough data for boundary detection
|
||||
if cur + 4 > len {
|
||||
if cur > 0 {
|
||||
Poll::Ready(Some(Ok(payload.buf.split_to(cur).freeze())))
|
||||
} else {
|
||||
Poll::Pending
|
||||
}
|
||||
} else {
|
||||
// check boundary
|
||||
if (&payload.buf[cur..cur + 2] == b"\r\n"
|
||||
&& &payload.buf[cur + 2..cur + 4] == b"--")
|
||||
|| (&payload.buf[cur..=cur] == b"\r"
|
||||
&& &payload.buf[cur + 1..cur + 3] == b"--")
|
||||
{
|
||||
if cur != 0 {
|
||||
// return buffer
|
||||
Poll::Ready(Some(Ok(payload.buf.split_to(cur).freeze())))
|
||||
} else {
|
||||
pos = cur + 1;
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
// not boundary
|
||||
pos = cur + 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Poll::Ready(Some(Ok(payload.buf.split().freeze())))
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn poll(&mut self, safety: &Safety) -> Poll<Option<Result<Bytes, Error>>> {
|
||||
if self.payload.is_none() {
|
||||
return Poll::Ready(None);
|
||||
}
|
||||
|
||||
let result = if let Some(mut payload) = self
|
||||
.payload
|
||||
.as_ref()
|
||||
.expect("Field should not be polled after completion")
|
||||
.get_mut(safety)
|
||||
{
|
||||
if !self.eof {
|
||||
let res = if let Some(ref mut len) = self.length {
|
||||
InnerField::read_len(&mut payload, len)
|
||||
} else {
|
||||
InnerField::read_stream(&mut payload, &self.boundary)
|
||||
};
|
||||
|
||||
match res {
|
||||
Poll::Pending => return Poll::Pending,
|
||||
Poll::Ready(Some(Ok(bytes))) => return Poll::Ready(Some(Ok(bytes))),
|
||||
Poll::Ready(Some(Err(err))) => return Poll::Ready(Some(Err(err))),
|
||||
Poll::Ready(None) => self.eof = true,
|
||||
}
|
||||
}
|
||||
|
||||
match payload.readline() {
|
||||
Ok(None) => Poll::Pending,
|
||||
Ok(Some(line)) => {
|
||||
if line.as_ref() != b"\r\n" {
|
||||
log::warn!("multipart field did not read all the data or it is malformed");
|
||||
}
|
||||
Poll::Ready(None)
|
||||
}
|
||||
Err(err) => Poll::Ready(Some(Err(err))),
|
||||
}
|
||||
} else {
|
||||
Poll::Pending
|
||||
};
|
||||
|
||||
if let Poll::Ready(None) = result {
|
||||
// drop payload buffer and make future un-poll-able
|
||||
let _ = self.payload.take();
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use futures_util::{stream, StreamExt as _};
|
||||
|
||||
use super::*;
|
||||
use crate::Multipart;
|
||||
|
||||
// TODO: use test utility when multi-file support is introduced
|
||||
fn create_double_request_with_header() -> (Bytes, HeaderMap) {
|
||||
let bytes = Bytes::from(
|
||||
"testasdadsad\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
|
||||
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
|
||||
Content-Type: text/plain; charset=utf-8\r\n\
|
||||
\r\n\
|
||||
one+one+one\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
|
||||
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
|
||||
Content-Type: text/plain; charset=utf-8\r\n\
|
||||
\r\n\
|
||||
two+two+two\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0--\r\n",
|
||||
);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
header::HeaderValue::from_static(
|
||||
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
|
||||
),
|
||||
);
|
||||
(bytes, headers)
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn bytes_unlimited() {
|
||||
let (body, headers) = create_double_request_with_header();
|
||||
|
||||
let mut multipart = Multipart::new(&headers, stream::iter([Ok(body)]));
|
||||
|
||||
let field = multipart
|
||||
.next()
|
||||
.await
|
||||
.expect("multipart should have two fields")
|
||||
.expect("multipart body should be well formatted")
|
||||
.bytes(usize::MAX)
|
||||
.await
|
||||
.expect("field data should not be size limited")
|
||||
.expect("reading field data should not error");
|
||||
assert_eq!(field, "one+one+one");
|
||||
|
||||
let field = multipart
|
||||
.next()
|
||||
.await
|
||||
.expect("multipart should have two fields")
|
||||
.expect("multipart body should be well formatted")
|
||||
.bytes(usize::MAX)
|
||||
.await
|
||||
.expect("field data should not be size limited")
|
||||
.expect("reading field data should not error");
|
||||
assert_eq!(field, "two+two+two");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn bytes_limited() {
|
||||
let (body, headers) = create_double_request_with_header();
|
||||
|
||||
let mut multipart = Multipart::new(&headers, stream::iter([Ok(body)]));
|
||||
|
||||
multipart
|
||||
.next()
|
||||
.await
|
||||
.expect("multipart should have two fields")
|
||||
.expect("multipart body should be well formatted")
|
||||
.bytes(8) // smaller than data size
|
||||
.await
|
||||
.expect_err("field data should be size limited");
|
||||
|
||||
// next field still readable
|
||||
let field = multipart
|
||||
.next()
|
||||
.await
|
||||
.expect("multipart should have two fields")
|
||||
.expect("multipart body should be well formatted")
|
||||
.bytes(usize::MAX)
|
||||
.await
|
||||
.expect("field data should not be size limited")
|
||||
.expect("reading field data should not error");
|
||||
assert_eq!(field, "two+two+two");
|
||||
}
|
||||
}
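The `Field::bytes()` docs earlier in this file advise pairing the call with a timeout, since the stream is drained even after the limit is hit. A hypothetical helper (not part of the diff; assumes a Tokio runtime with the `time` feature) sketching that advice:

```rust
use std::time::Duration;

use actix_multipart::Field;
use actix_web::web::Bytes;

// Hypothetical helper combining the size limit of `Field::bytes()` with a wall-clock timeout.
async fn field_bytes_with_timeout(field: &mut Field, limit: usize) -> Option<Bytes> {
    match tokio::time::timeout(Duration::from_secs(30), field.bytes(limit)).await {
        Ok(Ok(Ok(data))) => Some(data),     // field read fully within the limit
        Ok(Ok(Err(_payload_err))) => None,  // underlying payload stream errored
        Ok(Err(_limit_exceeded)) => None,   // field was larger than `limit`
        Err(_elapsed) => None,              // timed out while reading
    }
}
```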
|
@ -1,7 +1,6 @@
|
||||
//! Reads a field into memory.
|
||||
|
||||
use actix_web::HttpRequest;
|
||||
use bytes::BytesMut;
|
||||
use actix_web::{web::BytesMut, HttpRequest};
|
||||
use futures_core::future::LocalBoxFuture;
|
||||
use futures_util::TryStreamExt as _;
|
||||
use mime::Mime;
|
||||
@ -15,7 +14,7 @@ use crate::{
|
||||
#[derive(Debug)]
|
||||
pub struct Bytes {
|
||||
/// The data.
|
||||
pub data: bytes::Bytes,
|
||||
pub data: actix_web::web::Bytes,
|
||||
|
||||
/// The value of the `Content-Type` header.
|
||||
pub content_type: Option<Mime>,
|
||||
@ -41,8 +40,9 @@ impl<'t> FieldReader<'t> for Bytes {
|
||||
content_type: field.content_type().map(ToOwned::to_owned),
|
||||
file_name: field
|
||||
.content_disposition()
|
||||
.expect("multipart form fields should have a content-disposition header")
|
||||
.get_filename()
|
||||
.map(str::to_owned),
|
||||
.map(ToOwned::to_owned),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
@ -32,7 +32,6 @@ where
|
||||
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future {
|
||||
Box::pin(async move {
|
||||
let config = JsonConfig::from_req(req);
|
||||
let field_name = field.name().to_owned();
|
||||
|
||||
if config.validate_content_type {
|
||||
let valid = if let Some(mime) = field.content_type() {
|
||||
@ -43,17 +42,19 @@ where
|
||||
|
||||
if !valid {
|
||||
return Err(MultipartError::Field {
|
||||
field_name,
|
||||
name: field.form_field_name,
|
||||
source: config.map_error(req, JsonFieldError::ContentType),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let form_field_name = field.form_field_name.clone();
|
||||
|
||||
let bytes = Bytes::read_field(req, field, limits).await?;
|
||||
|
||||
Ok(Json(serde_json::from_slice(bytes.data.as_ref()).map_err(
|
||||
|err| MultipartError::Field {
|
||||
field_name,
|
||||
name: form_field_name,
|
||||
source: config.map_error(req, JsonFieldError::Deserialize(err)),
|
||||
},
|
||||
)?))
|
||||
@ -133,8 +134,7 @@ impl Default for JsonConfig {
|
||||
mod tests {
|
||||
use std::collections::HashMap;
|
||||
|
||||
use actix_web::{http::StatusCode, web, App, HttpResponse, Responder};
|
||||
use bytes::Bytes;
|
||||
use actix_web::{http::StatusCode, web, web::Bytes, App, HttpResponse, Responder};
|
||||
|
||||
use crate::form::{
|
||||
json::{Json, JsonConfig},
|
||||
|
@ -33,6 +33,14 @@ pub trait FieldReader<'t>: Sized + Any {
|
||||
type Future: Future<Output = Result<Self, MultipartError>>;
|
||||
|
||||
/// The form will call this function to handle the field.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// When reading the `field` payload using its `Stream` implementation, polling (manually or via
|
||||
/// `next()`/`try_next()`) may panic after the payload is exhausted. If this is a problem for
|
||||
/// your implementation of this method, you should [`fuse()`] the `Field` first.
|
||||
///
|
||||
/// [`fuse()`]: futures_util::stream::StreamExt::fuse()
|
||||
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future;
|
||||
}
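The panic note above suggests fusing the field stream before polling past its end. A hypothetical `FieldReader` impl (not part of the diff) sketching that approach:

```rust
use actix_multipart::{
    form::{FieldReader, Limits},
    Field, MultipartError,
};
use actix_web::HttpRequest;
use futures_core::future::LocalBoxFuture;
use futures_util::{StreamExt as _, TryStreamExt as _};

// Hypothetical reader that discards the field body; fusing makes extra polls return None
// instead of panicking once the payload is exhausted.
struct Discard;

impl<'t> FieldReader<'t> for Discard {
    type Future = LocalBoxFuture<'t, Result<Self, MultipartError>>;

    fn read_field(_: &'t HttpRequest, field: Field, _: &'t mut Limits) -> Self::Future {
        Box::pin(async move {
            let mut field = field.fuse();
            while let Some(_chunk) = field.try_next().await? {}

            // Polling again is now safe: a fused stream keeps returning None.
            assert!(field.try_next().await?.is_none());

            Ok(Self)
        })
    }
}
```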
|
||||
|
||||
@ -72,13 +80,13 @@ where
|
||||
state: &'t mut State,
|
||||
duplicate_field: DuplicateField,
|
||||
) -> Self::Future {
|
||||
if state.contains_key(field.name()) {
|
||||
if state.contains_key(&field.form_field_name) {
|
||||
match duplicate_field {
|
||||
DuplicateField::Ignore => return Box::pin(ready(Ok(()))),
|
||||
|
||||
DuplicateField::Deny => {
|
||||
return Box::pin(ready(Err(MultipartError::DuplicateField(
|
||||
field.name().to_owned(),
|
||||
field.form_field_name,
|
||||
))))
|
||||
}
|
||||
|
||||
@ -87,7 +95,7 @@ where
|
||||
}
|
||||
|
||||
Box::pin(async move {
|
||||
let field_name = field.name().to_owned();
|
||||
let field_name = field.form_field_name.clone();
|
||||
let t = T::read_field(req, field, limits).await?;
|
||||
state.insert(field_name, Box::new(t));
|
||||
Ok(())
|
||||
@ -115,10 +123,8 @@ where
|
||||
Box::pin(async move {
|
||||
// Note: Vec GroupReader always allows duplicates
|
||||
|
||||
let field_name = field.name().to_owned();
|
||||
|
||||
let vec = state
|
||||
.entry(field_name)
|
||||
.entry(field.form_field_name.clone())
|
||||
.or_insert_with(|| Box::<Vec<T>>::default())
|
||||
.downcast_mut::<Vec<T>>()
|
||||
.unwrap();
|
||||
@ -151,13 +157,13 @@ where
|
||||
state: &'t mut State,
|
||||
duplicate_field: DuplicateField,
|
||||
) -> Self::Future {
|
||||
if state.contains_key(field.name()) {
|
||||
if state.contains_key(&field.form_field_name) {
|
||||
match duplicate_field {
|
||||
DuplicateField::Ignore => return Box::pin(ready(Ok(()))),
|
||||
|
||||
DuplicateField::Deny => {
|
||||
return Box::pin(ready(Err(MultipartError::DuplicateField(
|
||||
field.name().to_owned(),
|
||||
field.form_field_name,
|
||||
))))
|
||||
}
|
||||
|
||||
@ -166,7 +172,7 @@ where
|
||||
}
|
||||
|
||||
Box::pin(async move {
|
||||
let field_name = field.name().to_owned();
|
||||
let field_name = field.form_field_name.clone();
|
||||
let t = T::read_field(req, field, limits).await?;
|
||||
state.insert(field_name, Box::new(t));
|
||||
Ok(())
|
||||
@ -273,6 +279,9 @@ impl Limits {
|
||||
/// [`MultipartCollect`] trait. You should use the [`macro@MultipartForm`] macro to derive this
|
||||
/// for your struct.
|
||||
///
|
||||
/// Note that this extractor rejects requests with any other Content-Type such as `multipart/mixed`,
|
||||
/// `multipart/related`, or non-multipart media types.
|
||||
///
|
||||
/// Add a [`MultipartFormConfig`] to your app data to configure extraction.
|
||||
#[derive(Deref, DerefMut)]
|
||||
pub struct MultipartForm<T: MultipartCollect>(pub T);
|
||||
@ -286,14 +295,24 @@ impl<T: MultipartCollect> MultipartForm<T> {
|
||||
|
||||
impl<T> FromRequest for MultipartForm<T>
|
||||
where
|
||||
T: MultipartCollect,
|
||||
T: MultipartCollect + 'static,
|
||||
{
|
||||
type Error = Error;
|
||||
type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>;
|
||||
|
||||
#[inline]
|
||||
fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future {
|
||||
let mut payload = Multipart::new(req.headers(), payload.take());
|
||||
let mut multipart = Multipart::from_req(req, payload);
|
||||
|
||||
let content_type = match multipart.content_type_or_bail() {
|
||||
Ok(content_type) => content_type,
|
||||
Err(err) => return Box::pin(ready(Err(err.into()))),
|
||||
};
|
||||
|
||||
if content_type.subtype() != mime::FORM_DATA {
|
||||
// this extractor only supports multipart/form-data
|
||||
return Box::pin(ready(Err(MultipartError::ContentTypeIncompatible.into())));
|
||||
};
|
||||
|
||||
let config = MultipartFormConfig::from_req(req);
|
||||
let mut limits = Limits::new(config.total_limit, config.memory_limit);
|
||||
@ -305,14 +324,20 @@ where
|
||||
Box::pin(
|
||||
async move {
|
||||
let mut state = State::default();
|
||||
// We need to ensure field limits are shared for all instances of this field name
|
||||
|
||||
// ensure limits are shared for all fields with this name
|
||||
let mut field_limits = HashMap::<String, Option<usize>>::new();
|
||||
|
||||
while let Some(field) = payload.try_next().await? {
|
||||
while let Some(field) = multipart.try_next().await? {
|
||||
debug_assert!(
|
||||
!field.form_field_name.is_empty(),
|
||||
"multipart form fields should have names",
|
||||
);
|
||||
|
||||
// Retrieve the limit for this field
|
||||
let entry = field_limits
|
||||
.entry(field.name().to_owned())
|
||||
.or_insert_with(|| T::limit(field.name()));
|
||||
.entry(field.form_field_name.clone())
|
||||
.or_insert_with(|| T::limit(&field.form_field_name));
|
||||
|
||||
limits.field_limit_remaining.clone_from(entry);
|
||||
|
||||
@ -321,6 +346,7 @@ where
|
||||
// Update the stored limit
|
||||
*entry = limits.field_limit_remaining;
|
||||
}
|
||||
|
||||
let inner = T::from_state(state)?;
|
||||
Ok(MultipartForm(inner))
|
||||
}
|
||||
@ -396,11 +422,20 @@ mod tests {
|
||||
use actix_http::encoding::Decoder;
|
||||
use actix_multipart_rfc7578::client::multipart;
|
||||
use actix_test::TestServer;
|
||||
use actix_web::{dev::Payload, http::StatusCode, web, App, HttpResponse, Responder};
|
||||
use actix_web::{
|
||||
dev::Payload, http::StatusCode, web, App, HttpRequest, HttpResponse, Resource, Responder,
|
||||
};
|
||||
use awc::{Client, ClientResponse};
|
||||
use futures_core::future::LocalBoxFuture;
|
||||
use futures_util::TryStreamExt as _;
|
||||
|
||||
use super::MultipartForm;
|
||||
use crate::form::{bytes::Bytes, tempfile::TempFile, text::Text, MultipartFormConfig};
|
||||
use crate::{
|
||||
form::{
|
||||
bytes::Bytes, tempfile::TempFile, text::Text, FieldReader, Limits, MultipartFormConfig,
|
||||
},
|
||||
Field, MultipartError,
|
||||
};
|
||||
|
||||
pub async fn send_form(
|
||||
srv: &TestServer,
|
||||
@ -734,4 +769,84 @@ mod tests {
|
||||
let response = send_form(&srv, form, "/").await;
|
||||
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn non_multipart_form_data() {
|
||||
#[derive(MultipartForm)]
|
||||
struct TestNonMultipartFormData {
|
||||
#[allow(unused)]
|
||||
#[multipart(limit = "30B")]
|
||||
foo: Text<String>,
|
||||
}
|
||||
|
||||
async fn non_multipart_form_data_route(
|
||||
_form: MultipartForm<TestNonMultipartFormData>,
|
||||
) -> String {
|
||||
unreachable!("request is sent with multipart/mixed");
|
||||
}
|
||||
|
||||
let srv = actix_test::start(|| {
|
||||
App::new().route("/", web::post().to(non_multipart_form_data_route))
|
||||
});
|
||||
|
||||
let mut form = multipart::Form::default();
|
||||
form.add_text("foo", "foo");
|
||||
|
||||
// mangle content-type, keeping the boundary
|
||||
let ct = form.content_type().replacen("/form-data", "/mixed", 1);
|
||||
|
||||
let res = Client::default()
|
||||
.post(srv.url("/"))
|
||||
.content_type(ct)
|
||||
.send_body(multipart::Body::from(form))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(res.status(), StatusCode::UNSUPPORTED_MEDIA_TYPE);
|
||||
}
|
||||
|
||||
#[should_panic(expected = "called `Result::unwrap()` on an `Err` value: Connect(Disconnected)")]
|
||||
#[actix_web::test]
|
||||
async fn field_try_next_panic() {
|
||||
#[derive(Debug)]
|
||||
struct NullSink;
|
||||
|
||||
impl<'t> FieldReader<'t> for NullSink {
|
||||
type Future = LocalBoxFuture<'t, Result<Self, MultipartError>>;
|
||||
|
||||
fn read_field(
|
||||
_: &'t HttpRequest,
|
||||
mut field: Field,
|
||||
_limits: &'t mut Limits,
|
||||
) -> Self::Future {
|
||||
Box::pin(async move {
|
||||
// exhaust field stream
|
||||
while let Some(_chunk) = field.try_next().await? {}
|
||||
|
||||
// poll again, crash
|
||||
let _post = field.try_next().await;
|
||||
|
||||
Ok(Self)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(MultipartForm)]
|
||||
struct NullSinkForm {
|
||||
foo: NullSink,
|
||||
}
|
||||
|
||||
async fn null_sink(_form: MultipartForm<NullSinkForm>) -> impl Responder {
|
||||
"unreachable"
|
||||
}
|
||||
|
||||
let srv = actix_test::start(|| App::new().service(Resource::new("/").post(null_sink)));
|
||||
|
||||
let mut form = multipart::Form::default();
|
||||
form.add_text("foo", "data is not important to this test");
|
||||
|
||||
// panics with Err(Connect(Disconnected)) due to form NullSink panic
|
||||
let _res = send_form(&srv, form, "/").await;
|
||||
}
|
||||
}
|
||||
|
@ -42,38 +42,36 @@ impl<'t> FieldReader<'t> for TempFile {
|
||||
fn read_field(req: &'t HttpRequest, mut field: Field, limits: &'t mut Limits) -> Self::Future {
|
||||
Box::pin(async move {
|
||||
let config = TempFileConfig::from_req(req);
|
||||
let field_name = field.name().to_owned();
|
||||
let mut size = 0;
|
||||
|
||||
let file = config
|
||||
.create_tempfile()
|
||||
.map_err(|err| config.map_error(req, &field_name, TempFileError::FileIo(err)))?;
|
||||
let file = config.create_tempfile().map_err(|err| {
|
||||
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
|
||||
})?;
|
||||
|
||||
let mut file_async =
|
||||
tokio::fs::File::from_std(file.reopen().map_err(|err| {
|
||||
config.map_error(req, &field_name, TempFileError::FileIo(err))
|
||||
})?);
|
||||
let mut file_async = tokio::fs::File::from_std(file.reopen().map_err(|err| {
|
||||
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
|
||||
})?);
|
||||
|
||||
while let Some(chunk) = field.try_next().await? {
|
||||
limits.try_consume_limits(chunk.len(), false)?;
|
||||
size += chunk.len();
|
||||
file_async.write_all(chunk.as_ref()).await.map_err(|err| {
|
||||
config.map_error(req, &field_name, TempFileError::FileIo(err))
|
||||
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
|
||||
})?;
|
||||
}
|
||||
|
||||
file_async
|
||||
.flush()
|
||||
.await
|
||||
.map_err(|err| config.map_error(req, &field_name, TempFileError::FileIo(err)))?;
|
||||
file_async.flush().await.map_err(|err| {
|
||||
config.map_error(req, &field.form_field_name, TempFileError::FileIo(err))
|
||||
})?;
|
||||
|
||||
Ok(TempFile {
|
||||
file,
|
||||
content_type: field.content_type().map(ToOwned::to_owned),
|
||||
file_name: field
|
||||
.content_disposition()
|
||||
.expect("multipart form fields should have a content-disposition header")
|
||||
.get_filename()
|
||||
.map(str::to_owned),
|
||||
.map(ToOwned::to_owned),
|
||||
size,
|
||||
})
|
||||
})
|
||||
@ -137,7 +135,7 @@ impl TempFileConfig {
|
||||
};
|
||||
|
||||
MultipartError::Field {
|
||||
field_name: field_name.to_owned(),
|
||||
name: field_name.to_owned(),
|
||||
source,
|
||||
}
|
||||
}
|
||||
|
@ -36,7 +36,6 @@ where
|
||||
fn read_field(req: &'t HttpRequest, field: Field, limits: &'t mut Limits) -> Self::Future {
|
||||
Box::pin(async move {
|
||||
let config = TextConfig::from_req(req);
|
||||
let field_name = field.name().to_owned();
|
||||
|
||||
if config.validate_content_type {
|
||||
let valid = if let Some(mime) = field.content_type() {
|
||||
@ -49,22 +48,24 @@ where
|
||||
|
||||
if !valid {
|
||||
return Err(MultipartError::Field {
|
||||
field_name,
|
||||
name: field.form_field_name,
|
||||
source: config.map_error(req, TextError::ContentType),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let form_field_name = field.form_field_name.clone();
|
||||
|
||||
let bytes = Bytes::read_field(req, field, limits).await?;
|
||||
|
||||
let text = str::from_utf8(&bytes.data).map_err(|err| MultipartError::Field {
|
||||
field_name: field_name.clone(),
|
||||
name: form_field_name.clone(),
|
||||
source: config.map_error(req, TextError::Utf8Error(err)),
|
||||
})?;
|
||||
|
||||
Ok(Text(serde_plain::from_str(text).map_err(|err| {
|
||||
MultipartError::Field {
|
||||
field_name,
|
||||
name: form_field_name,
|
||||
source: config.map_error(req, TextError::Deserialize(err)),
|
||||
}
|
||||
})?))
|
||||
|
@ -1,9 +1,11 @@
|
||||
//! Multipart form support for Actix Web.
|
||||
//!
|
||||
//! # Examples
|
||||
//!
|
||||
//! ```no_run
|
||||
//! use actix_web::{post, App, HttpServer, Responder};
|
||||
//!
|
||||
//! use actix_multipart::form::{json::Json as MPJson, tempfile::TempFile, MultipartForm};
|
||||
//! use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
|
||||
//! use serde::Deserialize;
|
||||
//!
|
||||
//! #[derive(Debug, Deserialize)]
|
||||
@ -15,7 +17,7 @@
|
||||
//! struct UploadForm {
|
||||
//! #[multipart(limit = "100MB")]
|
||||
//! file: TempFile,
|
||||
//! json: MPJson<Metadata>,
|
||||
//! json: MpJson<Metadata>,
|
||||
//! }
|
||||
//!
|
||||
//! #[post("/videos")]
|
||||
@ -34,10 +36,16 @@
|
||||
//! .await
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! cURL request:
|
||||
//!
|
||||
//! ```sh
|
||||
//! curl -v --request POST \
|
||||
//! --url http://localhost:8080/videos \
|
||||
//! -F 'json={"name": "Cargo.lock"};type=application/json' \
|
||||
//! -F file=@./Cargo.lock
|
||||
//! ```
|
||||
|
||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
||||
#![warn(future_incompatible)]
|
||||
#![allow(clippy::borrow_interior_mutable_const)]
|
||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||
@ -48,14 +56,11 @@ extern crate self as actix_multipart;
|
||||
|
||||
mod error;
|
||||
mod extractor;
|
||||
pub(crate) mod field;
|
||||
pub mod form;
|
||||
mod server;
|
||||
mod multipart;
|
||||
pub(crate) mod payload;
|
||||
pub(crate) mod safety;
|
||||
pub mod test;
|
||||
|
||||
pub use self::{
|
||||
error::MultipartError,
|
||||
server::{Field, Multipart},
|
||||
test::{
|
||||
create_form_data_payload_and_headers, create_form_data_payload_and_headers_with_boundary,
|
||||
},
|
||||
};
|
||||
pub use self::{error::Error as MultipartError, field::Field, multipart::Multipart};
|
||||
|
actix-multipart/src/multipart.rs (new file, 977 lines)
@ -0,0 +1,977 @@
|
||||
//! Multipart request payload support.
|
||||
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
pin::Pin,
|
||||
rc::Rc,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
|
||||
use actix_web::{
|
||||
dev,
|
||||
error::{ParseError, PayloadError},
|
||||
http::header::{self, ContentDisposition, HeaderMap, HeaderName, HeaderValue},
|
||||
web::Bytes,
|
||||
HttpRequest,
|
||||
};
|
||||
use futures_core::stream::Stream;
|
||||
use mime::Mime;
|
||||
|
||||
use crate::{
|
||||
error::Error,
|
||||
field::InnerField,
|
||||
payload::{PayloadBuffer, PayloadRef},
|
||||
safety::Safety,
|
||||
Field,
|
||||
};
|
||||
|
||||
const MAX_HEADERS: usize = 32;
|
||||
|
||||
/// The server-side implementation of `multipart/form-data` requests.
|
||||
///
|
||||
/// This parses the incoming stream into `Field` instances via its `Stream` implementation.
|
||||
/// Nested multipart streams are not supported and are rejected with an error.
|
||||
pub struct Multipart {
|
||||
flow: Flow,
|
||||
safety: Safety,
|
||||
}
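
Consumed through its `Stream` implementation, the reader yields one `Field` at a time, and each field is itself a stream of `Bytes` chunks. A minimal handler sketch using the public extractor (route name and chunk handling are illustrative):

```rust
use actix_multipart::Multipart;
use actix_web::{post, Error, HttpResponse};
use futures_util::TryStreamExt as _;

#[post("/upload")]
async fn upload(mut payload: Multipart) -> Result<HttpResponse, Error> {
    // outer stream: one item per form field
    while let Some(mut field) = payload.try_next().await? {
        // inner stream: the field's body, chunk by chunk
        while let Some(chunk) = field.try_next().await? {
            let _ = chunk.len(); // process the chunk here
        }
    }

    Ok(HttpResponse::Ok().finish())
}
```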
|
||||
|
||||
enum Flow {
|
||||
InFlight(Inner),
|
||||
|
||||
/// Error container is Some until an error is returned out of the flow.
|
||||
Error(Option<Error>),
|
||||
}
|
||||
|
||||
impl Multipart {
|
||||
/// Creates multipart instance from parts.
|
||||
pub fn new<S>(headers: &HeaderMap, stream: S) -> Self
|
||||
where
|
||||
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
|
||||
{
|
||||
match Self::find_ct_and_boundary(headers) {
|
||||
Ok((ct, boundary)) => Self::from_ct_and_boundary(ct, boundary, stream),
|
||||
Err(err) => Self::from_error(err),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates multipart instance from a request and its payload.
|
||||
pub(crate) fn from_req(req: &HttpRequest, payload: &mut dev::Payload) -> Self {
|
||||
match Self::find_ct_and_boundary(req.headers()) {
|
||||
Ok((ct, boundary)) => Self::from_ct_and_boundary(ct, boundary, payload.take()),
|
||||
Err(err) => Self::from_error(err),
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract Content-Type and boundary info from headers.
|
||||
pub(crate) fn find_ct_and_boundary(headers: &HeaderMap) -> Result<(Mime, String), Error> {
|
||||
let content_type = headers
|
||||
.get(&header::CONTENT_TYPE)
|
||||
.ok_or(Error::ContentTypeMissing)?
|
||||
.to_str()
|
||||
.ok()
|
||||
.and_then(|content_type| content_type.parse::<Mime>().ok())
|
||||
.ok_or(Error::ContentTypeParse)?;
|
||||
|
||||
if content_type.type_() != mime::MULTIPART {
|
||||
return Err(Error::ContentTypeIncompatible);
|
||||
}
|
||||
|
||||
let boundary = content_type
|
||||
.get_param(mime::BOUNDARY)
|
||||
.ok_or(Error::BoundaryMissing)?
|
||||
.as_str()
|
||||
.to_owned();
|
||||
|
||||
Ok((content_type, boundary))
|
||||
}
|
||||
|
||||
/// Constructs a new multipart reader from given Content-Type, boundary, and stream.
|
||||
pub(crate) fn from_ct_and_boundary<S>(ct: Mime, boundary: String, stream: S) -> Multipart
|
||||
where
|
||||
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
|
||||
{
|
||||
Multipart {
|
||||
safety: Safety::new(),
|
||||
flow: Flow::InFlight(Inner {
|
||||
payload: PayloadRef::new(PayloadBuffer::new(stream)),
|
||||
content_type: ct,
|
||||
boundary,
|
||||
state: State::FirstBoundary,
|
||||
item: Item::None,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// Constructs a new multipart reader from given `MultipartError`.
|
||||
pub(crate) fn from_error(err: Error) -> Multipart {
|
||||
Multipart {
|
||||
flow: Flow::Error(Some(err)),
|
||||
safety: Safety::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the request's parsed Content-Type or returns the stored error.
|
||||
pub(crate) fn content_type_or_bail(&mut self) -> Result<mime::Mime, Error> {
|
||||
match self.flow {
|
||||
Flow::InFlight(ref inner) => Ok(inner.content_type.clone()),
|
||||
Flow::Error(ref mut err) => Err(err
|
||||
.take()
|
||||
.expect("error should not be taken after it was returned")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Stream for Multipart {
|
||||
type Item = Result<Field, Error>;
|
||||
|
||||
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||
let this = self.get_mut();
|
||||
|
||||
match this.flow {
|
||||
Flow::InFlight(ref mut inner) => {
|
||||
if let Some(mut buffer) = inner.payload.get_mut(&this.safety) {
|
||||
// check safety and poll read payload to buffer.
|
||||
buffer.poll_stream(cx)?;
|
||||
} else if !this.safety.is_clean() {
|
||||
// safety violation
|
||||
return Poll::Ready(Some(Err(Error::NotConsumed)));
|
||||
} else {
|
||||
return Poll::Pending;
|
||||
}
|
||||
|
||||
inner.poll(&this.safety, cx)
|
||||
}
|
||||
|
||||
Flow::Error(ref mut err) => Poll::Ready(Some(Err(err
|
||||
.take()
|
||||
.expect("Multipart polled after finish")))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Debug)]
|
||||
enum State {
|
||||
/// Skip data until first boundary.
|
||||
FirstBoundary,
|
||||
|
||||
/// Reading boundary.
|
||||
Boundary,
|
||||
|
||||
/// Reading Headers.
|
||||
Headers,
|
||||
|
||||
/// Stream EOF.
|
||||
Eof,
|
||||
}
|
||||
|
||||
enum Item {
|
||||
None,
|
||||
Field(Rc<RefCell<InnerField>>),
|
||||
}
|
||||
|
||||
struct Inner {
|
||||
/// Request's payload stream & buffer.
|
||||
payload: PayloadRef,
|
||||
|
||||
/// Request's Content-Type.
|
||||
///
|
||||
/// Guaranteed to have "multipart" top-level media type, i.e., `multipart/*`.
|
||||
content_type: Mime,
|
||||
|
||||
/// Field boundary.
|
||||
boundary: String,
|
||||
|
||||
state: State,
|
||||
item: Item,
|
||||
}
|
||||
|
||||
impl Inner {
|
||||
fn read_field_headers(payload: &mut PayloadBuffer) -> Result<Option<HeaderMap>, Error> {
|
||||
match payload.read_until(b"\r\n\r\n")? {
|
||||
None => {
|
||||
if payload.eof {
|
||||
Err(Error::Incomplete)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
Some(bytes) => {
|
||||
let mut hdrs = [httparse::EMPTY_HEADER; MAX_HEADERS];
|
||||
|
||||
match httparse::parse_headers(&bytes, &mut hdrs).map_err(ParseError::from)? {
|
||||
httparse::Status::Complete((_, hdrs)) => {
|
||||
// convert headers
|
||||
let mut headers = HeaderMap::with_capacity(hdrs.len());
|
||||
|
||||
for h in hdrs {
|
||||
let name =
|
||||
HeaderName::try_from(h.name).map_err(|_| ParseError::Header)?;
|
||||
let value =
|
||||
HeaderValue::try_from(h.value).map_err(|_| ParseError::Header)?;
|
||||
headers.append(name, value);
|
||||
}
|
||||
|
||||
Ok(Some(headers))
|
||||
}
|
||||
|
||||
httparse::Status::Partial => Err(ParseError::Header.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Reads a field boundary from the payload buffer (and discards it).
|
||||
///
|
||||
/// Reads "in-between" and "final" boundaries. E.g. for boundary = "foo":
|
||||
///
|
||||
/// ```plain
|
||||
/// --foo <-- in-between fields
|
||||
/// --foo-- <-- end of request body, should be followed by EOF
|
||||
/// ```
|
||||
///
|
||||
/// Returns:
|
||||
///
|
||||
/// - `Ok(Some(true))` - final field boundary read (EOF)
|
||||
/// - `Ok(Some(false))` - field boundary read
|
||||
/// - `Ok(None)` - boundary not found, more data needs reading
|
||||
/// - `Err(BoundaryMissing)` - multipart boundary is missing
|
||||
fn read_boundary(payload: &mut PayloadBuffer, boundary: &str) -> Result<Option<bool>, Error> {
|
||||
// TODO: need to read epilogue
|
||||
let chunk = match payload.readline_or_eof()? {
|
||||
// TODO: this might be okay as a let Some() else return Ok(None)
|
||||
None => return Ok(payload.eof.then_some(true)),
|
||||
Some(chunk) => chunk,
|
||||
};
|
||||
|
||||
const BOUNDARY_MARKER: &[u8] = b"--";
|
||||
const LINE_BREAK: &[u8] = b"\r\n";
|
||||
|
||||
let boundary_len = boundary.len();
|
||||
|
||||
if chunk.len() < boundary_len + 2 + 2
|
||||
|| !chunk.starts_with(BOUNDARY_MARKER)
|
||||
|| &chunk[2..boundary_len + 2] != boundary.as_bytes()
|
||||
{
|
||||
return Err(Error::BoundaryMissing);
|
||||
}
|
||||
|
||||
// chunk facts:
|
||||
// - long enough to contain boundary + 2 markers or 1 marker and line-break
|
||||
// - starts with boundary marker
|
||||
// - chunk contains correct boundary
|
||||
|
||||
if &chunk[boundary_len + 2..] == LINE_BREAK {
|
||||
// boundary is followed by line-break, indicating more fields to come
|
||||
return Ok(Some(false));
|
||||
}
|
||||
|
||||
// boundary is followed by marker
|
||||
if &chunk[boundary_len + 2..boundary_len + 4] == BOUNDARY_MARKER
|
||||
&& (
|
||||
// chunk is exactly boundary len + 2 markers
|
||||
chunk.len() == boundary_len + 2 + 2
|
||||
// final boundary is allowed to end with a line-break
|
||||
|| &chunk[boundary_len + 4..] == LINE_BREAK
|
||||
)
|
||||
{
|
||||
return Ok(Some(true));
|
||||
}
|
||||
|
||||
Err(Error::BoundaryMissing)
|
||||
}
|
||||
|
||||
fn skip_until_boundary(
|
||||
payload: &mut PayloadBuffer,
|
||||
boundary: &str,
|
||||
) -> Result<Option<bool>, Error> {
|
||||
let mut eof = false;
|
||||
|
||||
loop {
|
||||
match payload.readline()? {
|
||||
Some(chunk) => {
|
||||
if chunk.is_empty() {
|
||||
return Err(Error::BoundaryMissing);
|
||||
}
|
||||
if chunk.len() < boundary.len() {
|
||||
continue;
|
||||
}
|
||||
if &chunk[..2] == b"--" && &chunk[2..chunk.len() - 2] == boundary.as_bytes() {
|
||||
break;
|
||||
} else {
|
||||
if chunk.len() < boundary.len() + 2 {
|
||||
continue;
|
||||
}
|
||||
let b: &[u8] = boundary.as_ref();
|
||||
if &chunk[..boundary.len()] == b
|
||||
&& &chunk[boundary.len()..boundary.len() + 2] == b"--"
|
||||
{
|
||||
eof = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
return if payload.eof {
|
||||
Err(Error::Incomplete)
|
||||
} else {
|
||||
Ok(None)
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Some(eof))
|
||||
}
|
||||
|
||||
fn poll(&mut self, safety: &Safety, cx: &Context<'_>) -> Poll<Option<Result<Field, Error>>> {
|
||||
if self.state == State::Eof {
|
||||
Poll::Ready(None)
|
||||
} else {
|
||||
// release field
|
||||
loop {
|
||||
// Nested multipart streams of fields have to be consumed
|
||||
// before switching to the next
|
||||
if safety.current() {
|
||||
let stop = match self.item {
|
||||
Item::Field(ref mut field) => match field.borrow_mut().poll(safety) {
|
||||
Poll::Pending => return Poll::Pending,
|
||||
Poll::Ready(Some(Ok(_))) => continue,
|
||||
Poll::Ready(Some(Err(err))) => return Poll::Ready(Some(Err(err))),
|
||||
Poll::Ready(None) => true,
|
||||
},
|
||||
Item::None => false,
|
||||
};
|
||||
if stop {
|
||||
self.item = Item::None;
|
||||
}
|
||||
if let Item::None = self.item {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let field_headers = if let Some(mut payload) = self.payload.get_mut(safety) {
|
||||
match self.state {
|
||||
// read until first boundary
|
||||
State::FirstBoundary => {
|
||||
match Inner::skip_until_boundary(&mut payload, &self.boundary)? {
|
||||
None => return Poll::Pending,
|
||||
Some(eof) => {
|
||||
if eof {
|
||||
self.state = State::Eof;
|
||||
return Poll::Ready(None);
|
||||
} else {
|
||||
self.state = State::Headers;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// read boundary
|
||||
State::Boundary => match Inner::read_boundary(&mut payload, &self.boundary)? {
|
||||
None => return Poll::Pending,
|
||||
Some(eof) => {
|
||||
if eof {
|
||||
self.state = State::Eof;
|
||||
return Poll::Ready(None);
|
||||
} else {
|
||||
self.state = State::Headers;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// read field headers for next field
|
||||
if self.state == State::Headers {
|
||||
if let Some(headers) = Inner::read_field_headers(&mut payload)? {
|
||||
self.state = State::Boundary;
|
||||
headers
|
||||
} else {
|
||||
return Poll::Pending;
|
||||
}
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
} else {
|
||||
log::debug!("NotReady: field is in flight");
|
||||
return Poll::Pending;
|
||||
};
|
||||
|
||||
let field_content_disposition = field_headers
|
||||
.get(&header::CONTENT_DISPOSITION)
|
||||
.and_then(|cd| ContentDisposition::from_raw(cd).ok())
|
||||
.filter(|content_disposition| {
|
||||
matches!(
|
||||
content_disposition.disposition,
|
||||
header::DispositionType::FormData,
|
||||
)
|
||||
});
|
||||
|
||||
let form_field_name = if self.content_type.subtype() == mime::FORM_DATA {
|
||||
// According to RFC 7578 §4.2, which relates to "multipart/form-data" requests
|
||||
// specifically, fields must have a Content-Disposition header, its disposition
|
||||
// type must be set as "form-data", and it must have a name parameter.
|
||||
|
||||
let Some(cd) = &field_content_disposition else {
|
||||
return Poll::Ready(Some(Err(Error::ContentDispositionMissing)));
|
||||
};
|
||||
|
||||
let Some(field_name) = cd.get_name() else {
|
||||
return Poll::Ready(Some(Err(Error::ContentDispositionNameMissing)));
|
||||
};
|
||||
|
||||
Some(field_name.to_owned())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// TODO: check out other multipart/* RFCs for specific requirements
|
||||
|
||||
let field_content_type: Option<Mime> = field_headers
|
||||
.get(&header::CONTENT_TYPE)
|
||||
.and_then(|ct| ct.to_str().ok())
|
||||
.and_then(|ct| ct.parse().ok());
|
||||
|
||||
self.state = State::Boundary;
|
||||
|
||||
// nested multipart stream is not supported
|
||||
if let Some(mime) = &field_content_type {
|
||||
if mime.type_() == mime::MULTIPART {
|
||||
return Poll::Ready(Some(Err(Error::Nested)));
|
||||
}
|
||||
}
|
||||
|
||||
let field_inner =
|
||||
InnerField::new_in_rc(self.payload.clone(), self.boundary.clone(), &field_headers)?;
|
||||
|
||||
self.item = Item::Field(Rc::clone(&field_inner));
|
||||
|
||||
Poll::Ready(Some(Ok(Field::new(
|
||||
field_content_type,
|
||||
field_content_disposition,
|
||||
form_field_name,
|
||||
field_headers,
|
||||
safety.clone(cx),
|
||||
field_inner,
|
||||
))))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for Inner {
|
||||
fn drop(&mut self) {
|
||||
// InnerMultipartItem::Field has to be dropped first because of Safety.
|
||||
self.item = Item::None;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::time::Duration;
|
||||
|
||||
use actix_http::h1;
|
||||
use actix_web::{
|
||||
http::header::{DispositionParam, DispositionType},
|
||||
rt,
|
||||
test::TestRequest,
|
||||
web::{BufMut as _, BytesMut},
|
||||
FromRequest,
|
||||
};
|
||||
use assert_matches::assert_matches;
|
||||
use futures_test::stream::StreamTestExt as _;
|
||||
use futures_util::{future::lazy, stream, StreamExt as _};
|
||||
use tokio::sync::mpsc;
|
||||
use tokio_stream::wrappers::UnboundedReceiverStream;
|
||||
|
||||
use super::*;
|
||||
|
||||
const BOUNDARY: &str = "abbc761f78ff4d7cb7573b5a23f96ef0";
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_boundary() {
|
||||
let headers = HeaderMap::new();
|
||||
match Multipart::find_ct_and_boundary(&headers) {
|
||||
Err(Error::ContentTypeMissing) => {}
|
||||
_ => unreachable!("should not happen"),
|
||||
}
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
header::HeaderValue::from_static("test"),
|
||||
);
|
||||
|
||||
match Multipart::find_ct_and_boundary(&headers) {
|
||||
Err(Error::ContentTypeParse) => {}
|
||||
_ => unreachable!("should not happen"),
|
||||
}
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
header::HeaderValue::from_static("multipart/mixed"),
|
||||
);
|
||||
match Multipart::find_ct_and_boundary(&headers) {
|
||||
Err(Error::BoundaryMissing) => {}
|
||||
_ => unreachable!("should not happen"),
|
||||
}
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
header::HeaderValue::from_static(
|
||||
"multipart/mixed; boundary=\"5c02368e880e436dab70ed54e1c58209\"",
|
||||
),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
Multipart::find_ct_and_boundary(&headers).unwrap().1,
|
||||
"5c02368e880e436dab70ed54e1c58209",
|
||||
);
|
||||
}
|
||||
|
||||
fn create_stream() -> (
|
||||
mpsc::UnboundedSender<Result<Bytes, PayloadError>>,
|
||||
impl Stream<Item = Result<Bytes, PayloadError>>,
|
||||
) {
|
||||
let (tx, rx) = mpsc::unbounded_channel();
|
||||
|
||||
(
|
||||
tx,
|
||||
UnboundedReceiverStream::new(rx).map(|res| res.map_err(|_| panic!())),
|
||||
)
|
||||
}
|
||||
|
||||
fn create_simple_request_with_header() -> (Bytes, HeaderMap) {
|
||||
let (body, headers) = crate::test::create_form_data_payload_and_headers_with_boundary(
|
||||
BOUNDARY,
|
||||
"file",
|
||||
Some("fn.txt".to_owned()),
|
||||
Some(mime::TEXT_PLAIN_UTF_8),
|
||||
Bytes::from_static(b"data"),
|
||||
);
|
||||
|
||||
let mut buf = BytesMut::with_capacity(body.len() + 14);
|
||||
|
||||
// add junk before form to test pre-boundary data rejection
|
||||
buf.put("testasdadsad\r\n".as_bytes());
|
||||
|
||||
buf.put(body);
|
||||
|
||||
(buf.freeze(), headers)
|
||||
}
|
||||
|
||||
// TODO: use test utility when multi-file support is introduced
|
||||
fn create_double_request_with_header() -> (Bytes, HeaderMap) {
|
||||
let bytes = Bytes::from(
|
||||
"testasdadsad\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
|
||||
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
|
||||
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
|
||||
test\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
|
||||
Content-Disposition: form-data; name=\"file\"; filename=\"fn.txt\"\r\n\
|
||||
Content-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\n\
|
||||
data\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0--\r\n",
|
||||
);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
header::HeaderValue::from_static(
|
||||
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
|
||||
),
|
||||
);
|
||||
(bytes, headers)
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_multipart_no_end_crlf() {
|
||||
let (sender, payload) = create_stream();
|
||||
let (mut bytes, headers) = create_double_request_with_header();
|
||||
let bytes_stripped = bytes.split_to(bytes.len()); // strip crlf
|
||||
|
||||
sender.send(Ok(bytes_stripped)).unwrap();
|
||||
drop(sender); // eof
|
||||
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
|
||||
match multipart.next().await.unwrap() {
|
||||
Ok(_) => {}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
match multipart.next().await.unwrap() {
|
||||
Ok(_) => {}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
match multipart.next().await {
|
||||
None => {}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_multipart() {
|
||||
let (sender, payload) = create_stream();
|
||||
let (bytes, headers) = create_double_request_with_header();
|
||||
|
||||
sender.send(Ok(bytes)).unwrap();
|
||||
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
match multipart.next().await {
|
||||
Some(Ok(mut field)) => {
|
||||
let cd = field.content_disposition().unwrap();
|
||||
assert_eq!(cd.disposition, DispositionType::FormData);
|
||||
assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));
|
||||
|
||||
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
|
||||
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
|
||||
|
||||
match field.next().await.unwrap() {
|
||||
Ok(chunk) => assert_eq!(chunk, "test"),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
match field.next().await {
|
||||
None => {}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
match multipart.next().await.unwrap() {
|
||||
Ok(mut field) => {
|
||||
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
|
||||
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
|
||||
|
||||
match field.next().await {
|
||||
Some(Ok(chunk)) => assert_eq!(chunk, "data"),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
match field.next().await {
|
||||
None => {}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
match multipart.next().await {
|
||||
None => {}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
// Loops, collecting all bytes until end-of-field
|
||||
async fn get_whole_field(field: &mut Field) -> BytesMut {
|
||||
let mut b = BytesMut::new();
|
||||
loop {
|
||||
match field.next().await {
|
||||
Some(Ok(chunk)) => b.extend_from_slice(&chunk),
|
||||
None => return b,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_stream() {
|
||||
let (bytes, headers) = create_double_request_with_header();
|
||||
let payload = stream::iter(bytes)
|
||||
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
|
||||
.interleave_pending();
|
||||
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
match multipart.next().await.unwrap() {
|
||||
Ok(mut field) => {
|
||||
let cd = field.content_disposition().unwrap();
|
||||
assert_eq!(cd.disposition, DispositionType::FormData);
|
||||
assert_eq!(cd.parameters[0], DispositionParam::Name("file".into()));
|
||||
|
||||
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
|
||||
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
|
||||
|
||||
assert_eq!(get_whole_field(&mut field).await, "test");
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
match multipart.next().await {
|
||||
Some(Ok(mut field)) => {
|
||||
assert_eq!(field.content_type().unwrap().type_(), mime::TEXT);
|
||||
assert_eq!(field.content_type().unwrap().subtype(), mime::PLAIN);
|
||||
|
||||
assert_eq!(get_whole_field(&mut field).await, "data");
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
match multipart.next().await {
|
||||
None => {}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_basic() {
|
||||
let (_, payload) = h1::Payload::create(false);
|
||||
let mut payload = PayloadBuffer::new(payload);
|
||||
|
||||
assert_eq!(payload.buf.len(), 0);
|
||||
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
|
||||
assert_eq!(None, payload.read_max(1).unwrap());
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_eof() {
|
||||
let (mut sender, payload) = h1::Payload::create(false);
|
||||
let mut payload = PayloadBuffer::new(payload);
|
||||
|
||||
assert_eq!(None, payload.read_max(4).unwrap());
|
||||
sender.feed_data(Bytes::from("data"));
|
||||
sender.feed_eof();
|
||||
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
|
||||
|
||||
assert_eq!(Some(Bytes::from("data")), payload.read_max(4).unwrap());
|
||||
assert_eq!(payload.buf.len(), 0);
|
||||
assert!(payload.read_max(1).is_err());
|
||||
assert!(payload.eof);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_err() {
|
||||
let (mut sender, payload) = h1::Payload::create(false);
|
||||
let mut payload = PayloadBuffer::new(payload);
|
||||
assert_eq!(None, payload.read_max(1).unwrap());
|
||||
sender.set_error(PayloadError::Incomplete(None));
|
||||
lazy(|cx| payload.poll_stream(cx)).await.err().unwrap();
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn read_max() {
|
||||
let (mut sender, payload) = h1::Payload::create(false);
|
||||
let mut payload = PayloadBuffer::new(payload);
|
||||
|
||||
sender.feed_data(Bytes::from("line1"));
|
||||
sender.feed_data(Bytes::from("line2"));
|
||||
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
|
||||
assert_eq!(payload.buf.len(), 10);
|
||||
|
||||
assert_eq!(Some(Bytes::from("line1")), payload.read_max(5).unwrap());
|
||||
assert_eq!(payload.buf.len(), 5);
|
||||
|
||||
assert_eq!(Some(Bytes::from("line2")), payload.read_max(5).unwrap());
|
||||
assert_eq!(payload.buf.len(), 0);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn read_exactly() {
|
||||
let (mut sender, payload) = h1::Payload::create(false);
|
||||
let mut payload = PayloadBuffer::new(payload);
|
||||
|
||||
assert_eq!(None, payload.read_exact(2));
|
||||
|
||||
sender.feed_data(Bytes::from("line1"));
|
||||
sender.feed_data(Bytes::from("line2"));
|
||||
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
|
||||
|
||||
assert_eq!(Some(Bytes::from_static(b"li")), payload.read_exact(2));
|
||||
assert_eq!(payload.buf.len(), 8);
|
||||
|
||||
assert_eq!(Some(Bytes::from_static(b"ne1l")), payload.read_exact(4));
|
||||
assert_eq!(payload.buf.len(), 4);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn read_until() {
|
||||
let (mut sender, payload) = h1::Payload::create(false);
|
||||
let mut payload = PayloadBuffer::new(payload);
|
||||
|
||||
assert_eq!(None, payload.read_until(b"ne").unwrap());
|
||||
|
||||
sender.feed_data(Bytes::from("line1"));
|
||||
sender.feed_data(Bytes::from("line2"));
|
||||
lazy(|cx| payload.poll_stream(cx)).await.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
Some(Bytes::from("line")),
|
||||
payload.read_until(b"ne").unwrap()
|
||||
);
|
||||
assert_eq!(payload.buf.len(), 6);
|
||||
|
||||
assert_eq!(
|
||||
Some(Bytes::from("1line2")),
|
||||
payload.read_until(b"2").unwrap()
|
||||
);
|
||||
assert_eq!(payload.buf.len(), 0);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_multipart_from_error() {
|
||||
let err = Error::ContentTypeMissing;
|
||||
let mut multipart = Multipart::from_error(err);
|
||||
assert!(multipart.next().await.unwrap().is_err())
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_multipart_from_boundary() {
|
||||
let (_, payload) = create_stream();
|
||||
let (_, headers) = create_simple_request_with_header();
|
||||
let (ct, boundary) = Multipart::find_ct_and_boundary(&headers).unwrap();
|
||||
let _ = Multipart::from_ct_and_boundary(ct, boundary, payload);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_multipart_payload_consumption() {
|
||||
// with sample payload and HttpRequest with no headers
|
||||
let (_, inner_payload) = h1::Payload::create(false);
|
||||
let mut payload = actix_web::dev::Payload::from(inner_payload);
|
||||
let req = TestRequest::default().to_http_request();
|
||||
|
||||
// multipart should generate an error
|
||||
let mut mp = Multipart::from_request(&req, &mut payload).await.unwrap();
|
||||
assert!(mp.next().await.unwrap().is_err());
|
||||
|
||||
// and should not consume the payload
|
||||
match payload {
|
||||
actix_web::dev::Payload::H1 { .. } => {} //expected
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn no_content_disposition_form_data() {
|
||||
let bytes = Bytes::from(
|
||||
"testasdadsad\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
|
||||
Content-Type: text/plain; charset=utf-8\r\n\
|
||||
Content-Length: 4\r\n\
|
||||
\r\n\
|
||||
test\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
|
||||
);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
header::HeaderValue::from_static(
|
||||
"multipart/form-data; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
|
||||
),
|
||||
);
|
||||
let payload = stream::iter(bytes)
|
||||
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
|
||||
.interleave_pending();
|
||||
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
let res = multipart.next().await.unwrap();
|
||||
assert_matches!(
|
||||
res.expect_err(
|
||||
"according to RFC 7578, form-data fields require a content-disposition header"
|
||||
),
|
||||
Error::ContentDispositionMissing
|
||||
);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn no_content_disposition_non_form_data() {
|
||||
let bytes = Bytes::from(
|
||||
"testasdadsad\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
|
||||
Content-Type: text/plain; charset=utf-8\r\n\
|
||||
Content-Length: 4\r\n\
|
||||
\r\n\
|
||||
test\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
|
||||
);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
header::HeaderValue::from_static(
|
||||
"multipart/mixed; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
|
||||
),
|
||||
);
|
||||
let payload = stream::iter(bytes)
|
||||
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
|
||||
.interleave_pending();
|
||||
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
let res = multipart.next().await.unwrap();
|
||||
res.unwrap();
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn no_name_in_form_data_content_disposition() {
|
||||
let bytes = Bytes::from(
|
||||
"testasdadsad\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n\
|
||||
Content-Disposition: form-data; filename=\"fn.txt\"\r\n\
|
||||
Content-Type: text/plain; charset=utf-8\r\n\
|
||||
Content-Length: 4\r\n\
|
||||
\r\n\
|
||||
test\r\n\
|
||||
--abbc761f78ff4d7cb7573b5a23f96ef0\r\n",
|
||||
);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
header::HeaderValue::from_static(
|
||||
"multipart/form-data; boundary=\"abbc761f78ff4d7cb7573b5a23f96ef0\"",
|
||||
),
|
||||
);
|
||||
let payload = stream::iter(bytes)
|
||||
.map(|byte| Ok(Bytes::copy_from_slice(&[byte])))
|
||||
.interleave_pending();
|
||||
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
let res = multipart.next().await.unwrap();
|
||||
assert_matches!(
|
||||
res.expect_err("according to RFC 7578, form-data fields require a name attribute"),
|
||||
Error::ContentDispositionNameMissing
|
||||
);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_drop_multipart_dont_hang() {
|
||||
let (sender, payload) = create_stream();
|
||||
let (bytes, headers) = create_simple_request_with_header();
|
||||
sender.send(Ok(bytes)).unwrap();
|
||||
drop(sender); // eof
|
||||
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
let mut field = multipart.next().await.unwrap().unwrap();
|
||||
|
||||
drop(multipart);
|
||||
|
||||
// should fail immediately
|
||||
match field.next().await {
|
||||
Some(Err(Error::NotConsumed)) => {}
|
||||
_ => panic!(),
|
||||
};
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_drop_field_awaken_multipart() {
|
||||
let (sender, payload) = create_stream();
|
||||
let (bytes, headers) = create_double_request_with_header();
|
||||
sender.send(Ok(bytes)).unwrap();
|
||||
drop(sender); // eof
|
||||
|
||||
let mut multipart = Multipart::new(&headers, payload);
|
||||
let mut field = multipart.next().await.unwrap().unwrap();
|
||||
|
||||
let task = rt::spawn(async move {
|
||||
rt::time::sleep(Duration::from_millis(500)).await;
|
||||
assert_eq!(field.next().await.unwrap().unwrap(), "test");
|
||||
drop(field);
|
||||
});
|
||||
|
||||
// dropping field should awaken current task
|
||||
let _ = multipart.next().await.unwrap().unwrap();
|
||||
task.await.unwrap();
|
||||
}
|
||||
}
|
actix-multipart/src/payload.rs (new file, 147 lines)
@ -0,0 +1,147 @@
|
||||
use std::{
|
||||
cell::{RefCell, RefMut},
|
||||
cmp, mem,
|
||||
pin::Pin,
|
||||
rc::Rc,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
|
||||
use actix_web::{
|
||||
error::PayloadError,
|
||||
web::{Bytes, BytesMut},
|
||||
};
|
||||
use futures_core::stream::{LocalBoxStream, Stream};
|
||||
|
||||
use crate::{error::Error, safety::Safety};
|
||||
|
||||
pub(crate) struct PayloadRef {
|
||||
payload: Rc<RefCell<PayloadBuffer>>,
|
||||
}
|
||||
|
||||
impl PayloadRef {
|
||||
pub(crate) fn new(payload: PayloadBuffer) -> PayloadRef {
|
||||
PayloadRef {
|
||||
payload: Rc::new(RefCell::new(payload)),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_mut(&self, safety: &Safety) -> Option<RefMut<'_, PayloadBuffer>> {
|
||||
if safety.current() {
|
||||
Some(self.payload.borrow_mut())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for PayloadRef {
|
||||
fn clone(&self) -> PayloadRef {
|
||||
PayloadRef {
|
||||
payload: Rc::clone(&self.payload),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Payload buffer.
|
||||
pub(crate) struct PayloadBuffer {
|
||||
pub(crate) stream: LocalBoxStream<'static, Result<Bytes, PayloadError>>,
|
||||
pub(crate) buf: BytesMut,
|
||||
/// EOF flag. If true, no more payload reads will be attempted.
|
||||
pub(crate) eof: bool,
|
||||
}
|
||||
|
||||
impl PayloadBuffer {
|
||||
/// Constructs new payload buffer.
|
||||
pub(crate) fn new<S>(stream: S) -> Self
|
||||
where
|
||||
S: Stream<Item = Result<Bytes, PayloadError>> + 'static,
|
||||
{
|
||||
PayloadBuffer {
|
||||
stream: Box::pin(stream),
|
||||
buf: BytesMut::with_capacity(1_024), // pre-allocate 1KiB
|
||||
eof: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn poll_stream(&mut self, cx: &mut Context<'_>) -> Result<(), PayloadError> {
|
||||
loop {
|
||||
match Pin::new(&mut self.stream).poll_next(cx) {
|
||||
Poll::Ready(Some(Ok(data))) => {
|
||||
self.buf.extend_from_slice(&data);
|
||||
// try to read more data
|
||||
continue;
|
||||
}
|
||||
Poll::Ready(Some(Err(err))) => return Err(err),
|
||||
Poll::Ready(None) => {
|
||||
self.eof = true;
|
||||
return Ok(());
|
||||
}
|
||||
Poll::Pending => return Ok(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Reads exact number of bytes.
|
||||
#[cfg(test)]
|
||||
pub(crate) fn read_exact(&mut self, size: usize) -> Option<Bytes> {
|
||||
if size <= self.buf.len() {
|
||||
Some(self.buf.split_to(size).freeze())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn read_max(&mut self, size: u64) -> Result<Option<Bytes>, Error> {
|
||||
if !self.buf.is_empty() {
|
||||
let size = cmp::min(self.buf.len() as u64, size) as usize;
|
||||
Ok(Some(self.buf.split_to(size).freeze()))
|
||||
} else if self.eof {
|
||||
Err(Error::Incomplete)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Reads until specified ending.
|
||||
///
|
||||
/// Returns:
|
||||
///
|
||||
/// - `Ok(Some(chunk))` - `needle` is found, with chunk ending after needle
|
||||
/// - `Err(Incomplete)` - `needle` is not found and we're at EOF
|
||||
/// - `Ok(None)` - `needle` is not found otherwise
|
||||
pub(crate) fn read_until(&mut self, needle: &[u8]) -> Result<Option<Bytes>, Error> {
|
||||
match memchr::memmem::find(&self.buf, needle) {
|
||||
// buffer exhausted and EOF without finding needle
|
||||
None if self.eof => Err(Error::Incomplete),
|
||||
|
||||
// needle not yet found
|
||||
None => Ok(None),
|
||||
|
||||
// needle found, split chunk out of buf
|
||||
Some(idx) => Ok(Some(self.buf.split_to(idx + needle.len()).freeze())),
|
||||
}
|
||||
}
|
||||
|
||||
/// Reads bytes until new line delimiter.
|
||||
#[inline]
|
||||
pub(crate) fn readline(&mut self) -> Result<Option<Bytes>, Error> {
|
||||
self.read_until(b"\n")
|
||||
}
|
||||
|
||||
/// Reads bytes until new line delimiter or until EOF.
|
||||
#[inline]
|
||||
pub(crate) fn readline_or_eof(&mut self) -> Result<Option<Bytes>, Error> {
|
||||
match self.readline() {
|
||||
Err(Error::Incomplete) if self.eof => Ok(Some(self.buf.split().freeze())),
|
||||
line => line,
|
||||
}
|
||||
}
|
||||
|
||||
/// Puts unprocessed data back to the buffer.
|
||||
pub(crate) fn unprocessed(&mut self, data: Bytes) {
|
||||
// TODO: use BytesMut::from when it's released, see https://github.com/tokio-rs/bytes/pull/710
|
||||
let buf = BytesMut::from(&data[..]);
|
||||
let buf = mem::replace(&mut self.buf, buf);
|
||||
self.buf.extend_from_slice(&buf);
|
||||
}
|
||||
}
|
actix-multipart/src/safety.rs (new file, 60 lines)
@ -0,0 +1,60 @@
|
||||
use std::{cell::Cell, marker::PhantomData, rc::Rc, task};
|
||||
|
||||
use local_waker::LocalWaker;
|
||||
|
||||
/// Counter that tracks the number of clones of the payload and gives access to it only to the top-most owner.
|
||||
///
|
||||
/// - When dropped, parent task is awakened. This is to support the case where `Field` is dropped in
|
||||
/// a separate task than `Multipart`.
|
||||
/// - Assumes that parent owners don't move to different tasks; only the top-most is allowed to.
|
||||
/// - If dropped while not the top-most owner, the `is_clean` flag is set to false.
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Safety {
|
||||
task: LocalWaker,
|
||||
level: usize,
|
||||
payload: Rc<PhantomData<bool>>,
|
||||
clean: Rc<Cell<bool>>,
|
||||
}
|
||||
|
||||
impl Safety {
|
||||
pub(crate) fn new() -> Safety {
|
||||
let payload = Rc::new(PhantomData);
|
||||
Safety {
|
||||
task: LocalWaker::new(),
|
||||
level: Rc::strong_count(&payload),
|
||||
clean: Rc::new(Cell::new(true)),
|
||||
payload,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn current(&self) -> bool {
|
||||
Rc::strong_count(&self.payload) == self.level && self.clean.get()
|
||||
}
|
||||
|
||||
pub(crate) fn is_clean(&self) -> bool {
|
||||
self.clean.get()
|
||||
}
|
||||
|
||||
pub(crate) fn clone(&self, cx: &task::Context<'_>) -> Safety {
|
||||
let payload = Rc::clone(&self.payload);
|
||||
let s = Safety {
|
||||
task: LocalWaker::new(),
|
||||
level: Rc::strong_count(&payload),
|
||||
clean: self.clean.clone(),
|
||||
payload,
|
||||
};
|
||||
s.task.register(cx.waker());
|
||||
s
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for Safety {
|
||||
fn drop(&mut self) {
|
||||
if Rc::strong_count(&self.payload) != self.level {
|
||||
// Multipart dropped leaving a Field
|
||||
self.clean.set(false);
|
||||
}
|
||||
|
||||
self.task.wake();
|
||||
}
|
||||
}
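
The clone-counting idea documented above can be shown with a standalone sketch (not part of the crate): each guard records the `Rc` strong count at creation, so only the most recently created owner sees a matching count, and the parent regains access once the child is dropped.

```rust
use std::rc::Rc;

struct Guard {
    level: usize,
    token: Rc<()>,
}

impl Guard {
    fn new() -> Self {
        let token = Rc::new(());
        Guard { level: Rc::strong_count(&token), token }
    }

    fn child(&self) -> Self {
        let token = Rc::clone(&self.token);
        Guard { level: Rc::strong_count(&token), token }
    }

    // only the most recently created (top-most) guard sees a matching count
    fn is_current(&self) -> bool {
        Rc::strong_count(&self.token) == self.level
    }
}

fn main() {
    let parent = Guard::new();
    assert!(parent.is_current()); // count = 1, level = 1

    let field = parent.child();
    assert!(field.is_current()); // count = 2, level = 2
    assert!(!parent.is_current()); // parent waits for the child to be dropped

    drop(field);
    assert!(parent.is_current()); // count back down to 1
}
```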
|
File diff suppressed because it is too large
@ -1,5 +1,9 @@
|
||||
use actix_web::http::header::{self, HeaderMap};
|
||||
use bytes::{BufMut as _, Bytes, BytesMut};
|
||||
//! Multipart testing utilities.
|
||||
|
||||
use actix_web::{
|
||||
http::header::{self, HeaderMap},
|
||||
web::{BufMut as _, Bytes, BytesMut},
|
||||
};
|
||||
use mime::Mime;
|
||||
use rand::{
|
||||
distributions::{Alphanumeric, DistString as _},
|
||||
@ -21,8 +25,7 @@ const BOUNDARY_PREFIX: &str = "------------------------";
|
||||
///
|
||||
/// ```
|
||||
/// use actix_multipart::test::create_form_data_payload_and_headers;
|
||||
/// use actix_web::test::TestRequest;
|
||||
/// use bytes::Bytes;
|
||||
/// use actix_web::{test::TestRequest, web::Bytes};
|
||||
/// use memchr::memmem::find;
|
||||
///
|
||||
/// let (body, headers) = create_form_data_payload_and_headers(
|
||||
|
@ -12,9 +12,11 @@ repository = "https://github.com/actix/actix-web"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "actix_router"
|
||||
path = "src/lib.rs"
|
||||
[package.metadata.cargo_check_external_types]
|
||||
allowed_external_types = [
|
||||
"http::*",
|
||||
"serde::*",
|
||||
]
|
||||
|
||||
[features]
|
||||
default = ["http", "unicode"]
|
||||
|
@ -18,6 +18,22 @@ categories = [
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2021"
|
||||
|
||||
[package.metadata.cargo_check_external_types]
|
||||
allowed_external_types = [
|
||||
"actix_codec::*",
|
||||
"actix_http_test::*",
|
||||
"actix_http::*",
|
||||
"actix_service::*",
|
||||
"actix_web::*",
|
||||
"awc::*",
|
||||
"bytes::*",
|
||||
"futures_core::*",
|
||||
"http::*",
|
||||
"openssl::*",
|
||||
"rustls::*",
|
||||
"tokio::*",
|
||||
]
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
|
actix-test/README.md (new file, 45 lines)
@ -0,0 +1,45 @@
|
||||
# `actix-test`
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
[](https://crates.io/crates/actix-test)
|
||||
[](https://docs.rs/actix-test/0.1.5)
|
||||

|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-test/0.1.5)
|
||||
[](https://crates.io/crates/actix-test)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
<!-- cargo-rdme start -->
|
||||
|
||||
Integration testing tools for Actix Web applications.
|
||||
|
||||
The main integration testing tool is [`TestServer`]. It spawns a real HTTP server on an unused port and provides methods that use a real HTTP client. Therefore, it is much closer to real-world cases than using `init_service`, which skips HTTP encoding and decoding.
|
||||
|
||||
## Examples
|
||||
|
||||
```rust
|
||||
use actix_web::{get, web, test, App, HttpResponse, Error, Responder};
|
||||
|
||||
#[get("/")]
|
||||
async fn my_handler() -> Result<impl Responder, Error> {
|
||||
Ok(HttpResponse::Ok())
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_example() {
|
||||
let srv = actix_test::start(||
|
||||
App::new().service(my_handler)
|
||||
);
|
||||
|
||||
let req = srv.get("/");
|
||||
let res = req.send().await.unwrap();
|
||||
|
||||
assert!(res.status().is_success());
|
||||
}
|
||||
```
|
||||
|
||||
<!-- cargo-rdme end -->
|
@ -5,6 +5,7 @@
|
||||
//! real-world cases than using `init_service`, which skips HTTP encoding and decoding.
|
||||
//!
|
||||
//! # Examples
|
||||
//!
|
||||
//! ```
|
||||
//! use actix_web::{get, web, test, App, HttpResponse, Error, Responder};
|
||||
//!
|
||||
|
@ -9,9 +9,15 @@ repository = "https://github.com/actix/actix-web"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "actix_web_actors"
|
||||
path = "src/lib.rs"
|
||||
[package.metadata.cargo_check_external_types]
|
||||
allowed_external_types = [
|
||||
"actix::*",
|
||||
"actix_http::*",
|
||||
"actix_web::*",
|
||||
"bytes::*",
|
||||
"bytestring::*",
|
||||
"futures_core::*",
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
actix = { version = ">=0.12, <0.14", default-features = false }
|
||||
|
@ -2,9 +2,18 @@
|
||||
|
||||
## Unreleased
|
||||
|
||||
## 4.8.0
|
||||
|
||||
### Added
|
||||
|
||||
- Add `web::Html` responder.
|
||||
- Add `HttpRequest::full_url()` method to get the complete URL of the request.
|
||||
|
||||
### Fixed
|
||||
|
||||
- `ConnectionInfo::realip_remote_addr()` now handles IPv6 addresses from `Forwarded` header correctly. Previously, it sometimes returned the forwarded port as well.
|
||||
- Always remove port from return value of `ConnectionInfo::realip_remote_addr()` when handling IPv6 addresses from the `Forwarded` header.
|
||||
- The `UrlencodedError::ContentType` variant (relevant to the `Form` extractor) now uses the 415 (Unsupported Media Type) status code in its `ResponseError` implementation.
|
||||
- Apply `HttpServer::max_connection_rate()` setting when using rustls v0.22 or v0.23.
|
||||
|
||||
## 4.7.0
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-web"
|
||||
version = "4.7.0"
|
||||
version = "4.8.0"
|
||||
description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
@ -35,9 +35,31 @@ features = [
|
||||
"secure-cookies",
|
||||
]
|
||||
|
||||
[lib]
|
||||
name = "actix_web"
|
||||
path = "src/lib.rs"
|
||||
[package.metadata.cargo_check_external_types]
|
||||
allowed_external_types = [
|
||||
"actix_http::*",
|
||||
"actix_router::*",
|
||||
"actix_rt::*",
|
||||
"actix_server::*",
|
||||
"actix_service::*",
|
||||
"actix_utils::*",
|
||||
"actix_web_codegen::*",
|
||||
"bytes::*",
|
||||
"cookie::*",
|
||||
"cookie",
|
||||
"futures_core::*",
|
||||
"http::*",
|
||||
"language_tags::*",
|
||||
"mime::*",
|
||||
"openssl::*",
|
||||
"rustls::*",
|
||||
"serde_json::*",
|
||||
"serde_urlencoded::*",
|
||||
"serde::*",
|
||||
"serde::*",
|
||||
"tokio::*",
|
||||
"url::*",
|
||||
]
|
||||
|
||||
[features]
|
||||
default = [
|
||||
@ -71,18 +93,18 @@ secure-cookies = ["cookies", "cookie/secure"]
|
||||
http2 = ["actix-http/http2"]
|
||||
|
||||
# TLS via OpenSSL
|
||||
openssl = ["http2", "actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]
|
||||
openssl = ["__tls", "http2", "actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]
|
||||
|
||||
# TLS via Rustls v0.20
|
||||
rustls = ["rustls-0_20"]
|
||||
# TLS via Rustls v0.20
|
||||
rustls-0_20 = ["http2", "actix-http/rustls-0_20", "actix-tls/accept", "actix-tls/rustls-0_20"]
|
||||
rustls-0_20 = ["__tls", "http2", "actix-http/rustls-0_20", "actix-tls/accept", "actix-tls/rustls-0_20"]
|
||||
# TLS via Rustls v0.21
|
||||
rustls-0_21 = ["http2", "actix-http/rustls-0_21", "actix-tls/accept", "actix-tls/rustls-0_21"]
|
||||
rustls-0_21 = ["__tls", "http2", "actix-http/rustls-0_21", "actix-tls/accept", "actix-tls/rustls-0_21"]
|
||||
# TLS via Rustls v0.22
|
||||
rustls-0_22 = ["http2", "actix-http/rustls-0_22", "actix-tls/accept", "actix-tls/rustls-0_22"]
|
||||
rustls-0_22 = ["__tls", "http2", "actix-http/rustls-0_22", "actix-tls/accept", "actix-tls/rustls-0_22"]
|
||||
# TLS via Rustls v0.23
|
||||
rustls-0_23 = ["http2", "actix-http/rustls-0_23", "actix-tls/accept", "actix-tls/rustls-0_23"]
|
||||
rustls-0_23 = ["__tls", "http2", "actix-http/rustls-0_23", "actix-tls/accept", "actix-tls/rustls-0_23"]
|
||||
|
||||
# Full unicode support
|
||||
unicode = ["dep:regex", "actix-router/unicode"]
|
||||
@ -91,6 +113,10 @@ unicode = ["dep:regex", "actix-router/unicode"]
|
||||
# Don't rely on these whatsoever. They may disappear at anytime.
|
||||
__compress = []
|
||||
|
||||
# Internal (PRIVATE!) features used to aid checking feature status.
|
||||
# Don't rely on these whatsoever. They may disappear at anytime.
|
||||
__tls = []
|
||||
|
||||
# io-uring feature only available for Linux OSes.
|
||||
experimental-io-uring = ["actix-server/io-uring"]
|
||||
|
||||
|
@ -8,10 +8,10 @@
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
[](https://crates.io/crates/actix-web)
|
||||
[](https://docs.rs/actix-web/4.7.0)
|
||||
[](https://docs.rs/actix-web/4.8.0)
|
||||

|
||||

|
||||
[](https://deps.rs/crate/actix-web/4.7.0)
|
||||
[](https://deps.rs/crate/actix-web/4.8.0)
|
||||
<br />
|
||||
[](https://github.com/actix/actix-web/actions/workflows/ci.yml)
|
||||
[](https://codecov.io/gh/actix/actix-web)
|
||||
@ -109,4 +109,4 @@ This project is licensed under either of the following licenses, at your option:
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
Contribution to the actix-web repo is organized under the terms of the Contributor Covenant. The Actix team promises to intervene to uphold that code of conduct.
|
||||
Contribution to the `actix/actix-web` repo is organized under the terms of the Contributor Covenant. The Actix team promises to intervene to uphold that code of conduct.
|
||||
|
@ -234,7 +234,6 @@ where
|
||||
///
|
||||
/// * *Resource* is an entry in resource table which corresponds to requested URL.
|
||||
/// * *Scope* is a set of resources with common root path.
|
||||
/// * "StaticFiles" is a service for static files support
|
||||
pub fn service<F>(mut self, factory: F) -> Self
|
||||
where
|
||||
F: HttpServiceFactory + 'static,
|
||||
|
@ -100,6 +100,7 @@ impl ResponseError for UrlencodedError {
|
||||
match self {
|
||||
Self::Overflow { .. } => StatusCode::PAYLOAD_TOO_LARGE,
|
||||
Self::UnknownLength => StatusCode::LENGTH_REQUIRED,
|
||||
Self::ContentType => StatusCode::UNSUPPORTED_MEDIA_TYPE,
|
||||
Self::Payload(err) => err.status_code(),
|
||||
_ => StatusCode::BAD_REQUEST,
|
||||
}
|
||||
@ -232,7 +233,7 @@ mod tests {
|
||||
let resp = UrlencodedError::UnknownLength.error_response();
|
||||
assert_eq!(resp.status(), StatusCode::LENGTH_REQUIRED);
|
||||
let resp = UrlencodedError::ContentType.error_response();
|
||||
assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
|
||||
assert_eq!(resp.status(), StatusCode::UNSUPPORTED_MEDIA_TYPE);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -154,7 +154,7 @@ impl DispositionParam {
|
||||
#[inline]
|
||||
pub fn as_name(&self) -> Option<&str> {
|
||||
match self {
|
||||
DispositionParam::Name(ref name) => Some(name.as_str()),
|
||||
DispositionParam::Name(name) => Some(name.as_str()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@ -163,7 +163,7 @@ impl DispositionParam {
|
||||
#[inline]
|
||||
pub fn as_filename(&self) -> Option<&str> {
|
||||
match self {
|
||||
DispositionParam::Filename(ref filename) => Some(filename.as_str()),
|
||||
DispositionParam::Filename(filename) => Some(filename.as_str()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@ -172,7 +172,7 @@ impl DispositionParam {
|
||||
#[inline]
|
||||
pub fn as_filename_ext(&self) -> Option<&ExtendedValue> {
|
||||
match self {
|
||||
DispositionParam::FilenameExt(ref value) => Some(value),
|
||||
DispositionParam::FilenameExt(value) => Some(value),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@ -27,7 +27,8 @@ fn bare_address(val: &str) -> &str {
|
||||
val.split("]:")
|
||||
.next()
|
||||
.map(|s| s.trim_start_matches('[').trim_end_matches(']'))
|
||||
// This shouldn't *actually* ever happen
|
||||
// this indicates that the IPv6 address is malformed so shouldn't
|
||||
// usually happen, but if it does, just return the original input
|
||||
.unwrap_or(val)
|
||||
} else {
|
||||
val.split(':').next().unwrap_or(val)
|
||||
|
@ -91,6 +91,35 @@ impl HttpRequest {
        &self.head().uri
    }

    /// Returns request's original full URL.
    ///
    /// Reconstructed URL is best-effort, using [`connection_info`](HttpRequest::connection_info())
    /// to get forwarded scheme & host.
    ///
    /// ```
    /// use actix_web::test::TestRequest;
    /// let req = TestRequest::with_uri("http://10.1.2.3:8443/api?id=4&name=foo")
    ///     .insert_header(("host", "example.com"))
    ///     .to_http_request();
    ///
    /// assert_eq!(
    ///     req.full_url().as_str(),
    ///     "http://example.com/api?id=4&name=foo",
    /// );
    /// ```
    pub fn full_url(&self) -> url::Url {
        let info = self.connection_info();
        let scheme = info.scheme();
        let host = info.host();
        let path_and_query = self
            .uri()
            .path_and_query()
            .map(|paq| paq.as_str())
            .unwrap_or("/");

        url::Url::parse(&format!("{scheme}://{host}{path_and_query}")).unwrap()
    }

    /// Read the Request method.
    #[inline]
    pub fn method(&self) -> &Method {
@ -963,4 +992,27 @@ mod tests {

        assert!(format!("{:?}", req).contains(location_header));
    }

    #[test]
    fn check_full_url() {
        let req = TestRequest::with_uri("/api?id=4&name=foo").to_http_request();
        assert_eq!(
            req.full_url().as_str(),
            "http://localhost:8080/api?id=4&name=foo",
        );

        let req = TestRequest::with_uri("https://example.com/api?id=4&name=foo").to_http_request();
        assert_eq!(
            req.full_url().as_str(),
            "https://example.com/api?id=4&name=foo",
        );

        let req = TestRequest::with_uri("http://10.1.2.3:8443/api?id=4&name=foo")
            .insert_header(("host", "example.com"))
            .to_http_request();
        assert_eq!(
            req.full_url().as_str(),
            "http://example.com/api?id=4&name=foo",
        );
    }
}

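Assuming the new `full_url()` method added above, here is a hedged sketch of using it inside a handler; the route, handler name, and bind address are illustrative only.

```rust
// Hedged sketch: serve back the externally visible URL of the current request,
// assuming the full_url() method introduced in the hunk above.
use actix_web::{get, App, HttpRequest, HttpResponse, HttpServer, Responder};

#[get("/whoami")]
async fn whoami(req: HttpRequest) -> impl Responder {
    // Best-effort reconstruction honoring forwarded scheme/host via connection_info().
    let url = req.full_url();
    HttpResponse::Ok().body(format!("you requested {url}"))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(whoami))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```
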
@ -213,7 +213,6 @@ where
    ///
    /// * *Resource* is an entry in resource table which corresponds to requested URL.
    /// * *Scope* is a set of resources with common root path.
    /// * "StaticFiles" is a service for static files support
    ///
    /// ```
    /// use actix_web::{web, App, HttpRequest};

@ -7,13 +7,7 @@ use std::{
    time::Duration,
};

#[cfg(any(
    feature = "openssl",
    feature = "rustls-0_20",
    feature = "rustls-0_21",
    feature = "rustls-0_22",
    feature = "rustls-0_23",
))]
#[cfg(feature = "__tls")]
use actix_http::TlsAcceptorConfig;
use actix_http::{body::MessageBody, Extensions, HttpService, KeepAlive, Request, Response};
use actix_server::{Server, ServerBuilder};
@ -190,7 +184,7 @@ where
    /// By default max connections is set to a 256.
    #[allow(unused_variables)]
    pub fn max_connection_rate(self, num: usize) -> Self {
        #[cfg(any(feature = "rustls-0_20", feature = "rustls-0_21", feature = "openssl"))]
        #[cfg(feature = "__tls")]
        actix_tls::accept::max_concurrent_tls_connect(num);
        self
    }
@ -243,13 +237,7 @@ where
    /// time, the connection is closed.
    ///
    /// By default, the handshake timeout is 3 seconds.
    #[cfg(any(
        feature = "openssl",
        feature = "rustls-0_20",
        feature = "rustls-0_21",
        feature = "rustls-0_22",
        feature = "rustls-0_23",
    ))]
    #[cfg(feature = "__tls")]
    pub fn tls_handshake_timeout(self, dur: Duration) -> Self {
        self.config
            .lock()

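These hunks collapse the per-backend `cfg(any(...))` lists into a single internal `__tls` umbrella feature. A rough illustration (not from the diff) of gating code on such an umbrella feature, assuming a crate whose concrete TLS backend features each enable `__tls`:

```rust
// Illustrative only: assumes a crate whose Cargo.toml defines an internal `__tls`
// feature that each concrete TLS backend feature (openssl, rustls-*) enables.

/// Compiled only when at least one TLS backend feature is turned on.
#[cfg(feature = "__tls")]
pub fn tls_backend_enabled() -> bool {
    true
}

/// Fallback when no TLS backend feature is enabled.
#[cfg(not(feature = "__tls"))]
pub fn tls_backend_enabled() -> bool {
    false
}

fn main() {
    println!("TLS backend enabled: {}", tls_backend_enabled());
}
```
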
66
actix-web/src/types/html.rs
Normal file
@ -0,0 +1,66 @@
//! Semantic HTML responder. See [`Html`].

use crate::{
    http::{
        header::{self, ContentType, TryIntoHeaderValue},
        StatusCode,
    },
    HttpRequest, HttpResponse, Responder,
};

/// Semantic HTML responder.
///
/// When used as a responder, creates a 200 OK response, sets the correct HTML content type, and
/// uses the string passed to [`Html::new()`] as the body.
///
/// ```
/// # use actix_web::web::Html;
/// Html::new("<p>Hello, World!</p>")
/// # ;
/// ```
#[derive(Debug, Clone, PartialEq, Hash)]
pub struct Html(String);

impl Html {
    /// Constructs a new `Html` responder.
    pub fn new(html: impl Into<String>) -> Self {
        Self(html.into())
    }
}

impl Responder for Html {
    type Body = String;

    fn respond_to(self, _req: &HttpRequest) -> HttpResponse<Self::Body> {
        let mut res = HttpResponse::with_body(StatusCode::OK, self.0);
        res.headers_mut().insert(
            header::CONTENT_TYPE,
            ContentType::html().try_into_value().unwrap(),
        );
        res
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::test::TestRequest;

    #[test]
    fn responder() {
        let req = TestRequest::default().to_http_request();

        let res = Html::new("<p>Hello, World!</p>");
        let res = res.respond_to(&req);

        assert!(res.status().is_success());
        assert!(res
            .headers()
            .get(header::CONTENT_TYPE)
            .unwrap()
            .to_str()
            .unwrap()
            .starts_with("text/html"));
        assert!(res.body().starts_with("<p>"));
    }
}

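A hedged usage sketch (not part of the new file) showing the `Html` responder returned from a handler once it is exported from `actix_web::web`; the route and handler name are made up for illustration.

```rust
// Hedged sketch: returning the new Html responder from a handler.
use actix_web::{get, web::Html, App, HttpServer};

#[get("/")]
async fn index() -> Html {
    // Responds 200 OK with a text/html content type and this string as the body.
    Html::new("<h1>Hello, World!</h1>")
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(index))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```
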
@ -3,6 +3,7 @@
mod either;
mod form;
mod header;
mod html;
mod json;
mod path;
mod payload;
@ -13,6 +14,7 @@ pub use self::{
    either::Either,
    form::{Form, FormConfig, UrlEncoded},
    header::Header,
    html::Html,
    json::{Json, JsonBody, JsonConfig},
    path::{Path, PathConfig},
    payload::{Payload, PayloadConfig},

@ -15,10 +15,6 @@ repository = "https://github.com/actix/actix-web"
license = "MIT OR Apache-2.0"
edition = "2021"

[lib]
name = "awc"
path = "src/lib.rs"

[package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"]
features = [
@ -33,6 +29,27 @@ features = [
  "compress-zstd",
]

[package.metadata.cargo_check_external_types]
allowed_external_types = [
  "actix_codec::*",
  "actix_http::*",
  "actix_rt::*",
  "actix_service::*",
  "actix_tls::*",
  "bytes::*",
  "cookie::*",
  "cookie",
  "futures_core::*",
  "h2::*",
  "http::*",
  "openssl::*",
  "rustls::*",
  "serde_json::*",
  "serde_urlencoded::*",
  "serde::*",
  "tokio::*",
]

[features]
default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]

@ -134,7 +151,7 @@ rcgen = "0.13"
rustls-pemfile = "2"
tokio = { version = "1.24.2", features = ["rt-multi-thread", "macros"] }
zstd = "0.13"
tls-rustls-0_23 = { package = "rustls", version = "0.23" } # add rustls 0.23 with default features to make aws_lc_rs work in tests
tls-rustls-0_23 = { package = "rustls", version = "0.23" } # add rustls 0.23 with default features to make aws_lc_rs work in tests

[[example]]
name = "client"

@ -1,6 +1,8 @@
use std::error::Error as StdError;

#[tokio::main]
/// If we want to make requests to addresses starting with `https`, we need to enable the rustls feature of awc
/// `awc = { version = "3.5.0", features = ["rustls"] }`
#[actix_rt::main]
async fn main() -> Result<(), Box<dyn StdError>> {
    env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));

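For context on the example being edited, here is a minimal awc request sketch in the same spirit; the target URL and header value are illustrative and error handling is simplified. As the hunk's comment notes, `https` URLs additionally require enabling a TLS feature on awc.

```rust
// Minimal awc client sketch; URL and User-Agent value are illustrative.
use std::error::Error as StdError;

#[actix_rt::main]
async fn main() -> Result<(), Box<dyn StdError>> {
    // Build a client with default settings.
    let client = awc::Client::new();

    // Configure and send a GET request over plain HTTP.
    let res = client
        .get("http://www.rust-lang.org/")
        .append_header(("User-Agent", "awc-example"))
        .send()
        .await?;

    println!("response status: {}", res.status());
    Ok(())
}
```
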
83
justfile
@ -3,6 +3,7 @@ _list:

# Format workspace.
fmt:
    just --unstable --fmt
    cargo +nightly fmt
    fd --hidden --type=file --extension=md --extension=yml --exec-batch npx -y prettier --write

@ -17,20 +18,22 @@ msrv := ```
    | sed -E 's/^1\.([0-9]{2})$/1\.\1\.0/'
```
msrv_rustup := "+" + msrv

non_linux_all_features_list := ```
    cargo metadata --format-version=1 \
    | jq '.packages[] | select(.source == null) | .features | keys' \
    | jq -r --slurp \
        --arg exclusions "tokio-uring,io-uring,experimental-io-uring" \
        --arg exclusions "__tls,__compress,tokio-uring,io-uring,experimental-io-uring" \
        'add | unique | . - ($exclusions | split(",")) | join(",")'
```
all_crate_features := if os() == "linux" { "--all-features" } else { "--features='" + non_linux_all_features_list + "'" }

all_crate_features := if os() == "linux" {
    "--all-features"
} else {
    "--features='" + non_linux_all_features_list + "'"
}
[private]
check-min:
    cargo hack --workspace check --no-default-features

[private]
check-default:
    cargo hack --workspace check

# Run Clippy over workspace.
clippy toolchain="":
@ -53,9 +56,33 @@ test-docs toolchain="": && doc
# Test workspace.
test-all toolchain="": (test toolchain) (test-docs toolchain)

# Test workspace and collect coverage info.
[private]
test-coverage toolchain="":
    cargo {{ toolchain }} llvm-cov nextest --no-report {{ all_crate_features }}
    cargo {{ toolchain }} llvm-cov --doc --no-report {{ all_crate_features }}

# Test workspace and generate Codecov report.
test-coverage-codecov toolchain="": (test-coverage toolchain)
    cargo {{ toolchain }} llvm-cov report --doctests --codecov --output-path=codecov.json

# Test workspace and generate LCOV report.
test-coverage-lcov toolchain="": (test-coverage toolchain)
    cargo {{ toolchain }} llvm-cov report --doctests --lcov --output-path=lcov.info

# Document crates in workspace.
doc *args:
    RUSTDOCFLAGS="--cfg=docsrs -Dwarnings" cargo +nightly doc --no-deps --workspace {{ all_crate_features }} {{ args }}
doc *args: && doc-set-workspace-crates
    RUSTDOCFLAGS="--cfg=docsrs -Dwarnings" cargo +nightly doc --workspace {{ all_crate_features }} {{ args }}

[private]
doc-set-workspace-crates:
    #!/usr/bin/env bash
    (
        echo "window.ALL_CRATES ="
        cargo metadata --format-version=1 \
        | jq '[.packages[] | select(.source == null) | .targets | map(select(.doc) | .name)] | flatten'
        echo ";"
    ) > "$(cargo metadata --format-version=1 | jq -r '.target_directory')/doc/crates.js"

# Document crates in workspace and watch for changes.
doc-watch:
@ -65,4 +92,42 @@ doc-watch:
# Update READMEs from crate root documentation.
update-readmes: && fmt
    cd ./actix-files && cargo rdme --force
    cd ./actix-http-test && cargo rdme --force
    cd ./actix-router && cargo rdme --force
    cd ./actix-multipart && cargo rdme --force
    cd ./actix-test && cargo rdme --force

feature_combo_skip_list := if os() == "linux" { "__tls,__compress" } else { "__tls,__compress,experimental-io-uring" }

# Checks compatibility of feature combinations.
check-feature-combinations:
    cargo hack --workspace \
        --feature-powerset --depth=4 \
        --skip={{ feature_combo_skip_list }} \
        check

# Check for unintentional external type exposure on all crates in workspace.
check-external-types-all toolchain="+nightly":
    #!/usr/bin/env bash
    set -euo pipefail
    exit=0
    for f in $(find . -mindepth 2 -maxdepth 2 -name Cargo.toml | grep -vE "\-codegen/|\-derive/|\-macros/"); do
        if ! just check-external-types-manifest "$f" {{ toolchain }}; then exit=1; fi
        echo
        echo
    done
    exit $exit

# Check for unintentional external type exposure on all crates in workspace.
check-external-types-all-table toolchain="+nightly":
    #!/usr/bin/env bash
    set -euo pipefail
    for f in $(find . -mindepth 2 -maxdepth 2 -name Cargo.toml | grep -vE "\-codegen/|\-derive/|\-macros/"); do
        echo
        echo "Checking for $f"
        just check-external-types-manifest "$f" {{ toolchain }} --output-format=markdown-table
    done

# Check for unintentional external type exposure on a crate.
check-external-types-manifest manifest_path toolchain="+nightly" *extra_args="":
    cargo {{ toolchain }} check-external-types --manifest-path "{{ manifest_path }}" {{ extra_args }}

@ -169,3 +169,5 @@ if [ "$GH_RELEASE" = 'y' ] || [ "$GH_RELEASE" = 'Y' ]; then
fi

echo

cargo update >/dev/null 2>&1 || true