mirror of https://github.com/fafhrd91/actix-web synced 2025-09-01 01:16:59 +02:00

Compare commits


2 Commits

Author SHA1 Message Date
Rob Ede
34fdc808e2 implement from<bytes/vec> for payload 2025-03-09 16:21:24 +00:00
Ultra-Code
f0d6e1b25a feat: Add from_bytes/u8_bytes to dev::Payload
This allows convenient construction of a Payload from bytes, which is useful in middleware

closes actix/actix-web#3589

Add doc comment and changelog entry
2025-03-03 23:35:54 +00:00
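
For context, a minimal hedged sketch (not part of the diff) of the use case the second commit describes: a middleware that has buffered a request body and hands it back downstream as a fresh payload, assuming the `From<Bytes>` conversion for `dev::Payload` these commits add. The helper name is illustrative.

```rust
use actix_web::{
    dev::{Payload, ServiceRequest},
    web::Bytes,
};

// Hypothetical helper: after a middleware has drained and inspected the body,
// hand the buffered bytes back to downstream extractors as a fresh payload.
// Assumes the `From<Bytes>` impl for `dev::Payload` added by these commits.
fn restore_payload(req: &mut ServiceRequest, buffered: Bytes) {
    req.set_payload(Payload::from(buffered));
}
```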
84 changed files with 490 additions and 5333 deletions

View File

@@ -1,8 +1,7 @@
disallowed-names = [
"..",
"e", # no single letter error bindings
"e", # no single letter error bindings
]
disallowed-methods = [
{ path = "std::cell::RefCell::default()", reason = "prefer explicit inner type default (remove allow-invalid when rust-lang/rust-clippy/#8581 is fixed)", allow-invalid = true },
{ path = "std::rc::Rc::default()", reason = "prefer explicit inner type default (remove allow-invalid when rust-lang/rust-clippy/#8581 is fixed)", allow-invalid = true },
"std::cell::RefCell::default()",
"std::rc::Rc::default()",
]

View File

@@ -1,13 +0,0 @@
version: "0.2"
words:
- actix
- addrs
- bytestring
- httparse
- msrv
- realip
- rustls
- rustup
- serde
- uring
- zstd

.github/FUNDING.yml (vendored, 2 changed lines)
View File

@@ -1,3 +1,3 @@
# These are supported funding model platforms
github: [robjtede, JohnTitor]
github: [robjtede]

View File

@@ -1,11 +1,10 @@
version: 2
updates:
- package-ecosystem: github-actions
directory: /
schedule:
interval: weekly
- package-ecosystem: cargo
directory: /
schedule:
interval: weekly
versioning-strategy: lockfile-only
- package-ecosystem: github-actions
directory: /
schedule:
interval: weekly

View File

@@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Install Rust
run: |

View File

@@ -28,11 +28,11 @@ jobs:
runs-on: ${{ matrix.target.os }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Install nasm
if: matrix.target.os == 'windows-latest'
uses: ilammy/setup-nasm@72793074d3c8cdda771dba85f6deafe00623038b # v1.5.2
uses: ilammy/setup-nasm@v1.5.1
- name: Install OpenSSL
if: matrix.target.os == 'windows-latest'
@@ -44,12 +44,12 @@ jobs:
echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV
- name: Install Rust (${{ matrix.version.name }})
uses: actions-rust-lang/setup-rust-toolchain@ab6845274e2ff01cd4462007e1a9d9df1ab49f42 # v1.14.0
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
with:
toolchain: ${{ matrix.version.version }}
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
uses: taiki-e/install-action@f63c33fd96cc1e69a29bafd06541cf28588b43a4 # v2.58.21
uses: taiki-e/install-action@v2.49.0
with:
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
@@ -71,19 +71,19 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Free Disk Space
run: ./scripts/free-disk-space.sh
- name: Setup mold linker
uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
uses: rui314/setup-mold@v1
- name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@ab6845274e2ff01cd4462007e1a9d9df1ab49f42 # v1.14.0
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
- name: Install just, cargo-hack
uses: taiki-e/install-action@f63c33fd96cc1e69a29bafd06541cf28588b43a4 # v2.58.21
uses: taiki-e/install-action@v2.49.0
with:
tool: just,cargo-hack

View File

@@ -18,7 +18,7 @@ concurrency:
jobs:
read_msrv:
name: Read MSRV
uses: actions-rust-lang/msrv/.github/workflows/msrv.yml@8b553824444060021f2843d7b4d803f3624d15e5 # v0.1.0
uses: actions-rust-lang/msrv/.github/workflows/msrv.yml@v0.1.0
build_and_test:
needs: read_msrv
@@ -39,11 +39,11 @@ jobs:
runs-on: ${{ matrix.target.os }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Install nasm
if: matrix.target.os == 'windows-latest'
uses: ilammy/setup-nasm@72793074d3c8cdda771dba85f6deafe00623038b # v1.5.2
uses: ilammy/setup-nasm@v1.5.1
- name: Install OpenSSL
if: matrix.target.os == 'windows-latest'
@@ -56,15 +56,15 @@ jobs:
- name: Setup mold linker
if: matrix.target.os == 'ubuntu-latest'
uses: rui314/setup-mold@7344740a9418dcdcb481c7df83d9fbd1d5072d7d # v1
uses: rui314/setup-mold@v1
- name: Install Rust (${{ matrix.version.name }})
uses: actions-rust-lang/setup-rust-toolchain@ab6845274e2ff01cd4462007e1a9d9df1ab49f42 # v1.14.0
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
with:
toolchain: ${{ matrix.version.version }}
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
uses: taiki-e/install-action@f63c33fd96cc1e69a29bafd06541cf28588b43a4 # v2.58.21
uses: taiki-e/install-action@v2.49.0
with:
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
@@ -89,10 +89,10 @@ jobs:
name: io-uring tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@ab6845274e2ff01cd4462007e1a9d9df1ab49f42 # v1.14.0
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
with:
toolchain: nightly
@@ -105,15 +105,15 @@ jobs:
name: doc tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Install Rust (nightly)
uses: actions-rust-lang/setup-rust-toolchain@ab6845274e2ff01cd4462007e1a9d9df1ab49f42 # v1.14.0
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
with:
toolchain: nightly
- name: Install just
uses: taiki-e/install-action@f63c33fd96cc1e69a29bafd06541cf28588b43a4 # v2.58.21
uses: taiki-e/install-action@v2.49.0
with:
tool: just

View File

@@ -15,16 +15,16 @@ jobs:
coverage:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Install Rust (nightly)
uses: actions-rust-lang/setup-rust-toolchain@ab6845274e2ff01cd4462007e1a9d9df1ab49f42 # v1.14.0
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
with:
toolchain: nightly
components: llvm-tools
- name: Install just, cargo-llvm-cov, cargo-nextest
uses: taiki-e/install-action@f63c33fd96cc1e69a29bafd06541cf28588b43a4 # v2.58.21
uses: taiki-e/install-action@v2.49.0
with:
tool: just,cargo-llvm-cov,cargo-nextest
@@ -32,7 +32,7 @@ jobs:
run: just test-coverage-codecov
- name: Upload coverage to Codecov
uses: codecov/codecov-action@fdcc8476540edceab3de004e990f80d881c6cc00 # v5.5.0
uses: codecov/codecov-action@v5.3.1
with:
files: codecov.json
fail_ci_if_error: true

View File

@@ -15,10 +15,10 @@ jobs:
fmt:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Install Rust (nightly)
uses: actions-rust-lang/setup-rust-toolchain@ab6845274e2ff01cd4462007e1a9d9df1ab49f42 # v1.14.0
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
with:
toolchain: nightly
components: rustfmt
@@ -33,15 +33,15 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@ab6845274e2ff01cd4462007e1a9d9df1ab49f42 # v1.14.0
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
with:
components: clippy
- name: Check with Clippy
uses: giraffate/clippy-action@13b9d32482f25d29ead141b79e7e04e7900281e0 # v1.0.1
uses: giraffate/clippy-action@v1.0.1
with:
reporter: github-pr-check
github_token: ${{ secrets.GITHUB_TOKEN }}
@@ -52,10 +52,10 @@ jobs:
lint-docs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Install Rust (nightly)
uses: actions-rust-lang/setup-rust-toolchain@ab6845274e2ff01cd4462007e1a9d9df1ab49f42 # v1.14.0
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
with:
toolchain: nightly
components: rust-docs
@@ -69,20 +69,20 @@ jobs:
if: false # rustdoc mismatch currently
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v4
- name: Install Rust (${{ vars.RUST_VERSION_EXTERNAL_TYPES }})
uses: actions-rust-lang/setup-rust-toolchain@ab6845274e2ff01cd4462007e1a9d9df1ab49f42 # v1.14.0
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
with:
toolchain: ${{ vars.RUST_VERSION_EXTERNAL_TYPES }}
- name: Install just
uses: taiki-e/install-action@f63c33fd96cc1e69a29bafd06541cf28588b43a4 # v2.58.21
uses: taiki-e/install-action@v2.49.0
with:
tool: just
- name: Install cargo-check-external-types
uses: taiki-e/cache-cargo-install-action@b33c63d3b3c85540f4eba8a4f71a5cc0ce030855 # v2.3.0
uses: taiki-e/cache-cargo-install-action@v2.1.1
with:
tool: cargo-check-external-types

.gitignore (vendored, 1 changed line)
View File

@@ -1,3 +1,4 @@
Cargo.lock
target/
guide/build/
/gh-pages

View File

@@ -1,38 +0,0 @@
exclude = ["target/*"]
include = ["**/*.toml"]
[formatting]
column_width = 100
align_comments = false
[[rule]]
include = ["**/Cargo.toml"]
keys = ["features"]
formatting.column_width = 105
formatting.reorder_keys = false
[[rule]]
include = ["**/Cargo.toml"]
keys = [
"dependencies",
"*-dependencies",
"workspace.dependencies",
"workspace.*-dependencies",
"target.*.dependencies",
"target.*.*-dependencies",
]
formatting.column_width = 120
formatting.reorder_keys = true
[[rule]]
include = ["**/Cargo.toml"]
keys = [
"dependencies.*",
"*-dependencies.*",
"workspace.dependencies.*",
"workspace.*-dependencies.*",
"target.*.dependencies",
"target.*.*-dependencies",
]
formatting.column_width = 120
formatting.reorder_keys = false

Cargo.lock (generated, 4070 changed lines)

File diff suppressed because it is too large.

View File

@@ -1,17 +1,17 @@
[workspace]
resolver = "2"
members = [
"actix-files",
"actix-http-test",
"actix-http",
"actix-multipart",
"actix-multipart-derive",
"actix-router",
"actix-test",
"actix-web-actors",
"actix-web-codegen",
"actix-web",
"awc",
"actix-files",
"actix-http-test",
"actix-http",
"actix-multipart",
"actix-multipart-derive",
"actix-router",
"actix-test",
"actix-web-actors",
"actix-web-codegen",
"actix-web",
"awc",
]
[workspace.package]

View File

@@ -2,9 +2,6 @@
## Unreleased
## 0.6.7
- Add `{Files, NamedFile}::read_mode_threshold()` methods to allow faster synchronous reads of small files.
- Minimum supported Rust version (MSRV) is now 1.75.
## 0.6.6

View File

@@ -1,7 +1,10 @@
[package]
name = "actix-files"
version = "0.6.7"
authors = ["Nikolay Kim <fafhrd91@gmail.com>", "Rob Ede <robjtede@icloud.com>"]
version = "0.6.6"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
]
description = "Static file serving for Actix Web"
keywords = ["actix", "http", "async", "futures"]
homepage = "https://actix.rs"
@@ -11,7 +14,13 @@ license = "MIT OR Apache-2.0"
edition = "2021"
[package.metadata.cargo_check_external_types]
allowed_external_types = ["actix_http::*", "actix_service::*", "actix_web::*", "http::*", "mime::*"]
allowed_external_types = [
"actix_http::*",
"actix_service::*",
"actix_web::*",
"http::*",
"mime::*",
]
[features]
experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]
@@ -37,7 +46,7 @@ v_htmlescape = "0.15.5"
# experimental-io-uring
[target.'cfg(target_os = "linux")'.dependencies]
tokio-uring = { version = "0.5", optional = true, features = ["bytes"] }
actix-server = { version = "2.4", optional = true } # ensure matching tokio-uring versions
actix-server = { version = "2.4", optional = true } # ensure matching tokio-uring versions
[dev-dependencies]
actix-rt = "2.7"

View File

@@ -3,11 +3,11 @@
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files)
[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.7)](https://docs.rs/actix-files/0.6.7)
[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.6)](https://docs.rs/actix-files/0.6.6)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![License](https://img.shields.io/crates/l/actix-files.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-files/0.6.7/status.svg)](https://deps.rs/crate/actix-files/0.6.7)
[![dependency status](https://deps.rs/crate/actix-files/0.6.6/status.svg)](https://deps.rs/crate/actix-files/0.6.6)
[![Download](https://img.shields.io/crates/d/actix-files.svg)](https://crates.io/crates/actix-files)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)

View File

@@ -14,12 +14,6 @@ use pin_project_lite::pin_project;
use super::named::File;
#[derive(Debug, Clone, Copy)]
pub(crate) enum ReadMode {
Sync,
Async,
}
pin_project! {
/// Adapter to read a `std::file::File` in chunks.
#[doc(hidden)]
@@ -30,7 +24,6 @@ pin_project! {
state: ChunkedReadFileState<Fut>,
counter: u64,
callback: F,
read_mode: ReadMode,
}
}
@@ -64,7 +57,6 @@ pub(crate) fn new_chunked_read(
size: u64,
offset: u64,
file: File,
read_mode_threshold: u64,
) -> impl Stream<Item = Result<Bytes, Error>> {
ChunkedReadFile {
size,
@@ -77,50 +69,31 @@ pub(crate) fn new_chunked_read(
},
counter: 0,
callback: chunked_read_file_callback,
read_mode: if size < read_mode_threshold {
ReadMode::Sync
} else {
ReadMode::Async
},
}
}
#[cfg(not(feature = "experimental-io-uring"))]
fn chunked_read_file_callback_sync(
async fn chunked_read_file_callback(
mut file: File,
offset: u64,
max_bytes: usize,
) -> Result<(File, Bytes), io::Error> {
) -> Result<(File, Bytes), Error> {
use io::{Read as _, Seek as _};
let mut buf = Vec::with_capacity(max_bytes);
let res = actix_web::web::block(move || {
let mut buf = Vec::with_capacity(max_bytes);
file.seek(io::SeekFrom::Start(offset))?;
file.seek(io::SeekFrom::Start(offset))?;
let n_bytes = file.by_ref().take(max_bytes as u64).read_to_end(&mut buf)?;
let n_bytes = file.by_ref().take(max_bytes as u64).read_to_end(&mut buf)?;
if n_bytes == 0 {
Err(io::Error::from(io::ErrorKind::UnexpectedEof))
} else {
Ok((file, Bytes::from(buf)))
}
}
#[cfg(not(feature = "experimental-io-uring"))]
#[inline]
async fn chunked_read_file_callback(
file: File,
offset: u64,
max_bytes: usize,
read_mode: ReadMode,
) -> Result<(File, Bytes), Error> {
let res = match read_mode {
ReadMode::Sync => chunked_read_file_callback_sync(file, offset, max_bytes)?,
ReadMode::Async => {
actix_web::web::block(move || chunked_read_file_callback_sync(file, offset, max_bytes))
.await??
if n_bytes == 0 {
Err(io::Error::from(io::ErrorKind::UnexpectedEof))
} else {
Ok((file, Bytes::from(buf)))
}
};
})
.await??;
Ok(res)
}
@@ -198,7 +171,7 @@ where
#[cfg(not(feature = "experimental-io-uring"))]
impl<F, Fut> Stream for ChunkedReadFile<F, Fut>
where
F: Fn(File, u64, usize, ReadMode) -> Fut,
F: Fn(File, u64, usize) -> Fut,
Fut: Future<Output = Result<(File, Bytes), Error>>,
{
type Item = Result<Bytes, Error>;
@@ -220,7 +193,7 @@ where
.take()
.expect("ChunkedReadFile polled after completion");
let fut = (this.callback)(file, offset, max_bytes, *this.read_mode);
let fut = (this.callback)(file, offset, max_bytes);
this.state
.project_replace(ChunkedReadFileState::Future { fut });

View File

@@ -49,7 +49,6 @@ pub struct Files {
use_guards: Option<Rc<dyn Guard>>,
guards: Vec<Rc<dyn Guard>>,
hidden_files: bool,
read_mode_threshold: u64,
}
impl fmt::Debug for Files {
@@ -74,7 +73,6 @@ impl Clone for Files {
use_guards: self.use_guards.clone(),
guards: self.guards.clone(),
hidden_files: self.hidden_files,
read_mode_threshold: self.read_mode_threshold,
}
}
}
@@ -121,7 +119,6 @@ impl Files {
use_guards: None,
guards: Vec::new(),
hidden_files: false,
read_mode_threshold: 0,
}
}
@@ -207,23 +204,6 @@ impl Files {
self
}
/// Sets the size threshold that determines file read mode (sync/async).
///
/// When a file is smaller than the threshold (in bytes), it is read synchronously (blocking)
/// on the worker thread; files at or above the threshold are read via the blocking thread
/// pool so that large files do not block the worker.
///
/// Tweaking this value according to your expected usage may lead to significant performance
/// gains (or losses in other handlers, if `size` is too high).
///
/// When the `experimental-io-uring` crate feature is enabled, file reads are always async.
///
/// Default is 0, meaning all files are read asynchronously.
pub fn read_mode_threshold(mut self, size: u64) -> Self {
self.read_mode_threshold = size;
self
}
/// Specifies whether to use ETag or not.
///
/// Default is true.
@@ -387,7 +367,6 @@ impl ServiceFactory<ServiceRequest> for Files {
file_flags: self.file_flags,
guards: self.use_guards.clone(),
hidden_files: self.hidden_files,
size_threshold: self.read_mode_threshold,
};
if let Some(ref default) = *self.default.borrow() {
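
As a usage note for the `read_mode_threshold()` builder removed in this hunk, a hedged sketch of how it would be wired into a `Files` service; the threshold value and paths are illustrative.

```rust
use actix_files::Files;
use actix_web::{App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new().service(
            // Files under 64 KiB are read synchronously; larger files go
            // through the blocking thread pool (per the doc comment above).
            Files::new("/static", "./static").read_mode_threshold(64 * 1024),
        )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```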

View File

@@ -80,7 +80,6 @@ pub struct NamedFile {
pub(crate) content_type: Mime,
pub(crate) content_disposition: ContentDisposition,
pub(crate) encoding: Option<ContentEncoding>,
pub(crate) read_mode_threshold: u64,
}
#[cfg(not(feature = "experimental-io-uring"))]
@@ -201,7 +200,6 @@ impl NamedFile {
encoding,
status_code: StatusCode::OK,
flags: Flags::default(),
read_mode_threshold: 0,
})
}
@@ -355,23 +353,6 @@ impl NamedFile {
self
}
/// Sets the size threshold that determines file read mode (sync/async).
///
/// When a file is smaller than the threshold (in bytes), it is read synchronously (blocking)
/// on the worker thread; files at or above the threshold are read via the blocking thread
/// pool so that large files do not block the worker.
///
/// Tweaking this value according to your expected usage may lead to significant performance
/// gains (or losses in other handlers, if `size` is too high).
///
/// When the `experimental-io-uring` crate feature is enabled, file reads are always async.
///
/// Default is 0, meaning all files are read asynchronously.
pub fn read_mode_threshold(mut self, size: u64) -> Self {
self.read_mode_threshold = size;
self
}
/// Specifies whether to return `ETag` header in response.
///
/// Default is true.
@@ -459,8 +440,7 @@ impl NamedFile {
res.insert_header((header::CONTENT_ENCODING, current_encoding.as_str()));
}
let reader =
chunked::new_chunked_read(self.md.len(), 0, self.file, self.read_mode_threshold);
let reader = chunked::new_chunked_read(self.md.len(), 0, self.file);
return res.streaming(reader);
}
@@ -597,7 +577,7 @@ impl NamedFile {
.map_into_boxed_body();
}
let reader = chunked::new_chunked_read(length, offset, self.file, self.read_mode_threshold);
let reader = chunked::new_chunked_read(length, offset, self.file);
if offset != 0 || length != self.md.len() {
res.status(StatusCode::PARTIAL_CONTENT);
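
Likewise, a hedged sketch of the per-response variant on `NamedFile` removed above; the path and threshold are illustrative.

```rust
use actix_files::NamedFile;
use actix_web::{get, Error, HttpRequest, HttpResponse};

#[get("/report")]
async fn report(req: HttpRequest) -> Result<HttpResponse, Error> {
    let file = NamedFile::open_async("./assets/report.pdf").await?;
    // Read this file synchronously if it is smaller than 16 KiB.
    Ok(file.read_mode_threshold(16 * 1024).into_response(&req))
}
```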

View File

@@ -39,7 +39,6 @@ pub struct FilesServiceInner {
pub(crate) file_flags: named::Flags,
pub(crate) guards: Option<Rc<dyn Guard>>,
pub(crate) hidden_files: bool,
pub(crate) size_threshold: u64,
}
impl fmt::Debug for FilesServiceInner {
@@ -71,9 +70,7 @@ impl FilesService {
named_file.flags = self.file_flags;
let (req, _) = req.into_parts();
let res = named_file
.read_mode_threshold(self.size_threshold)
.into_response(&req);
let res = named_file.into_response(&req);
ServiceResponse::new(req, res)
}
@@ -172,7 +169,17 @@ impl Service<ServiceRequest> for FilesService {
}
} else {
match NamedFile::open_async(&path).await {
Ok(named_file) => Ok(this.serve_named_file(req, named_file)),
Ok(mut named_file) => {
if let Some(ref mime_override) = this.mime_override {
let new_disposition = mime_override(&named_file.content_type.type_());
named_file.content_disposition.disposition = new_disposition;
}
named_file.flags = this.file_flags;
let (req, _) = req.into_parts();
let res = named_file.into_response(&req);
Ok(ServiceResponse::new(req, res))
}
Err(err) => this.handle_err(err, req).await,
}
}

View File

@@ -7,10 +7,10 @@ keywords = ["http", "web", "framework", "async", "futures"]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
categories = [
"network-programming",
"asynchronous",
"web-programming::http-server",
"web-programming::websocket",
"network-programming",
"asynchronous",
"web-programming::http-server",
"web-programming::websocket",
]
license = "MIT OR Apache-2.0"
edition = "2021"
@@ -20,14 +20,14 @@ features = []
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_codec::*",
"actix_http::*",
"actix_server::*",
"awc::*",
"bytes::*",
"futures_core::*",
"http::*",
"tokio::*",
"actix_codec::*",
"actix_http::*",
"actix_server::*",
"awc::*",
"bytes::*",
"futures_core::*",
"http::*",
"tokio::*",
]
[features]
@@ -37,25 +37,25 @@ default = []
openssl = ["tls-openssl", "awc/openssl"]
[dependencies]
actix-codec = "0.5"
actix-rt = "2.2"
actix-server = "2"
actix-service = "2"
actix-codec = "0.5"
actix-tls = "3"
actix-utils = "3"
actix-rt = "2.2"
actix-server = "2"
awc = { version = "3", default-features = false }
bytes = "1"
futures-core = { version = "0.3.17", default-features = false }
http = "0.2.7"
log = "0.4"
socket2 = "0.5"
serde = "1"
serde_json = "1"
serde_urlencoded = "0.7"
slab = "0.4"
socket2 = "0.6"
serde_urlencoded = "0.7"
tls-openssl = { version = "0.10.55", package = "openssl", optional = true }
tokio = { version = "1.38.2", features = ["sync"] }
tokio = { version = "1.24.2", features = ["sync"] }
[dev-dependencies]
actix-http = "3"

View File

@@ -2,20 +2,6 @@
## Unreleased
- Properly wake Payload receivers when feeding errors or EOF
## 3.11.1
- Prevent more hangs after client disconnects.
- More malformed WebSocket frames are now gracefully rejected.
- Using `TestRequest::set_payload()` now sets a Content-Length header.
## 3.11.0
- Update `brotli` dependency to `8`.
## 3.10.0
### Added
- Add `header::CLEAR_SITE_DATA` constant.

View File

@@ -1,54 +1,58 @@
[package]
name = "actix-http"
version = "3.11.1"
authors = ["Nikolay Kim <fafhrd91@gmail.com>", "Rob Ede <robjtede@icloud.com>"]
version = "3.9.0"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
]
description = "HTTP types and services for the Actix ecosystem"
keywords = ["actix", "http", "framework", "async", "futures"]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
categories = [
"network-programming",
"asynchronous",
"web-programming::http-server",
"web-programming::websocket",
"network-programming",
"asynchronous",
"web-programming::http-server",
"web-programming::websocket",
]
license.workspace = true
edition.workspace = true
rust-version.workspace = true
[package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"]
features = [
"http2",
"ws",
"openssl",
"rustls-0_20",
"rustls-0_21",
"rustls-0_22",
"rustls-0_23",
"compress-brotli",
"compress-gzip",
"compress-zstd",
"http2",
"ws",
"openssl",
"rustls-0_20",
"rustls-0_21",
"rustls-0_22",
"rustls-0_23",
"compress-brotli",
"compress-gzip",
"compress-zstd",
]
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_codec::*",
"actix_service::*",
"actix_tls::*",
"actix_utils::*",
"bytes::*",
"bytestring::*",
"encoding_rs::*",
"futures_core::*",
"h2::*",
"http::*",
"httparse::*",
"language_tags::*",
"mime::*",
"openssl::*",
"rustls::*",
"tokio_util::*",
"tokio::*",
"actix_codec::*",
"actix_service::*",
"actix_tls::*",
"actix_utils::*",
"bytes::*",
"bytestring::*",
"encoding_rs::*",
"futures_core::*",
"h2::*",
"http::*",
"httparse::*",
"language_tags::*",
"mime::*",
"openssl::*",
"rustls::*",
"tokio_util::*",
"tokio::*",
]
[features]
@@ -58,10 +62,12 @@ default = []
http2 = ["dep:h2"]
# WebSocket protocol implementation
ws = ["dep:local-channel", "dep:base64", "dep:rand", "dep:sha1"]
# HAProxy PROXY protocol support
haproxy = ["dep:nom"]
ws = [
"dep:local-channel",
"dep:base64",
"dep:rand",
"dep:sha1",
]
# TLS via OpenSSL
openssl = ["__tls", "actix-tls/accept", "actix-tls/openssl"]
@@ -83,8 +89,8 @@ rustls-0_23 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_23"]
# Compression codecs
compress-brotli = ["__compress", "dep:brotli"]
compress-gzip = ["__compress", "dep:flate2"]
compress-zstd = ["__compress", "dep:zstd"]
compress-gzip = ["__compress", "dep:flate2"]
compress-zstd = ["__compress", "dep:zstd"]
# Internal (PRIVATE!) features used to aid testing and checking feature status.
# Don't rely on these whatsoever. They are semver-exempt and may disappear at anytime.
@@ -95,12 +101,10 @@ __compress = []
__tls = []
[dependencies]
actix-codec = "0.5"
# actix-proxy-protocol = "0.0.2"
actix-proxy-protocol = { path = "../../actix-web-lab/actix-proxy-protocol" }
actix-rt = { version = "2.2", default-features = false }
actix-service = "2"
actix-codec = "0.5"
actix-utils = "3"
actix-rt = { version = "2.2", default-features = false }
bitflags = "2"
bytes = "1"
@@ -109,7 +113,6 @@ derive_more = { version = "2", features = ["as_ref", "deref", "deref_mut", "disp
encoding_rs = "0.8"
foldhash = "0.1"
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
http = "0.2.7"
httparse = "1.5.1"
httpdate = "1.0.1"
@@ -119,27 +122,24 @@ mime = "0.3.4"
percent-encoding = "2.1"
pin-project-lite = "0.2"
smallvec = "1.6.1"
tokio = { version = "1.38.2", features = [] }
tokio = { version = "1.24.2", features = [] }
tokio-util = { version = "0.7", features = ["io", "codec"] }
tracing = { version = "0.1.30", default-features = false, features = ["log"] }
# http2
h2 = { version = "0.3.27", optional = true }
h2 = { version = "0.3.26", optional = true }
# websockets
base64 = { version = "0.22", optional = true }
local-channel = { version = "0.1", optional = true }
base64 = { version = "0.22", optional = true }
rand = { version = "0.9", optional = true }
sha1 = { version = "0.10", optional = true }
# proxy
nom = { version = "8", optional = true }
# openssl/rustls
actix-tls = { version = "3.4", default-features = false, optional = true }
# compress-*
brotli = { version = "8", optional = true }
brotli = { version = "7", optional = true }
flate2 = { version = "1.0.13", optional = true }
zstd = { version = "0.13", optional = true }
@@ -155,17 +155,17 @@ divan = "0.1.8"
env_logger = "0.11"
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
memchr = "2.4"
once_cell = "1.21"
once_cell = "1.9"
rcgen = "0.13"
regex = "1.3"
rustls-pemfile = "2"
rustversion = "1"
rustls-pemfile = "2"
serde = { version = "1", features = ["derive"] }
serde_json = "1.0"
static_assertions = "1"
tls-openssl = { package = "openssl", version = "0.10.55" }
tls-rustls_023 = { package = "rustls", version = "0.23" }
tokio = { version = "1.38.2", features = ["net", "rt", "macros", "sync"] }
tokio = { version = "1.24.2", features = ["net", "rt", "macros"] }
[lints]
workspace = true
@@ -174,10 +174,6 @@ workspace = true
name = "ws"
required-features = ["ws", "rustls-0_23"]
[[example]]
name = "haproxy"
required-features = ["http2", "haproxy"]
[[example]]
name = "tls_rustls"
required-features = ["http2", "rustls-0_23"]

View File

@@ -5,11 +5,11 @@
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http)
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.11.1)](https://docs.rs/actix-http/3.11.1)
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.9.0)](https://docs.rs/actix-http/3.9.0)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
<br />
[![dependency status](https://deps.rs/crate/actix-http/3.11.1/status.svg)](https://deps.rs/crate/actix-http/3.11.1)
[![dependency status](https://deps.rs/crate/actix-http/3.9.0/status.svg)](https://deps.rs/crate/actix-http/3.9.0)
[![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)

View File

@@ -1,45 +0,0 @@
use std::{io, time::Duration};
use actix_http::{Error, HttpService, Request, Response, StatusCode};
use actix_server::Server;
use bytes::BytesMut;
use futures_util::StreamExt as _;
use http::header::HeaderValue;
use tracing::info;
#[actix_rt::main]
async fn main() -> io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
Server::build()
.bind("echo", ("127.0.0.1", 8080), || {
HttpService::build()
.client_request_timeout(Duration::from_secs(20))
.client_disconnect_timeout(Duration::from_secs(20))
.finish(|mut req: Request| async move {
let mut body = BytesMut::new();
while let Some(item) = req.payload().next().await {
body.extend_from_slice(&item?);
}
info!("request body: {body:?}");
let res = Response::build(StatusCode::OK)
.insert_header(("x-head", HeaderValue::from_static("dummy value!")))
.body(body);
Ok::<_, Error>(res)
})
.tcp_auto_h2c_proxy_protocol_v1()
})?
.workers(2)
.run()
.await
}
static_assertions::assert_impl_all!(
tokio::io::BufReader<tokio::net::TcpStream>:
tokio::io::AsyncRead,
tokio::io::AsyncWrite,
Unpin,
);

View File

@@ -31,7 +31,7 @@ async fn main() -> io::Result<()> {
actix_rt::time::sleep(Duration::from_secs(1)).await;
yield Err(io::Error::other("abc"));
yield Err(io::Error::new(io::ErrorKind::Other, "abc"));
})))
})
.tcp()

View File

@@ -190,7 +190,7 @@ mod tests {
#[actix_rt::test]
async fn to_body_limit_error() {
let err_stream = stream::once(async { Err(io::Error::other("")) });
let err_stream = stream::once(async { Err(io::Error::new(io::ErrorKind::Other, "")) });
let body = SizedStream::new(8, err_stream);
// not too big, but propagates error from body stream
assert!(to_bytes_limited(body, 10).await.unwrap().is_err());

View File

@@ -100,7 +100,10 @@ where
loop {
if let Some(ref mut fut) = this.fut {
let (chunk, decoder) = ready!(Pin::new(fut).poll(cx)).map_err(|_| {
PayloadError::Io(io::Error::other("Blocking task was cancelled unexpectedly"))
PayloadError::Io(io::Error::new(
io::ErrorKind::Other,
"Blocking task was cancelled unexpectedly",
))
})??;
*this.decoder = Some(decoder);

View File

@@ -183,7 +183,8 @@ where
if let Some(ref mut fut) = this.fut {
let mut encoder = ready!(Pin::new(fut).poll(cx))
.map_err(|_| {
EncoderError::Io(io::Error::other(
EncoderError::Io(io::Error::new(
io::ErrorKind::Other,
"Blocking task was cancelled unexpectedly",
))
})?

View File

@@ -415,7 +415,7 @@ mod tests {
#[test]
fn test_as_response() {
let orig = io::Error::other("other");
let orig = io::Error::new(io::ErrorKind::Other, "other");
let err: Error = ParseError::Io(orig).into();
assert_eq!(
format!("{}", err),
@@ -425,14 +425,14 @@ mod tests {
#[test]
fn test_error_display() {
let orig = io::Error::other("other");
let orig = io::Error::new(io::ErrorKind::Other, "other");
let err = Error::new_io().with_cause(orig);
assert_eq!("connection error: other", err.to_string());
}
#[test]
fn test_error_http_response() {
let orig = io::Error::other("other");
let orig = io::Error::new(io::ErrorKind::Other, "other");
let err = Error::new_io().with_cause(orig);
let resp: Response<BoxBody> = err.into();
assert_eq!(resp.status(), StatusCode::INTERNAL_SERVER_ERROR);
@@ -440,7 +440,7 @@ mod tests {
#[test]
fn test_payload_error() {
let err: PayloadError = io::Error::other("ParseError").into();
let err: PayloadError = io::Error::new(io::ErrorKind::Other, "ParseError").into();
assert!(err.to_string().contains("ParseError"));
let err = PayloadError::Incomplete(None);
@@ -475,7 +475,7 @@ mod tests {
#[test]
fn test_from() {
from_and_cause!(io::Error::other("other") => ParseError::Io(..));
from_and_cause!(io::Error::new(io::ErrorKind::Other, "other") => ParseError::Io(..));
from!(httparse::Error::HeaderName => ParseError::Header);
from!(httparse::Error::HeaderName => ParseError::Header);
from!(httparse::Error::HeaderValue => ParseError::Header);

View File

@@ -993,8 +993,6 @@ where
Poll::Ready(Ok(n)) => {
this.flags.remove(Flags::FINISHED);
eprintln!("readbuf contents: {:?}", this.read_buf);
if n == 0 {
return Ok(true);
}
@@ -1184,7 +1182,7 @@ where
let state_is_none = inner_p.state.is_none();
// read half is closed; we do not process any responses
if inner_p.flags.contains(Flags::READ_DISCONNECT) {
if inner_p.flags.contains(Flags::READ_DISCONNECT) && state_is_none {
trace!("read half closed; start shutdown");
inner_p.flags.insert(Flags::SHUTDOWN);
}
@@ -1218,9 +1216,6 @@ where
inner_p.shutdown_timer,
);
if inner_p.flags.contains(Flags::SHUTDOWN) {
cx.waker().wake_by_ref();
}
Poll::Pending
};

View File

@@ -310,10 +310,10 @@ impl MessageType for RequestHeadType {
Version::HTTP_11 => "HTTP/1.1",
Version::HTTP_2 => "HTTP/2.0",
Version::HTTP_3 => "HTTP/3.0",
_ => return Err(io::Error::other("Unsupported version")),
_ => return Err(io::Error::new(io::ErrorKind::Other, "unsupported version")),
}
)
.map_err(io::Error::other)
.map_err(|err| io::Error::new(io::ErrorKind::Other, err))
}
}
@@ -433,7 +433,7 @@ impl TransferEncoding {
buf.extend_from_slice(b"0\r\n\r\n");
} else {
writeln!(helpers::MutWriter(buf), "{:X}\r", msg.len())
.map_err(io::Error::other)?;
.map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;
buf.reserve(msg.len() + 2);
buf.extend_from_slice(msg);

View File

@@ -200,13 +200,11 @@ impl Inner {
#[inline]
fn set_error(&mut self, err: PayloadError) {
self.err = Some(err);
self.wake();
}
#[inline]
fn feed_eof(&mut self) {
self.eof = true;
self.wake();
}
#[inline]
@@ -255,13 +253,8 @@ impl Inner {
#[cfg(test)]
mod tests {
use std::{task::Poll, time::Duration};
use actix_rt::time::timeout;
use actix_utils::future::poll_fn;
use futures_util::{FutureExt, StreamExt};
use static_assertions::{assert_impl_all, assert_not_impl_any};
use tokio::sync::oneshot;
use super::*;
@@ -270,67 +263,6 @@ mod tests {
assert_impl_all!(Inner: Unpin, Send, Sync);
const WAKE_TIMEOUT: Duration = Duration::from_secs(2);
fn prepare_waking_test(
mut payload: Payload,
expected: Option<Result<(), ()>>,
) -> (oneshot::Receiver<()>, actix_rt::task::JoinHandle<()>) {
let (tx, rx) = oneshot::channel();
let handle = actix_rt::spawn(async move {
// Make sure to poll once to set the waker
poll_fn(|cx| {
assert!(payload.poll_next_unpin(cx).is_pending());
Poll::Ready(())
})
.await;
tx.send(()).unwrap();
// actix-rt is single-threaded, so this won't race with `rx.await`
let mut pend_once = false;
poll_fn(|_| {
if pend_once {
Poll::Ready(())
} else {
// Return pending without storing wakers, we already did on the previous
// `poll_fn`, now this task will only continue if the `sender` wakes us
pend_once = true;
Poll::Pending
}
})
.await;
let got = payload.next().now_or_never().unwrap();
match expected {
Some(Ok(_)) => assert!(got.unwrap().is_ok()),
Some(Err(_)) => assert!(got.unwrap().is_err()),
None => assert!(got.is_none()),
}
});
(rx, handle)
}
#[actix_rt::test]
async fn wake_on_error() {
let (mut sender, payload) = Payload::create(false);
let (rx, handle) = prepare_waking_test(payload, Some(Err(())));
rx.await.unwrap();
sender.set_error(PayloadError::Incomplete(None));
timeout(WAKE_TIMEOUT, handle).await.unwrap().unwrap();
}
#[actix_rt::test]
async fn wake_on_eof() {
let (mut sender, payload) = Payload::create(false);
let (rx, handle) = prepare_waking_test(payload, None);
rx.await.unwrap();
sender.feed_eof();
timeout(WAKE_TIMEOUT, handle).await.unwrap().unwrap();
}
#[actix_rt::test]
async fn test_unread_data() {
let (_, mut payload) = Payload::create(false);

View File

@@ -91,59 +91,6 @@ where
}
}
#[cfg(feature = "haproxy")]
impl<S, B, X, U> H1Service<actix_proxy_protocol::v1::TlsStream<TcpStream>, S, B, X, U>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>>,
S::InitError: fmt::Debug,
S::Response: Into<Response<B>>,
B: MessageBody,
X: ServiceFactory<Request, Config = (), Response = Request>,
X::Future: 'static,
X::Error: Into<Response<BoxBody>>,
X::InitError: fmt::Debug,
U: ServiceFactory<
(
Request,
Framed<actix_proxy_protocol::v1::TlsStream<TcpStream>, Codec>,
),
Config = (),
Response = (),
>,
U::Future: 'static,
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
/// Creates TCP stream service from HTTP service that consumes PROXY protocol v1 headers first.
///
/// The connection info is obtained from the PROXY header.
pub fn tcp_proxy_protocol_v1(
self,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = actix_proxy_protocol::v1::TlsError<std::io::Error, DispatchError>,
InitError = (),
> {
use actix_proxy_protocol::v1::{TlsError, TlsStream};
actix_proxy_protocol::v1::Acceptor::new()
.map_init_err(|_| unreachable!("TLS acceptor service factory does not error on init"))
.map_err(TlsError::into_service_error)
.map(|io: TlsStream<TcpStream>| {
let peer_addr = io.0.get_ref().peer_addr().ok();
(io, peer_addr)
})
.and_then(self.map_err(TlsError::Service))
}
}
#[cfg(feature = "openssl")]
mod openssl {
use actix_tls::accept::{

View File

@@ -240,131 +240,6 @@ where
}
}
#[cfg(feature = "haproxy")]
impl<S, B, X, U> HttpService<actix_proxy_protocol::v1::TlsStream<TcpStream>, S, B, X, U>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>> + 'static,
S::InitError: fmt::Debug,
S::Response: Into<Response<B>> + 'static,
<S::Service as Service<Request>>::Future: 'static,
B: MessageBody + 'static,
X: ServiceFactory<Request, Config = (), Response = Request>,
X::Future: 'static,
X::Error: Into<Response<BoxBody>>,
X::InitError: fmt::Debug,
U: ServiceFactory<
(
Request,
Framed<actix_proxy_protocol::v1::TlsStream<TcpStream>, h1::Codec>,
),
Config = (),
Response = (),
>,
U::Future: 'static,
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
/// Creates TCP stream service from HTTP service that consumes PROXY protocol v1 headers first.
///
/// The connection info is obtained from the PROXY header.
pub fn tcp_proxy_protocol_v1(
self,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = actix_proxy_protocol::v1::TlsError<std::io::Error, DispatchError>,
InitError = (),
> {
use actix_proxy_protocol::v1::{TlsError, TlsStream};
actix_proxy_protocol::v1::Acceptor::new()
.map_init_err(|_| unreachable!("TLS acceptor service factory does not error on init"))
.map_err(TlsError::into_service_error)
.map(|io: TlsStream<TcpStream>| {
let peer_addr = io.0.get_ref().peer_addr().ok();
(io, Protocol::Http1, peer_addr)
})
.and_then(self.map_err(TlsError::Service))
}
}
#[cfg(feature = "haproxy")]
impl<S, B, X, U> HttpService<actix_proxy_protocol::v1::TlsStream<TcpStream>, S, B, X, U>
where
S: ServiceFactory<Request, Config = ()>,
S::Future: 'static,
S::Error: Into<Response<BoxBody>> + 'static,
S::InitError: fmt::Debug,
S::Response: Into<Response<B>> + 'static,
<S::Service as Service<Request>>::Future: 'static,
B: MessageBody + 'static,
X: ServiceFactory<Request, Config = (), Response = Request>,
X::Future: 'static,
X::Error: Into<Response<BoxBody>>,
X::InitError: fmt::Debug,
U: ServiceFactory<
(
Request,
Framed<actix_proxy_protocol::v1::TlsStream<TcpStream>, h1::Codec>,
),
Config = (),
Response = (),
>,
U::Future: 'static,
U::Error: fmt::Display + Into<Response<BoxBody>>,
U::InitError: fmt::Debug,
{
/// Creates TCP stream service from HTTP service that consumes PROXY protocol v1 headers first.
///
/// The connection info is obtained from the PROXY header.
pub fn tcp_auto_h2c_proxy_protocol_v1(
self,
) -> impl ServiceFactory<
TcpStream,
Config = (),
Response = (),
Error = actix_proxy_protocol::v1::TlsError<std::io::Error, DispatchError>,
InitError = (),
> {
use actix_proxy_protocol::v1::{TlsError, TlsStream};
actix_proxy_protocol::v1::Acceptor::new()
.map_init_err(|_| unreachable!("TLS acceptor service factory does not error on init"))
.map_err(TlsError::into_service_error)
.and_then(fn_service(move |io: TlsStream<TcpStream>| async move {
// subset of HTTP/2 preface defined by RFC 9113 §3.4
// this subset was chosen to maximize likelihood that peeking only once will allow us to
// reliably determine version or else it should fallback to h1 and fail quickly if data
// on the wire is junk
const H2_PREFACE: &[u8] = b"PRI * HTTP/2";
let mut buf = [0; 12];
// TODO: cannot peek into a bufreader
io.0.get_ref().peek(&mut buf).await.map_err(TlsError::Tls)?;
let proto = if buf == H2_PREFACE {
Protocol::Http2
} else {
Protocol::Http1
};
let peer_addr = io.0.get_ref().peer_addr().ok();
Ok((io, proto, peer_addr))
}))
.and_then(self.map_err(TlsError::Service))
}
}
/// Configuration options used when accepting TLS connection.
#[cfg(feature = "__tls")]
#[derive(Debug, Default)]

View File

@@ -11,7 +11,7 @@ use std::{
use actix_codec::{AsyncRead, AsyncWrite, ReadBuf};
use bytes::{Bytes, BytesMut};
use http::{header, Method, Uri, Version};
use http::{Method, Uri, Version};
use crate::{
header::{HeaderMap, TryIntoHeaderPair},
@@ -98,13 +98,9 @@ impl TestRequest {
}
/// Set request payload.
///
/// This sets the `Content-Length` header with the size of `data`.
pub fn set_payload(&mut self, data: impl Into<Bytes>) -> &mut Self {
let mut payload = crate::h1::Payload::empty();
let bytes = data.into();
self.insert_header((header::CONTENT_LENGTH, bytes.len()));
payload.unread_data(bytes);
payload.unread_data(data.into());
parts(&mut self.0).payload = Some(payload.into());
self
}

View File

@@ -94,21 +94,11 @@ impl Parser {
Some(res) => res,
};
let frame_len = match idx.checked_add(length) {
Some(len) => len,
None => return Err(ProtocolError::Overflow),
};
// not enough data
if src.len() < frame_len {
if src.len() < idx + length {
let min_length = min(length, max_size);
let required_cap = match idx.checked_add(min_length) {
Some(cap) => cap,
None => return Err(ProtocolError::Overflow),
};
if src.capacity() < required_cap {
src.reserve(required_cap - src.capacity());
if src.capacity() < idx + min_length {
src.reserve(idx + min_length - src.capacity());
}
return Ok(None);
}
@@ -412,14 +402,4 @@ mod tests {
Parser::write_close(&mut buf, None, false);
assert_eq!(&buf[..], &vec![0x88, 0x00][..]);
}
#[test]
fn test_parse_length_overflow() {
let buf: [u8; 14] = [
0x0a, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xeb, 0x0e, 0x8f,
];
let mut buf = BytesMut::from(&buf[..]);
let result = Parser::parse(&mut buf, true, 65536);
assert!(matches!(result, Err(ProtocolError::Overflow)));
}
}

View File

@@ -11,14 +11,15 @@ edition.workspace = true
rust-version.workspace = true
[package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"]
all-features = true
[lib]
proc-macro = true
[dependencies]
bytesize = "2"
darling = "0.20"
parse-size = "1"
proc-macro2 = "1"
quote = "1"
syn = "2"
@@ -26,7 +27,7 @@ syn = "2"
[dev-dependencies]
actix-multipart = "0.7"
actix-web = "4"
rustversion-msrv = "0.100"
rustversion = "1"
trybuild = "1"
[lints]

View File

@@ -9,8 +9,8 @@
use std::collections::HashSet;
use bytesize::ByteSize;
use darling::{FromDeriveInput, FromField, FromMeta};
use parse_size::parse_size;
use proc_macro::TokenStream;
use proc_macro2::Ident;
use quote::quote;
@@ -103,7 +103,7 @@ struct ParsedField<'t> {
/// # Field Limits
///
/// You can use the `#[multipart(limit = "<size>")]` attribute to set field level limits. The limit
/// string is parsed using [`bytesize`].
/// string is parsed using [parse_size].
///
/// Note: the form is also subject to the global limits configured using `MultipartFormConfig`.
///
@@ -150,7 +150,7 @@ struct ParsedField<'t> {
/// struct Form { }
/// ```
///
/// [`bytesize`]: https://docs.rs/bytesize/2
/// [parse_size]: https://docs.rs/parse-size/1/parse_size
#[proc_macro_derive(MultipartForm, attributes(multipart))]
pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input: syn::DeriveInput = parse_macro_input!(input);
@@ -191,8 +191,8 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenS
let attrs = FieldAttrs::from_field(field).map_err(|err| err.write_errors())?;
let serialization_name = attrs.rename.unwrap_or_else(|| rust_name.to_string());
let limit = match attrs.limit.map(|limit| match limit.parse::<ByteSize>() {
Ok(ByteSize(size)) => Ok(usize::try_from(size).unwrap()),
let limit = match attrs.limit.map(|limit| match parse_size(&limit) {
Ok(size) => Ok(usize::try_from(size).unwrap()),
Err(err) => Err(syn::Error::new(
field.ident.as_ref().unwrap().span(),
format!("Could not parse size limit `{}`: {}", limit, err),

View File

@@ -1,4 +1,4 @@
#[rustversion_msrv::msrv]
#[rustversion::stable(1.72)] // MSRV
#[test]
fn compile_macros() {
let t = trybuild::TestCases::new();

View File

@@ -1,16 +1,16 @@
error: Could not parse size limit `2 bytes`: couldn't parse "bytes" into a known SI unit, couldn't parse unit of "bytes"
error: Could not parse size limit `2 bytes`: invalid digit found in string
--> tests/trybuild/size-limit-parse-fail.rs:6:5
|
6 | description: Text<String>,
| ^^^^^^^^^^^
error: Could not parse size limit `2 megabytes`: couldn't parse "megabytes" into a known SI unit, couldn't parse unit of "megabytes"
error: Could not parse size limit `2 megabytes`: invalid digit found in string
--> tests/trybuild/size-limit-parse-fail.rs:12:5
|
12 | description: Text<String>,
| ^^^^^^^^^^^
error: Could not parse size limit `four meters`: couldn't parse "four meters" into a ByteSize, cannot parse float from empty string
error: Could not parse size limit `four meters`: invalid digit found in string
--> tests/trybuild/size-limit-parse-fail.rs:18:5
|
18 | description: Text<String>,

View File

@@ -2,9 +2,9 @@
name = "actix-multipart"
version = "0.7.2"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Jacob Halsey <jacob@jhalsey.com>",
"Rob Ede <robjtede@icloud.com>",
"Nikolay Kim <fafhrd91@gmail.com>",
"Jacob Halsey <jacob@jhalsey.com>",
"Rob Ede <robjtede@icloud.com>",
]
description = "Multipart request & form support for Actix Web"
keywords = ["http", "actix", "web", "multipart", "form"]
@@ -14,21 +14,22 @@ license.workspace = true
edition.workspace = true
[package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"]
all-features = true
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_http::*",
"actix_multipart_derive::*",
"actix_utils::*",
"actix_web::*",
"bytes::*",
"futures_core::*",
"mime::*",
"serde_json::*",
"serde_plain::*",
"serde::*",
"tempfile::*",
"actix_http::*",
"actix_multipart_derive::*",
"actix_utils::*",
"actix_web::*",
"bytes::*",
"futures_core::*",
"mime::*",
"serde_json::*",
"serde_plain::*",
"serde::*",
"tempfile::*",
]
[features]
@@ -54,7 +55,7 @@ serde = "1"
serde_json = "1"
serde_plain = "1"
tempfile = { version = "3.4", optional = true }
tokio = { version = "1.38.2", features = ["sync", "io-util"] }
tokio = { version = "1.24.2", features = ["sync", "io-util"] }
[dev-dependencies]
actix-http = "3"
@@ -65,10 +66,10 @@ actix-web = "4"
assert_matches = "1"
awc = "3"
env_logger = "0.11"
futures-test = "0.3"
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
futures-test = "0.3"
multer = "3"
tokio = { version = "1.38.2", features = ["sync"] }
tokio = { version = "1.24.2", features = ["sync"] }
tokio-stream = "0.1"
[lints]

View File

@@ -24,10 +24,9 @@ Due to additional requirements for `multipart/form-data` requests, the higher le
## Examples
```rust
use actix_multipart::form::{
json::Json as MpJson, tempfile::TempFile, MultipartForm, MultipartFormConfig,
};
use actix_web::{middleware::Logger, post, App, HttpServer, Responder};
use actix_web::{post, App, HttpServer, Responder};
use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
use serde::Deserialize;
#[derive(Debug, Deserialize)]
@@ -37,37 +36,25 @@ struct Metadata {
#[derive(Debug, MultipartForm)]
struct UploadForm {
// Note: the form is also subject to the global limits configured using `MultipartFormConfig`.
#[multipart(limit = "100MB")]
file: TempFile,
json: MpJson<Metadata>,
}
#[post("/videos")]
async fn post_video(MultipartForm(form): MultipartForm<UploadForm>) -> impl Responder {
pub async fn post_video(MultipartForm(form): MultipartForm<UploadForm>) -> impl Responder {
format!(
"Uploaded file {}, with size: {}\ntemporary file ({}) was deleted\n",
form.json.name,
form.file.size,
form.file.file.path().display(),
"Uploaded file {}, with size: {}",
form.json.name, form.file.size
)
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
HttpServer::new(move || {
App::new()
.service(post_video)
.wrap(Logger::default())
// Also increase the global total limit to 100MiB.
.app_data(MultipartFormConfig::default().total_limit(100 * 1024 * 1024))
})
.workers(2)
.bind(("127.0.0.1", 8080))?
.run()
.await
HttpServer::new(move || App::new().service(post_video))
.bind(("127.0.0.1", 8080))?
.run()
.await
}
```

View File

@@ -1,6 +1,4 @@
use actix_multipart::form::{
json::Json as MpJson, tempfile::TempFile, MultipartForm, MultipartFormConfig,
};
use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
use actix_web::{middleware::Logger, post, App, HttpServer, Responder};
use serde::Deserialize;
@@ -11,7 +9,6 @@ struct Metadata {
#[derive(Debug, MultipartForm)]
struct UploadForm {
// Note: the form is also subject to the global limits configured using `MultipartFormConfig`.
#[multipart(limit = "100MB")]
file: TempFile,
json: MpJson<Metadata>,
@@ -31,15 +28,9 @@ async fn post_video(MultipartForm(form): MultipartForm<UploadForm>) -> impl Resp
async fn main() -> std::io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
HttpServer::new(move || {
App::new()
.service(post_video)
.wrap(Logger::default())
// Also increase the global total limit to 100MiB.
.app_data(MultipartFormConfig::default().total_limit(100 * 1024 * 1024))
})
.workers(2)
.bind(("127.0.0.1", 8080))?
.run()
.await
HttpServer::new(move || App::new().service(post_video).wrap(Logger::default()))
.workers(2)
.bind(("127.0.0.1", 8080))?
.run()
.await
}

View File

@@ -13,7 +13,7 @@
//! ```no_run
//! use actix_web::{post, App, HttpServer, Responder};
//!
//! use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm, MultipartFormConfig};
//! use actix_multipart::form::{json::Json as MpJson, tempfile::TempFile, MultipartForm};
//! use serde::Deserialize;
//!
//! #[derive(Debug, Deserialize)]
@@ -23,7 +23,6 @@
//!
//! #[derive(Debug, MultipartForm)]
//! struct UploadForm {
//! // Note: the form is also subject to the global limits configured using `MultipartFormConfig`.
//! #[multipart(limit = "100MB")]
//! file: TempFile,
//! json: MpJson<Metadata>,
@@ -39,15 +38,10 @@
//!
//! #[actix_web::main]
//! async fn main() -> std::io::Result<()> {
//! HttpServer::new(move || {
//! App::new()
//! .service(post_video)
//! // Also increase the global total limit to 100MiB.
//! .app_data(MultipartFormConfig::default().total_limit(100 * 1024 * 1024))
//! })
//! .bind(("127.0.0.1", 8080))?
//! .run()
//! .await
//! HttpServer::new(move || App::new().service(post_video))
//! .bind(("127.0.0.1", 8080))?
//! .run()
//! .await
//! }
//! ```
//!

View File

@@ -2,9 +2,9 @@
name = "actix-router"
version = "0.5.3"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
"Nikolay Kim <fafhrd91@gmail.com>",
"Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
]
description = "Resource path matching and router"
keywords = ["actix", "router", "routing"]
@@ -13,7 +13,10 @@ license = "MIT OR Apache-2.0"
edition = "2021"
[package.metadata.cargo_check_external_types]
allowed_external_types = ["http::*", "serde::*"]
allowed_external_types = [
"http::*",
"serde::*",
]
[features]
default = ["http", "unicode"]
@@ -32,8 +35,8 @@ tracing = { version = "0.1.30", default-features = false, features = ["log"] }
[dev-dependencies]
criterion = { version = "0.5", features = ["html_reports"] }
http = "0.2.7"
percent-encoding = "2.1"
serde = { version = "1", features = ["derive"] }
percent-encoding = "2.1"
[lints]
workspace = true

View File

@@ -13,7 +13,6 @@ macro_rules! register {
register!(finish => "(.*)", "(.*)", "(.*)", "(.*)")
}};
(finish => $p1:literal, $p2:literal, $p3:literal, $p4:literal) => {{
#[expect(clippy::useless_concat)]
let arr = [
concat!("/authorizations"),
concat!("/authorizations/", $p1),

View File

@@ -1,34 +1,37 @@
[package]
name = "actix-test"
version = "0.1.5"
authors = ["Nikolay Kim <fafhrd91@gmail.com>", "Rob Ede <robjtede@icloud.com>"]
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
]
description = "Integration testing tools for Actix Web applications"
keywords = ["http", "web", "framework", "async", "futures"]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
categories = [
"network-programming",
"asynchronous",
"web-programming::http-server",
"web-programming::websocket",
"network-programming",
"asynchronous",
"web-programming::http-server",
"web-programming::websocket",
]
license = "MIT OR Apache-2.0"
edition = "2021"
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_codec::*",
"actix_http_test::*",
"actix_http::*",
"actix_service::*",
"actix_web::*",
"awc::*",
"bytes::*",
"futures_core::*",
"http::*",
"openssl::*",
"rustls::*",
"tokio::*",
"actix_codec::*",
"actix_http_test::*",
"actix_http::*",
"actix_service::*",
"actix_web::*",
"awc::*",
"bytes::*",
"futures_core::*",
"http::*",
"openssl::*",
"rustls::*",
"tokio::*",
]
[features]
@@ -69,7 +72,7 @@ tls-rustls-0_20 = { package = "rustls", version = "0.20", optional = true }
tls-rustls-0_21 = { package = "rustls", version = "0.21", optional = true }
tls-rustls-0_22 = { package = "rustls", version = "0.22", optional = true }
tls-rustls-0_23 = { package = "rustls", version = "0.23", default-features = false, optional = true }
tokio = { version = "1.38.2", features = ["sync"] }
tokio = { version = "1.24.2", features = ["sync"] }
[lints]
workspace = true

View File

@@ -12,32 +12,32 @@ rust-version.workspace = true
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix::*",
"actix_http::*",
"actix_web::*",
"bytes::*",
"bytestring::*",
"futures_core::*",
"actix::*",
"actix_http::*",
"actix_web::*",
"bytes::*",
"bytestring::*",
"futures_core::*",
]
[dependencies]
actix = { version = ">=0.12, <0.14", default-features = false }
actix-codec = "0.5"
actix-http = "3"
actix-web = { version = "4", default-features = false, features = ["ws"] }
actix-web = { version = "4", default-features = false }
bytes = "1"
bytestring = "1"
futures-core = { version = "0.3.17", default-features = false }
pin-project-lite = "0.2"
tokio = { version = "1.38.2", features = ["sync"] }
tokio = { version = "1.24.2", features = ["sync"] }
tokio-util = { version = "0.7", features = ["codec"] }
[dev-dependencies]
actix-rt = "2.2"
actix-test = "0.1"
actix-web = { version = "4", features = ["macros"] }
awc = { version = "3", default-features = false }
actix-web = { version = "4", features = ["macros"] }
env_logger = "0.11"
futures-util = { version = "0.3.17", default-features = false, features = ["std"] }

View File

@@ -776,7 +776,10 @@ where
}
Poll::Pending => break,
Poll::Ready(Some(Err(err))) => {
return Poll::Ready(Some(Err(ProtocolError::Io(io::Error::other(err)))));
return Poll::Ready(Some(Err(ProtocolError::Io(io::Error::new(
io::ErrorKind::Other,
format!("{err}"),
)))));
}
}
}
@@ -792,10 +795,11 @@ where
}
Some(frm) => {
let msg = match frm {
Frame::Text(data) => Message::Text(
ByteString::try_from(data)
.map_err(|err| ProtocolError::Io(io::Error::other(err)))?,
),
Frame::Text(data) => {
Message::Text(ByteString::try_from(data).map_err(|err| {
ProtocolError::Io(io::Error::new(io::ErrorKind::Other, err))
})?)
}
Frame::Binary(data) => Message::Binary(data),
Frame::Ping(s) => Message::Ping(s),
Frame::Pong(s) => Message::Pong(s),

View File

@@ -2,7 +2,10 @@
name = "actix-web-codegen"
version = "4.3.0"
description = "Routing and runtime macros for Actix Web"
authors = ["Nikolay Kim <fafhrd91@gmail.com>", "Rob Ede <robjtede@icloud.com>"]
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
]
homepage.workspace = true
repository.workspace = true
license.workspace = true
@@ -30,8 +33,8 @@ actix-utils = "3"
actix-web = "4"
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
rustversion-msrv = "0.100"
trybuild = "1"
rustversion = "1"
[lints]
workspace = true

View File

@@ -59,7 +59,6 @@ macro_rules! standard_method_type {
(
$($variant:ident, $upper:ident, $lower:ident,)+
) => {
#[doc(hidden)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum MethodType {
$(
@@ -467,7 +466,7 @@ impl ToTokens for Route {
let stream = quote! {
#(#doc_attributes)*
#[allow(non_camel_case_types)]
#[allow(non_camel_case_types, missing_docs)]
#vis struct #name;
impl ::actix_web::dev::HttpServiceFactory for #name {

View File

@@ -1,4 +1,4 @@
#[rustversion_msrv::msrv]
#[rustversion::stable(1.72)] // MSRV
#[test]
fn compile_macros() {
let t = trybuild::TestCases::new();

View File

@@ -2,37 +2,7 @@
## Unreleased
- `actix_web::response::builder::HttpResponseBuilder::streaming()` now sets `Content-Type` to `application/octet-stream` if no `Content-Type` header is already set.
- `actix_web::response::builder::HttpResponseBuilder::streaming()` now calls `actix_web::response::builder::HttpResponseBuilder::no_chunking()` if `Content-Length` is set by the user.
- Add `ws` crate feature (on-by-default) which forwards to `actix-http` and guards some of its `ResponseError` impls.
## 4.11.0
- Add `Logger::log_level()` method.
- Improve handling of non-UTF-8 header values in `Logger` middleware.
- Add `HttpServer::shutdown_signal()` method.
- Mark `HttpServer` as `#[must_use]`.
- Allow SVG images to be compressed by the `Compress` middleware.
- Ignore `Host` header in `Host` guard when connection protocol is HTTP/2.
- Re-export `mime` dependency.
- Update `brotli` dependency to `8`.
## 4.10.2
- No significant changes since `4.10.1`.
## 4.10.1
- No significant changes since `4.10.0`.
## 4.10.0
### Added
- Implement `Responder` for `Result<(), E: Into<Error>>`. Returning `Ok(())` responds with HTTP 204 No Content.
### Changed
- On Windows, an error is now returned from `HttpServer::bind()` (or TLS variants) when binding to a socket that's already in use.
- Update `brotli` dependency to `7`.
- Minimum supported Rust version (MSRV) is now 1.75.

View File

@@ -1,14 +1,17 @@
[package]
name = "actix-web"
version = "4.11.0"
version = "4.9.0"
description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
authors = ["Nikolay Kim <fafhrd91@gmail.com>", "Rob Ede <robjtede@icloud.com>"]
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
]
keywords = ["actix", "http", "web", "framework", "async"]
categories = [
"network-programming",
"asynchronous",
"web-programming::http-server",
"web-programming::websocket",
"network-programming",
"asynchronous",
"web-programming::http-server",
"web-programming::websocket"
]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
@@ -17,57 +20,57 @@ edition.workspace = true
rust-version.workspace = true
[package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"]
features = [
"macros",
"openssl",
"rustls-0_20",
"rustls-0_21",
"rustls-0_22",
"rustls-0_23",
"compress-brotli",
"compress-gzip",
"compress-zstd",
"cookies",
"secure-cookies",
"macros",
"openssl",
"rustls-0_20",
"rustls-0_21",
"rustls-0_22",
"rustls-0_23",
"compress-brotli",
"compress-gzip",
"compress-zstd",
"cookies",
"secure-cookies",
]
[package.metadata.cargo_check_external_types]
allowed_external_types = [
"actix_http::*",
"actix_router::*",
"actix_rt::*",
"actix_server::*",
"actix_service::*",
"actix_utils::*",
"actix_web_codegen::*",
"bytes::*",
"cookie::*",
"cookie",
"futures_core::*",
"http::*",
"language_tags::*",
"mime::*",
"openssl::*",
"rustls::*",
"serde_json::*",
"serde_urlencoded::*",
"serde::*",
"serde::*",
"tokio::*",
"url::*",
"actix_http::*",
"actix_router::*",
"actix_rt::*",
"actix_server::*",
"actix_service::*",
"actix_utils::*",
"actix_web_codegen::*",
"bytes::*",
"cookie::*",
"cookie",
"futures_core::*",
"http::*",
"language_tags::*",
"mime::*",
"openssl::*",
"rustls::*",
"serde_json::*",
"serde_urlencoded::*",
"serde::*",
"serde::*",
"tokio::*",
"url::*",
]
[features]
default = [
"macros",
"compress-brotli",
"compress-gzip",
"compress-zstd",
"cookies",
"http2",
"unicode",
"compat",
"ws",
"macros",
"compress-brotli",
"compress-gzip",
"compress-zstd",
"cookies",
"http2",
"unicode",
"compat",
]
# Brotli algorithm content-encoding support
@@ -86,12 +89,9 @@ cookies = ["dep:cookie"]
# Secure & signed cookies
secure-cookies = ["cookies", "cookie/secure"]
# HTTP/2 support (including h2c)
# HTTP/2 support (including h2c).
http2 = ["actix-http/http2"]
# WebSocket support
ws = ["actix-http/ws"]
# TLS via OpenSSL
openssl = ["__tls", "http2", "actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]
@@ -121,7 +121,9 @@ __tls = []
experimental-io-uring = ["actix-server/io-uring"]
# Feature group which, when disabled, helps migrate code to v5.0.
compat = ["compat-routing-macros-force-pub"]
compat = [
"compat-routing-macros-force-pub",
]
# Opt-out forwards-compatibility for handler visibility inheritance fix.
compat-routing-macros-force-pub = ["actix-web-codegen?/compat-routing-macros-force-pub"]
@@ -130,12 +132,12 @@ compat-routing-macros-force-pub = ["actix-web-codegen?/compat-routing-macros-for
actix-codec = "0.5"
actix-macros = { version = "0.2.3", optional = true }
actix-rt = { version = "2.6", default-features = false }
actix-server = "2.6"
actix-server = "2"
actix-service = "2"
actix-tls = { version = "3.4", default-features = false, optional = true }
actix-utils = "3"
actix-tls = { version = "3.4", default-features = false, optional = true }
actix-http = "3.11"
actix-http = { version = "3.7", features = ["ws"] }
actix-router = { version = "0.5.3", default-features = false, features = ["http"] }
actix-web-codegen = { version = "4.3", optional = true, default-features = false }
@@ -143,17 +145,17 @@ bytes = "1"
bytestring = "1"
cfg-if = "1"
cookie = { version = "0.16", features = ["percent-encode"], optional = true }
derive_more = { version = "2", features = ["as_ref", "deref", "deref_mut", "display", "error", "from"] }
derive_more = { version = "2", features = ["display", "error", "from"] }
encoding_rs = "0.8"
foldhash = "0.1"
futures-core = { version = "0.3.17", default-features = false }
futures-util = { version = "0.3.17", default-features = false }
impl-more = "0.1.4"
itoa = "1"
impl-more = "0.1.4"
language-tags = "0.3"
log = "0.4"
mime = "0.3"
once_cell = "1.21"
once_cell = "1.5"
pin-project-lite = "0.2.7"
regex = { version = "1.5.5", optional = true }
regex-lite = "0.1"
@@ -161,18 +163,18 @@ serde = "1.0"
serde_json = "1.0"
serde_urlencoded = "0.7"
smallvec = "1.6.1"
socket2 = "0.6"
time = { version = "0.3", default-features = false, features = ["formatting"] }
tracing = "0.1.30"
url = "2.5.4"
socket2 = "0.5"
time = { version = "0.3", default-features = false, features = ["formatting"] }
url = "2.1"
[dev-dependencies]
actix-files = "0.6"
actix-test = { version = "0.1", features = ["openssl", "rustls-0_23"] }
awc = { version = "3", features = ["openssl"] }
brotli = "8"
const-str = "0.5" # TODO(MSRV 1.77): update to 0.6
brotli = "7"
const-str = "0.5"
core_affinity = "0.8"
criterion = { version = "0.5", features = ["html_reports"] }
env_logger = "0.11"
@@ -185,8 +187,7 @@ serde = { version = "1", features = ["derive"] }
static_assertions = "1"
tls-openssl = { package = "openssl", version = "0.10.55" }
tls-rustls = { package = "rustls", version = "0.23" }
tokio = { version = "1.38.2", features = ["rt-multi-thread", "macros"] }
tokio-util = "0.7"
tokio = { version = "1.24.2", features = ["rt-multi-thread", "macros"] }
zstd = "0.13"
[lints]

View File

@@ -3,6 +3,7 @@
- The return type for `ServiceRequest::app_data::<T>()` was changed from returning a `Data<T>` to simply a `T`. To access a `Data<T>` use `ServiceRequest::app_data::<Data<T>>()`.
- Cookie handling has been offloaded to the `cookie` crate:
- `USERINFO_ENCODE_SET` is no longer exposed. Percent-encoding is still supported; check docs.
- Some types now require lifetime parameters.

View File

@@ -8,10 +8,10 @@
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/actix-web?label=latest)](https://crates.io/crates/actix-web)
[![Documentation](https://docs.rs/actix-web/badge.svg?version=4.11.0)](https://docs.rs/actix-web/4.11.0)
[![Documentation](https://docs.rs/actix-web/badge.svg?version=4.9.0)](https://docs.rs/actix-web/4.9.0)
![MSRV](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-web.svg)
[![Dependency Status](https://deps.rs/crate/actix-web/4.11.0/status.svg)](https://deps.rs/crate/actix-web/4.11.0)
[![Dependency Status](https://deps.rs/crate/actix-web/4.9.0/status.svg)](https://deps.rs/crate/actix-web/4.9.0)
<br />
[![CI](https://github.com/actix/actix-web/actions/workflows/ci.yml/badge.svg)](https://github.com/actix/actix-web/actions/workflows/ci.yml)
[![codecov](https://codecov.io/gh/actix/actix-web/graph/badge.svg?token=dSwOnp9QCv)](https://codecov.io/gh/actix/actix-web)

View File

@@ -7,6 +7,7 @@ use std::{
io::{self, Write as _},
};
use actix_http::Response;
use bytes::BytesMut;
use crate::{
@@ -125,24 +126,20 @@ impl ResponseError for actix_http::error::PayloadError {
}
}
impl ResponseError for actix_http::ws::ProtocolError {}
impl ResponseError for actix_http::error::ContentTypeError {
fn status_code(&self) -> StatusCode {
StatusCode::BAD_REQUEST
}
}
#[cfg(feature = "ws")]
impl ResponseError for actix_http::ws::HandshakeError {
fn error_response(&self) -> HttpResponse<BoxBody> {
actix_http::Response::from(self)
.map_into_boxed_body()
.into()
Response::from(self).map_into_boxed_body().into()
}
}
#[cfg(feature = "ws")]
impl ResponseError for actix_http::ws::ProtocolError {}
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -1,4 +1,4 @@
use actix_http::{header, uri::Uri, RequestHead, Version};
use actix_http::{header, uri::Uri, RequestHead};
use super::{Guard, GuardContext};
@@ -66,7 +66,6 @@ fn get_host_uri(req: &RequestHead) -> Option<Uri> {
req.headers
.get(header::HOST)
.and_then(|host_value| host_value.to_str().ok())
.filter(|_| req.version < Version::HTTP_2)
.or_else(|| req.uri.host())
.and_then(|host| host.parse().ok())
}
@@ -124,38 +123,6 @@ mod tests {
use super::*;
use crate::test::TestRequest;
#[test]
fn host_not_from_header_if_http2() {
let req = TestRequest::default()
.uri("www.rust-lang.org")
.insert_header((
header::HOST,
header::HeaderValue::from_static("www.example.com"),
))
.to_srv_request();
let host = Host("www.example.com");
assert!(host.check(&req.guard_ctx()));
let host = Host("www.rust-lang.org");
assert!(!host.check(&req.guard_ctx()));
let req = TestRequest::default()
.version(actix_http::Version::HTTP_2)
.uri("www.rust-lang.org")
.insert_header((
header::HOST,
header::HeaderValue::from_static("www.example.com"),
))
.to_srv_request();
let host = Host("www.example.com");
assert!(!host.check(&req.guard_ctx()));
let host = Host("www.rust-lang.org");
assert!(host.check(&req.guard_ctx()));
}
#[test]
fn host_from_header() {
let req = TestRequest::default()

View File

@@ -70,7 +70,7 @@ use crate::{
/// This is the source code for the 2-parameter implementation of `Handler` to help illustrate the
/// bounds of the handler call after argument extraction:
/// ```ignore
/// impl<Func, Fut, Arg1, Arg2> Handler<(Arg1, Arg2)> for Func
/// impl<Func, Arg1, Arg2, Fut> Handler<(Arg1, Arg2)> for Func
/// where
/// Func: Fn(Arg1, Arg2) -> Fut + Clone + 'static,
/// Fut: Future,

View File

@@ -267,7 +267,7 @@ impl DispositionParam {
/// parameters: vec![DispositionParam::FilenameExt(ExtendedValue {
/// charset: Charset::Iso_8859_1, // The character set for the bytes of the filename
/// language_tag: None, // The optional language tag (see `language-tag` crate)
/// value: b"\xA9 Ferris 2011.txt".to_vec(), // the actual bytes of the filename
/// value: b"\xa9 Copyright 1989.txt".to_vec(), // the actual bytes of the filename
/// })],
/// };
/// assert!(cd1.is_attachment());

View File

@@ -158,7 +158,7 @@ impl ConnectionInfo {
/// The address is resolved through the following, in order:
/// - `Forwarded` header
/// - `X-Forwarded-For` header
/// - peer address of opened socket (same as [`peer_addr`](Self::peer_addr))
/// - peer address of opened socket (same as [`remote_addr`](Self::remote_addr))
///
/// # Security
/// Do not use this function for security purposes unless you can be sure that the `Forwarded`

View File

@@ -78,7 +78,7 @@ pub use actix_http::{body, HttpMessage};
#[cfg(feature = "cookies")]
#[doc(inline)]
pub use cookie;
pub use mime;
mod app;
mod app_service;
mod config;

View File

@@ -2,79 +2,16 @@
## What Is A Middleware?
Middleware in Actix Web is a powerful mechanism that allows you to add additional behavior to request/response processing. It enables you to:
- Pre-process incoming requests (e.g., path normalization, authentication)
- Post-process outgoing responses (e.g., logging, compression)
- Modify application state through ServiceRequest
- Access external services (e.g., sessions, caching)
Middleware is registered for each App, Scope, or Resource and executed in the reverse order of registration. This means the last registered middleware is the first to process the request.
## Middleware Traits
Actix Web's middleware system is built on two main traits:
1. `Transform<S, Req>`: The builder trait that creates the actual Service. It's responsible for:
- Creating new middleware instances
- Assembling the middleware chain
- Handling initialization errors
2. `Service<Req>`: The trait that represents the actual middleware functionality. It:
- Processes requests and responses
- Can modify both request and response
- Can short-circuit request processing
- Must be implemented for the middleware to work
## Understanding Body Types
When working with middleware, it's important to understand body types:
- Middleware can work with different body types for requests and responses
- The `MessageBody` trait is used to handle different body types
- You can use `EitherBody` when you need to handle multiple body types
- Be careful with body consumption - once a body is consumed, it cannot be read again
## Best Practices
1. Keep middleware focused and single-purpose
2. Handle errors appropriately and propagate them correctly
3. Be mindful of performance impact
4. Use appropriate body types and handle them correctly
5. Consider middleware ordering carefully
6. Document your middleware's behavior and requirements
7. Test your middleware thoroughly
## Error Propagation
Proper error handling is crucial in middleware:
1. Always propagate errors from the inner service
2. Use appropriate error types
3. Handle initialization errors
4. Consider using custom error types for specific middleware errors
5. Document error conditions and handling
## When To (Not) Use Middleware
Use middleware when you need to:
- Add cross-cutting concerns
- Modify requests/responses globally
- Add authentication/authorization
- Add logging or monitoring
- Handle compression or caching
Avoid middleware when:
- The functionality is specific to a single route
- The operation is better handled by a service
- The overhead would be too high
- The functionality can be implemented more simply
## Author's References
- `EitherBody` and when middleware is appropriate: https://discord.com/channels/771444961383153695/952016890723729428
- Actix Web Documentation: https://docs.rs/actix-web
- Service Trait Documentation: https://docs.rs/actix-service
- MessageBody Trait Documentation: https://docs.rs/actix-web/latest/actix_web/body/trait.MessageBody.html
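As a rough illustration of the Transform/Service pattern described in the notes above, here is a minimal sketch of a logging-style middleware. It assumes the actix-web 4 API; the `SayHi`/`SayHiMiddleware` names are purely illustrative.

```rust
use std::{
    future::{ready, Future, Ready},
    pin::Pin,
};

use actix_web::{
    dev::{forward_ready, Service, ServiceRequest, ServiceResponse, Transform},
    Error,
};

// The Transform implementation is the factory: it wraps the inner service.
pub struct SayHi;

impl<S, B> Transform<S, ServiceRequest> for SayHi
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Response = ServiceResponse<B>;
    type Error = Error;
    type InitError = ();
    type Transform = SayHiMiddleware<S>;
    type Future = Ready<Result<Self::Transform, Self::InitError>>;

    fn new_transform(&self, service: S) -> Self::Future {
        ready(Ok(SayHiMiddleware { service }))
    }
}

// The Service implementation does the actual request/response processing.
pub struct SayHiMiddleware<S> {
    service: S,
}

impl<S, B> Service<ServiceRequest> for SayHiMiddleware<S>
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Response = ServiceResponse<B>;
    type Error = Error;
    type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>>>>;

    forward_ready!(service);

    fn call(&self, req: ServiceRequest) -> Self::Future {
        // Pre-process the request here.
        println!("Hi from start. You requested: {}", req.path());

        let fut = self.service.call(req);

        Box::pin(async move {
            // Post-process the response here; errors from the inner service
            // are propagated with `?`.
            let res = fut.await?;
            println!("Hi from response");
            Ok(res)
        })
    }
}
```

Registering it with `App::new().wrap(SayHi)` attaches it to every route of that app; because middleware runs in reverse registration order, the middleware wrapped last is the first to see the request.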

View File

@@ -191,10 +191,8 @@ where
None => true,
Some(hdr) => {
match hdr.to_str().ok().and_then(|hdr| hdr.parse::<Mime>().ok()) {
Some(mime) if mime.type_() == mime::IMAGE => {
matches!(mime.subtype(), mime::SVG)
}
Some(mime) if mime.type_() == mime::VIDEO => false,
Some(mime) if mime.type_().as_str() == "image" => false,
Some(mime) if mime.type_().as_str() == "video" => false,
_ => true,
}
}

View File

@@ -16,7 +16,7 @@ use actix_service::{Service, Transform};
use actix_utils::future::{ready, Ready};
use bytes::Bytes;
use futures_core::ready;
use log::{debug, warn, Level};
use log::{debug, warn};
use pin_project_lite::pin_project;
#[cfg(feature = "unicode")]
use regex::Regex;
@@ -92,7 +92,6 @@ struct Inner {
exclude: HashSet<String>,
exclude_regex: Vec<Regex>,
log_target: Cow<'static, str>,
log_level: Level,
}
impl Logger {
@@ -103,7 +102,6 @@ impl Logger {
exclude: HashSet::new(),
exclude_regex: Vec::new(),
log_target: Cow::Borrowed(module_path!()),
log_level: Level::Info,
}))
}
@@ -141,23 +139,6 @@ impl Logger {
self
}
/// Sets the log level to `level`.
///
/// By default, the log level is `Level::Info`.
///
/// # Examples
/// Using `.log_level(Level::Debug)` would have this effect on request logs:
/// ```diff
/// - [2015-10-21T07:28:00Z INFO actix_web::middleware::logger] 127.0.0.1 "GET / HTTP/1.1" 200 88 "-" "dmc/1.0" 0.001985
/// + [2015-10-21T07:28:00Z DEBUG actix_web::middleware::logger] 127.0.0.1 "GET / HTTP/1.1" 200 88 "-" "dmc/1.0" 0.001985
/// ^^^^^^
/// ```
pub fn log_level(mut self, level: log::Level) -> Self {
let inner = Rc::get_mut(&mut self.0).unwrap();
inner.log_level = level;
self
}
/// Register a function that receives a ServiceRequest and returns a String for use in the
/// log line. The label passed as the first argument should match a replacement substring in
/// the logger format like `%{label}xi`.
@@ -261,7 +242,6 @@ impl Default for Logger {
exclude: HashSet::new(),
exclude_regex: Vec::new(),
log_target: Cow::Borrowed(module_path!()),
log_level: Level::Info,
}))
}
}
@@ -332,7 +312,6 @@ where
format: None,
time: OffsetDateTime::now_utc(),
log_target: Cow::Borrowed(""),
log_level: self.inner.log_level,
_phantom: PhantomData,
}
} else {
@@ -348,7 +327,6 @@ where
format: Some(format),
time: now,
log_target: self.inner.log_target.clone(),
log_level: self.inner.log_level,
_phantom: PhantomData,
}
}
@@ -366,7 +344,6 @@ pin_project! {
time: OffsetDateTime,
format: Option<Format>,
log_target: Cow<'static, str>,
log_level: Level,
_phantom: PhantomData<B>,
}
}
@@ -413,7 +390,6 @@ where
let time = *this.time;
let format = this.format.take();
let log_target = this.log_target.clone();
let log_level = *this.log_level;
Poll::Ready(Ok(res.map_body(move |_, body| StreamLog {
body,
@@ -421,7 +397,6 @@ where
format,
size: 0,
log_target,
log_level,
})))
}
}
@@ -434,7 +409,6 @@ pin_project! {
size: usize,
time: OffsetDateTime,
log_target: Cow<'static, str>,
log_level: Level
}
impl<B> PinnedDrop for StreamLog<B> {
@@ -447,9 +421,8 @@ pin_project! {
Ok(())
};
log::log!(
log::info!(
target: this.log_target.as_ref(),
this.log_level,
"{}", FormatDisplay(&render)
);
}
@@ -649,9 +622,9 @@ impl FormatText {
FormatText::ResponseHeader(ref name) => {
let s = if let Some(val) = res.headers().get(name) {
String::from_utf8_lossy(val.as_bytes()).into_owned()
val.to_str().unwrap_or("-")
} else {
"-".to_owned()
"-"
};
*self = FormatText::Str(s.to_string())
}
@@ -693,11 +666,11 @@ impl FormatText {
FormatText::RequestTime => *self = FormatText::Str(now.format(&Rfc3339).unwrap()),
FormatText::RequestHeader(ref name) => {
let s = if let Some(val) = req.headers().get(name) {
String::from_utf8_lossy(val.as_bytes()).into_owned()
val.to_str().unwrap_or("-")
} else {
"-".to_owned()
"-"
};
*self = FormatText::Str(s);
*self = FormatText::Str(s.to_string());
}
FormatText::RemoteAddr => {
let s = if let Some(peer) = req.connection_info().peer_addr() {

View File

@@ -264,10 +264,8 @@ impl HttpRequest {
///
/// For expanded client connection information, use [`connection_info`] instead.
///
/// Will only return `None` when server is listening on [UDS socket] or when called in unit
/// tests unless [`TestRequest::peer_addr`] is used.
/// Will only return None when called in unit tests unless [`TestRequest::peer_addr`] is used.
///
/// [UDS socket]: crate::HttpServer::bind_uds
/// [`TestRequest::peer_addr`]: crate::test::TestRequest::peer_addr
/// [`connection_info`]: Self::connection_info
#[inline]

View File

@@ -318,33 +318,12 @@ impl HttpResponseBuilder {
/// Set a streaming body and build the `HttpResponse`.
///
/// `HttpResponseBuilder` can not be used after this call.
///
/// If `Content-Type` is not set, then it is automatically set to `application/octet-stream`.
///
/// If `Content-Length` is set, then [`no_chunking()`](Self::no_chunking) is automatically called.
#[inline]
pub fn streaming<S, E>(&mut self, stream: S) -> HttpResponse
where
S: Stream<Item = Result<Bytes, E>> + 'static,
E: Into<BoxError> + 'static,
{
// Set mime type to application/octet-stream if it is not set
if let Some(parts) = self.inner() {
if !parts.headers.contains_key(header::CONTENT_TYPE) {
self.insert_header((header::CONTENT_TYPE, mime::APPLICATION_OCTET_STREAM));
}
}
if let Some(parts) = self.inner() {
if let Some(length) = parts.headers.get(header::CONTENT_LENGTH) {
if let Ok(length) = length.to_str() {
if let Ok(length) = length.parse::<u64>() {
self.no_chunking(length);
}
}
}
}
self.body(BodyStream::new(stream))
}
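Assuming a build that includes the `streaming()` behavior described in the doc comment above (defaulting `Content-Type` and calling `no_chunking()` when `Content-Length` is present), a usage sketch might look like this; `futures-util` is assumed as a dependency and the handler body is hypothetical.

```rust
use actix_web::{http::header, HttpResponse};
use bytes::Bytes;
use futures_util::stream;

fn stream_response() -> HttpResponse {
    // Two in-memory chunks totalling 11 bytes.
    let body = stream::iter([
        Ok::<_, std::io::Error>(Bytes::from_static(b"hello ")),
        Ok(Bytes::from_static(b"world")),
    ]);

    HttpResponse::Ok()
        // Because Content-Length is set, the builder disables chunked
        // transfer encoding; Content-Type defaults to application/octet-stream.
        .insert_header((header::CONTENT_LENGTH, "11"))
        .streaming(body)
}
```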

View File

@@ -6,8 +6,7 @@ use crate::{HttpResponse, HttpResponseBuilder};
macro_rules! static_resp {
($name:ident, $status:expr) => {
#[allow(non_snake_case)]
#[doc = concat!("Creates a new response builder with the status code `", stringify!($status), "`.")]
#[allow(non_snake_case, missing_docs)]
pub fn $name() -> HttpResponseBuilder {
HttpResponseBuilder::new($status)
}

View File

@@ -1,8 +1,6 @@
use std::{
any::Any,
cmp, fmt,
future::Future,
io,
cmp, fmt, io,
marker::PhantomData,
net,
sync::{Arc, Mutex},
@@ -66,7 +64,6 @@ struct Config {
/// .await
/// }
/// ```
#[must_use]
pub struct HttpServer<F, I, S, B>
where
F: Fn() -> I + Send + Clone + 'static,
@@ -275,12 +272,19 @@ where
/// - `actix_web::rt::net::TcpStream` when no encryption is used.
///
/// See the `on_connect` example for additional details.
pub fn on_connect<CB>(mut self, f: CB) -> HttpServer<F, I, S, B>
pub fn on_connect<CB>(self, f: CB) -> HttpServer<F, I, S, B>
where
CB: Fn(&dyn Any, &mut Extensions) + Send + Sync + 'static,
{
self.on_connect_fn = Some(Arc::new(f));
self
HttpServer {
factory: self.factory,
config: self.config,
backlog: self.backlog,
sockets: self.sockets,
builder: self.builder,
on_connect_fn: Some(Arc::new(f)),
_phantom: PhantomData,
}
}
/// Sets server host name.
@@ -308,37 +312,6 @@ where
self
}
/// Specify shutdown signal from a future.
///
/// Using this method will prevent OS signal handlers being set up.
///
/// Typically, a `CancellationToken` will be used, but any future _can_ be.
///
/// # Examples
///
/// ```no_run
/// use actix_web::{App, HttpServer};
/// use tokio_util::sync::CancellationToken;
///
/// # #[actix_web::main]
/// # async fn main() -> std::io::Result<()> {
/// let stop_signal = CancellationToken::new();
///
/// HttpServer::new(move || App::new())
/// .shutdown_signal(stop_signal.cancelled_owned())
/// .bind(("127.0.0.1", 8080))?
/// .run()
/// .await
/// # }
/// ```
pub fn shutdown_signal<Fut>(mut self, shutdown_signal: Fut) -> Self
where
Fut: Future<Output = ()> + Send + 'static,
{
self.builder = self.builder.shutdown_signal(shutdown_signal);
self
}
/// Sets timeout for graceful worker shutdown of workers.
///
/// After receiving a stop signal, workers have this much time to finish serving requests.
@@ -909,7 +882,6 @@ where
let factory = self.factory.clone();
let cfg = Arc::clone(&self.config);
let addr = lst.local_addr().unwrap();
self.sockets.push(Socket {
addr,
scheme: "https",
@@ -1014,7 +986,6 @@ where
let factory = self.factory.clone();
let socket_addr =
net::SocketAddr::new(net::IpAddr::V4(net::Ipv4Addr::new(127, 0, 0, 1)), 8080);
self.sockets.push(Socket {
scheme: "http",
addr: socket_addr,
@@ -1102,7 +1073,10 @@ fn bind_addrs(addrs: impl net::ToSocketAddrs, backlog: u32) -> io::Result<Vec<ne
} else if let Some(err) = err.take() {
Err(err)
} else {
Err(io::Error::other("Could not bind to address"))
Err(io::Error::new(
io::ErrorKind::Other,
"Can not bind to address.",
))
}
}

View File

@@ -238,7 +238,7 @@ where
match res {
Ok(bytes) => {
let fallback = bytes.clone();
let left = L::from_request(this.req, &mut dev::Payload::from(bytes));
let left = L::from_request(this.req, &mut payload_from_bytes(bytes));
EitherExtractState::Left { left, fallback }
}
Err(err) => break Err(EitherExtractError::Bytes(err)),
@@ -251,7 +251,7 @@ where
Err(left_err) => {
let right = R::from_request(
this.req,
&mut dev::Payload::from(mem::take(fallback)),
&mut payload_from_bytes(mem::take(fallback)),
);
EitherExtractState::Right {
left_err: Some(left_err),
@@ -276,6 +276,12 @@ where
}
}
fn payload_from_bytes(bytes: Bytes) -> dev::Payload {
let (_, mut h1_payload) = actix_http::h1::Payload::create(true);
h1_payload.unread_data(bytes);
dev::Payload::from(h1_payload)
}
#[cfg(test)]
mod tests {
use serde::{Deserialize, Serialize};
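The hunk above shows two ways of rebuilding a `dev::Payload` from buffered bytes: one side of the diff converts directly with `dev::Payload::from(bytes)`, the other goes through an h1 payload and `unread_data()`. A small sketch combining both, assuming a dependency set where the direct `From<Bytes>` conversion is available:

```rust
use actix_http::h1;
use actix_web::dev;
use bytes::Bytes;

// Rebuild a payload after a body has been read to completion, e.g. so that a
// second extractor can run against the same bytes.
fn payloads_from_bytes(bytes: Bytes) -> (dev::Payload, dev::Payload) {
    // Direct conversion, as on one side of the diff above.
    let direct = dev::Payload::from(bytes.clone());

    // Equivalent construction via an h1 payload, as in the helper above.
    let (_, mut h1_payload) = h1::Payload::create(true);
    h1_payload.unread_data(bytes);
    let via_h1 = dev::Payload::from(h1_payload);

    (direct, via_h1)
}
```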

View File

@@ -616,7 +616,7 @@ mod tests {
}
));
let (mut req, mut pl) = TestRequest::default()
let (req, mut pl) = TestRequest::default()
.insert_header((
header::CONTENT_TYPE,
header::HeaderValue::from_static("application/json"),
@@ -624,7 +624,6 @@ mod tests {
.set_payload(Bytes::from_static(&[0u8; 1000]))
.to_http_parts();
req.head_mut().headers_mut().remove(header::CONTENT_LENGTH);
let json = JsonBody::<MyObject>::new(&req, &mut pl, None, true)
.limit(100)
.await;

View File

@@ -38,7 +38,7 @@ use crate::{
///
/// A dynamic segment is specified in the form `{identifier}`, where the identifier can be used
/// later in a request handler to access the matched value for that segment. This is done by looking
/// up the identifier in the `Path` object returned by [`HttpRequest::match_info()`](crate::HttpRequest::match_info) method.
/// up the identifier in the `Path` object returned by [`HttpRequest.match_info()`] method.
///
/// By default, each segment matches the regular expression `[^{}/]+`.
///

View File

@@ -1,115 +0,0 @@
use std::{
pin::Pin,
task::{Context, Poll},
};
use actix_web::{
http::header::{self, HeaderValue},
HttpResponse,
};
use bytes::Bytes;
use futures_core::Stream;
struct FixedSizeStream {
data: Vec<u8>,
yielded: bool,
}
impl FixedSizeStream {
fn new(size: usize) -> Self {
Self {
data: vec![0u8; size],
yielded: false,
}
}
}
impl Stream for FixedSizeStream {
type Item = Result<Bytes, std::io::Error>;
fn poll_next(mut self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<Self::Item>> {
if self.yielded {
Poll::Ready(None)
} else {
self.yielded = true;
let data = std::mem::take(&mut self.data);
Poll::Ready(Some(Ok(Bytes::from(data))))
}
}
}
#[actix_rt::test]
async fn test_streaming_response_with_content_length() {
let stream = FixedSizeStream::new(100);
let resp = HttpResponse::Ok()
.append_header((header::CONTENT_LENGTH, "100"))
.streaming(stream);
assert_eq!(
resp.headers().get(header::CONTENT_LENGTH),
Some(&HeaderValue::from_static("100")),
"Content-Length should be preserved when explicitly set"
);
let has_chunked = resp
.headers()
.get(header::TRANSFER_ENCODING)
.map(|v| v.to_str().unwrap_or(""))
.unwrap_or("")
.contains("chunked");
assert!(
!has_chunked,
"chunked should not be used when Content-Length is provided"
);
assert_eq!(
resp.headers().get(header::CONTENT_TYPE),
Some(&HeaderValue::from_static("application/octet-stream")),
"Content-Type should default to application/octet-stream"
);
}
#[actix_rt::test]
async fn test_streaming_response_default_content_type() {
let stream = FixedSizeStream::new(50);
let resp = HttpResponse::Ok().streaming(stream);
assert_eq!(
resp.headers().get(header::CONTENT_TYPE),
Some(&HeaderValue::from_static("application/octet-stream")),
"Content-Type should default to application/octet-stream"
);
}
#[actix_rt::test]
async fn test_streaming_response_user_defined_content_type() {
let stream = FixedSizeStream::new(25);
let resp = HttpResponse::Ok()
.insert_header((header::CONTENT_TYPE, "text/plain"))
.streaming(stream);
assert_eq!(
resp.headers().get(header::CONTENT_TYPE),
Some(&HeaderValue::from_static("text/plain")),
"User-defined Content-Type should be preserved"
);
}
#[actix_rt::test]
async fn test_streaming_response_empty_stream() {
let stream = FixedSizeStream::new(0);
let resp = HttpResponse::Ok()
.append_header((header::CONTENT_LENGTH, "0"))
.streaming(stream);
assert_eq!(
resp.headers().get(header::CONTENT_LENGTH),
Some(&HeaderValue::from_static("0")),
"Content-Length 0 should be preserved for empty streams"
);
}

View File

@@ -2,16 +2,10 @@
## Unreleased
## 3.7.0
- Update `brotli` dependency to `8`.
## 3.6.0
- Prevent panics on connection pool drop when Tokio runtime is shutdown early.
- Do not send `Host` header on HTTP/2 requests, as it is not required, and some web servers may reject it.
- Update `brotli` dependency to `7`.
- Prevent panics on connection pool drop when Tokio runtime is shutdown early.
- Minimum supported Rust version (MSRV) is now 1.75.
- Do not send `Host` header on HTTP/2 requests, as it is not required, and some web servers may reject it.
## 3.5.1

View File

@@ -1,6 +1,6 @@
[package]
name = "awc"
version = "3.7.0"
version = "3.5.1"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Async HTTP and WebSocket client library"
keywords = ["actix", "http", "framework", "async", "web"]
@@ -16,6 +16,7 @@ license = "MIT OR Apache-2.0"
edition = "2021"
[package.metadata.docs.rs]
rustdoc-args = ["--cfg", "docsrs"]
features = [
"cookies",
"openssl",
@@ -96,9 +97,9 @@ dangerous-h2c = []
[dependencies]
actix-codec = "0.5"
actix-http = { version = "3.10", features = ["http2", "ws"] }
actix-rt = { version = "2.1", default-features = false }
actix-service = "2"
actix-http = { version = "3.7", features = ["http2", "ws"] }
actix-rt = { version = "2.1", default-features = false }
actix-tls = { version = "3.4", features = ["connect", "uri"] }
actix-utils = "3"
@@ -108,10 +109,10 @@ cfg-if = "1"
derive_more = { version = "2", features = ["display", "error", "from"] }
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
futures-util = { version = "0.3.17", default-features = false, features = ["alloc", "sink"] }
h2 = "0.3.27"
h2 = "0.3.26"
http = "0.2.7"
itoa = "1"
log = "0.4"
log =" 0.4"
mime = "0.3"
percent-encoding = "2.1"
pin-project-lite = "0.2"
@@ -119,7 +120,7 @@ rand = "0.9"
serde = "1.0"
serde_json = "1.0"
serde_urlencoded = "0.7"
tokio = { version = "1.38.2", features = ["sync"] }
tokio = { version = "1.24.2", features = ["sync"] }
cookie = { version = "0.16", features = ["percent-encode"], optional = true }
@@ -140,15 +141,15 @@ actix-tls = { version = "3.4", features = ["openssl", "rustls-0_23"] }
actix-utils = "3"
actix-web = { version = "4", features = ["openssl"] }
brotli = "8"
const-str = "0.5" # TODO(MSRV 1.77): update to 0.6
brotli = "7"
const-str = "0.5"
env_logger = "0.11"
flate2 = "1.0.13"
futures-util = { version = "0.3.17", default-features = false }
static_assertions = "1.1"
rcgen = "0.13"
rustls-pemfile = "2"
tokio = { version = "1.38.2", features = ["rt-multi-thread", "macros"] }
tokio = { version = "1.24.2", features = ["rt-multi-thread", "macros"] }
zstd = "0.13"
tls-rustls-0_23 = { package = "rustls", version = "0.23" } # add rustls 0.23 with default features to make aws_lc_rs work in tests

View File

@@ -5,9 +5,9 @@
<!-- prettier-ignore-start -->
[![crates.io](https://img.shields.io/crates/v/awc?label=latest)](https://crates.io/crates/awc)
[![Documentation](https://docs.rs/awc/badge.svg?version=3.7.0)](https://docs.rs/awc/3.7.0)
[![Documentation](https://docs.rs/awc/badge.svg?version=3.5.1)](https://docs.rs/awc/3.5.1)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/awc)
[![Dependency Status](https://deps.rs/crate/awc/3.7.0/status.svg)](https://deps.rs/crate/awc/3.7.0)
[![Dependency Status](https://deps.rs/crate/awc/3.5.1/status.svg)](https://deps.rs/crate/awc/3.5.1)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
<!-- prettier-ignore-end -->

View File

@@ -89,9 +89,9 @@ impl Connector<()> {
/// # Panics
///
/// - When the `rustls-0_23-webpki-roots` or `rustls-0_23-native-roots` features are enabled
/// and no default crypto provider has been loaded, this method will panic.
/// and no default crypto provider has been loaded, this method will panic.
/// - When the `rustls-0_23-native-roots` or `rustls-0_22-native-roots` features are enabled
/// and the runtime system has no native root certificates, this method will panic.
/// and the runtime system has no native root certificates, this method will panic.
#[allow(clippy::new_ret_no_self, clippy::let_unit_value)]
pub fn new() -> Connector<
impl Service<

View File

@@ -179,8 +179,9 @@ where
.acquire_owned()
.await
.map_err(|_| {
ConnectError::Io(io::Error::other(
"Failed to acquire semaphore on client connection pool",
ConnectError::Io(io::Error::new(
io::ErrorKind::Other,
"failed to acquire semaphore on client connection pool",
))
})?;

View File

@@ -65,7 +65,9 @@ impl TestResponse {
/// Set response's payload
pub fn set_payload<B: Into<Bytes>>(mut self, data: B) -> Self {
self.payload = Some(Payload::from(data.into()));
let (_, mut payload) = h1::Payload::create(true);
payload.unread_data(data.into());
self.payload = Some(payload.into());
self
}

View File

@@ -1,24 +1,18 @@
_list:
@just --list
toolchain := ""
# Format workspace.
fmt:
just --unstable --fmt
cargo +nightly fmt
fd --hidden --type=file --extension=md --extension=yml --exec-batch npx -y prettier --write
# Downgrade dependencies necessary to run MSRV checks/tests.
# Downgrade dev-dependencies necessary to run MSRV checks/tests.
[private]
downgrade-for-msrv:
cargo {{ toolchain }} update -p=divan --precise=0.1.15 # next ver: 1.80.0
cargo {{ toolchain }} update -p=rayon --precise=1.10.0 # next ver: 1.80.0
cargo {{ toolchain }} update -p=rayon-core --precise=1.12.1 # next ver: 1.80.0
cargo {{ toolchain }} update -p=half --precise=2.4.1 # next ver: 1.81.0
cargo {{ toolchain }} update -p=idna_adapter --precise=1.2.0 # next ver: 1.82.0
cargo {{ toolchain }} update -p=litemap --precise=0.7.4 # next ver: 1.81.0
cargo {{ toolchain }} update -p=zerofrom --precise=0.1.5 # next ver: 1.81.0
cargo update -p=parse-size --precise=1.0.0
cargo update -p=clap --precise=4.4.18
cargo update -p=divan --precise=0.1.15
msrv := ```
cargo metadata --format-version=1 \
@@ -43,48 +37,42 @@ check-min:
check-default:
cargo hack --workspace check
# Check workspace.
check: && clippy
fd --hidden --type=file --extension=md --extension=yml --exec-batch npx -y prettier --check
# Run Clippy over workspace.
check toolchain="": && (clippy toolchain)
# Run Clippy over workspace.
clippy:
clippy toolchain="":
cargo {{ toolchain }} clippy --workspace --all-targets {{ all_crate_features }}
# Run Clippy over workspace using MSRV.
clippy-msrv: downgrade-for-msrv
@just toolchain={{ msrv_rustup }} clippy
# Test workspace using MSRV.
test-msrv: downgrade-for-msrv (test msrv_rustup)
# Test workspace code.
test:
test toolchain="":
cargo {{ toolchain }} test --lib --tests -p=actix-web-codegen --all-features
cargo {{ toolchain }} test --lib --tests -p=actix-multipart-derive --all-features
cargo {{ toolchain }} nextest run --no-tests=warn -p=actix-router --no-default-features
cargo {{ toolchain }} nextest run --no-tests=warn --workspace --exclude=actix-web-codegen --exclude=actix-multipart-derive {{ all_crate_features }} --filter-expr="not test(test_reading_deflate_encoding_large_random_rustls)"
# Test workspace using MSRV.
test-msrv: downgrade-for-msrv
@just toolchain={{ msrv_rustup }} test
# Test workspace docs.
test-docs: && doc
test-docs toolchain="": && doc
cargo {{ toolchain }} test --doc --workspace {{ all_crate_features }} --no-fail-fast -- --nocapture
# Test workspace.
test-all: test test-docs
test-all toolchain="": (test toolchain) (test-docs toolchain)
# Test workspace and collect coverage info.
[private]
test-coverage:
test-coverage toolchain="":
cargo {{ toolchain }} llvm-cov nextest --no-tests=warn --no-report {{ all_crate_features }}
cargo {{ toolchain }} llvm-cov --doc --no-report {{ all_crate_features }}
# Test workspace and generate Codecov report.
test-coverage-codecov: test-coverage
test-coverage-codecov toolchain="": (test-coverage toolchain)
cargo {{ toolchain }} llvm-cov report --doctests --codecov --output-path=codecov.json
# Test workspace and generate LCOV report.
test-coverage-lcov: test-coverage
test-coverage-lcov toolchain="": (test-coverage toolchain)
cargo {{ toolchain }} llvm-cov report --doctests --lcov --output-path=lcov.info
# Document crates in workspace.

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
# developed on macOS and probably doesn't work on Linux yet due to minor
# differences in flags on sed

38
scripts/ci-test Executable file
View File

@@ -0,0 +1,38 @@
#!/bin/sh
# run tests matching what CI does for non-linux feature sets
set -x
EXIT=0
save_exit_code() {
eval $@
local CMD_EXIT=$?
[ "$CMD_EXIT" = "0" ] || EXIT=$CMD_EXIT
}
save_exit_code cargo test --lib --tests -p=actix-router --all-features -- --nocapture
save_exit_code cargo test --lib --tests -p=actix-http --all-features -- --nocapture
save_exit_code cargo test --lib --tests -p=actix-web --features=rustls,openssl -- --nocapture
save_exit_code cargo test --lib --tests -p=actix-web-codegen --all-features -- --nocapture
save_exit_code cargo test --lib --tests -p=awc --all-features -- --nocapture
save_exit_code cargo test --lib --tests -p=actix-http-test --all-features -- --nocapture
save_exit_code cargo test --lib --tests -p=actix-test --all-features -- --nocapture
save_exit_code cargo test --lib --tests -p=actix-files -- --nocapture
save_exit_code cargo test --lib --tests -p=actix-multipart --all-features -- --nocapture
save_exit_code cargo test --lib --tests -p=actix-web-actors --all-features -- --nocapture
save_exit_code cargo test --workspace --doc
if [ "$EXIT" = "0" ]; then
PASSED="All tests passed!"
if [ "$(command -v figlet)" ]; then
figlet "$PASSED"
else
echo "$PASSED"
fi
fi
exit $EXIT

View File

@@ -1,25 +0,0 @@
#!/usr/bin/env bash
set -Euo pipefail
for dir in $@; do
cd "$dir"
cargo publish --dry-run
read -p "Look okay? "
read -p "Sure? "
cargo publish
if [ $? -ne 0 ]; then
echo
read -p "Was the above error caused by cyclic dev-deps? Choosing yes will publish without a git backreference. (y/N) " publish_no_dev_deps
if [[ "$publish_no_dev_deps" == "y" || "$publish_no_dev_deps" == "Y" ]]; then
cargo hack --no-dev-deps publish --allow-dirty
fi
fi
cd ..
done