Mirror of https://github.com/fafhrd91/actix-web (synced 2025-07-03 17:41:30 +02:00)

Compare commits: codegen-v4 ... http-v3.5. (54 commits)
Commits (SHA1):

68597b5426
9dc3ad754e
17060ed993
0d9ca4d939
ff2904ee78
fdef224a06
ede0201aa4
271edafd4d
5e5e5d8315
c7a0af31d3
eefe8b0733
1114a51b22
0a312037ea
37d304b0f2
039f8fb193
929ceb5eb5
e95c8fe5a6
2fe5189954
4accfab196
c0615f28ed
9d1f75d349
e50bceb914
f5655721aa
989548e36a
7d2349afb9
b78f6da05f
3b8d4de0e0
40196f16be
32ddf972c6
ce18f35e03
d3d0208cbd
9e51116da2
3193b81a3e
3acdda48e0
935d36c441
05b4c4964f
fba766b4be
76a0385f94
f1c9b93b87
55ddded315
2cfe257fc2
ccabcd83c0
13fed45bfa
8bd4b36ffe
801a51b312
b28e0fff4b
043bc88f73
e1c48dba26
835a57afc6
81ac30f3df
d50eccb3f7
a7983351be
215a52f565
d445742974
.github/dependabot.yml (14 changed lines)

@@ -1,12 +1,10 @@
 version: 2
 updates:
-  - package-ecosystem: "cargo"
-    directory: "/"
+  - package-ecosystem: cargo
+    directory: /
     schedule:
-      interval: "monthly"
-    open-pull-requests-limit: 10
-  - package-ecosystem: "github-actions"
-    directory: "/"
+      interval: weekly
+  - package-ecosystem: github-actions
+    directory: /
     schedule:
-      interval: "monthly"
-    open-pull-requests-limit: 10
+      interval: weekly
.github/workflows/bench.yml (4 changed lines)

@@ -5,7 +5,7 @@ on:
     branches: [master]

 permissions:
-  contents: read # to fetch code (actions/checkout)
+  contents: read

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -16,7 +16,7 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Install Rust
         run: |
.github/workflows/ci-post-merge.yml (24 changed lines)

@@ -28,7 +28,7 @@ jobs:
     runs-on: ${{ matrix.target.os }}

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Install OpenSSL
         if: matrix.target.os == 'windows-latest'
@@ -40,12 +40,14 @@ jobs:
           echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' | Out-File -FilePath $env:GITHUB_ENV -Append

       - name: Install Rust (${{ matrix.version.name }})
-        uses: actions-rust-lang/setup-rust-toolchain@v1
+        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
        with:
           toolchain: ${{ matrix.version.version }}

       - name: Install cargo-hack
-        uses: taiki-e/install-action@cargo-hack
+        uses: taiki-e/install-action@v2.23.0
+        with:
+          tool: cargo-hack

       - name: check minimal
         run: cargo ci-check-min
@@ -77,13 +79,15 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: actions-rust-lang/setup-rust-toolchain@v1
+        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0

       - name: Install cargo-hack
-        uses: taiki-e/install-action@cargo-hack
+        uses: taiki-e/install-action@v2.23.0
+        with:
+          tool: cargo-hack

       - name: check feature combinations
         run: cargo ci-check-all-feature-powerset
@@ -96,13 +100,15 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: actions-rust-lang/setup-rust-toolchain@v1
+        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0

       - name: Install nextest
-        uses: taiki-e/install-action@nextest
+        uses: taiki-e/install-action@v2.23.0
+        with:
+          tool: nextest

       - name: Test with cargo-nextest
         run: cargo nextest run
.github/workflows/ci.yml (27 changed lines)

@@ -33,7 +33,7 @@ jobs:
     runs-on: ${{ matrix.target.os }}

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Install OpenSSL
         if: matrix.target.os == 'windows-latest'
@@ -45,18 +45,21 @@ jobs:
           echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' | Out-File -FilePath $env:GITHUB_ENV -Append

       - name: Install Rust (${{ matrix.version.name }})
-        uses: actions-rust-lang/setup-rust-toolchain@v1
+        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
         with:
           toolchain: ${{ matrix.version.version }}

       - name: Install cargo-hack
-        uses: taiki-e/install-action@cargo-hack
+        uses: taiki-e/install-action@v2.23.0
+        with:
+          tool: cargo-hack

       - name: workaround MSRV issues
-        if: matrix.version.name != 'stable'
+        if: matrix.version.name == 'msrv'
         run: |
           cargo update -p=clap --precise=4.3.24
           cargo update -p=clap_lex --precise=0.5.0
+          cargo update -p=anstyle --precise=1.0.2

       - name: check minimal
         run: cargo ci-check-min
@@ -80,18 +83,19 @@ jobs:

       - name: Clear the cargo caches
         run: |
-          cargo install cargo-cache --version 0.8.3 --no-default-features --features ci-autoclean
+          cargo --locked install cargo-cache --version 0.8.3 --no-default-features --features ci-autoclean
           cargo-cache

   io-uring:
     name: io-uring tests
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: actions-rust-lang/setup-rust-toolchain@v1
-        with: { toolchain: nightly }
+        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        with:
+          toolchain: nightly

       - name: tests (io-uring)
         timeout-minutes: 60
@@ -102,11 +106,12 @@ jobs:
     name: doc tests
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Install Rust (nightly)
-        uses: actions-rust-lang/setup-rust-toolchain@v1
-        with: { toolchain: nightly }
+        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        with:
+          toolchain: nightly

       - name: doc tests
         run: cargo ci-doctest
.github/workflows/clippy-fmt.yml (75 changed lines, file deleted)

@@ -1,75 +0,0 @@
-name: Lint
-
-on:
-  pull_request:
-    types: [opened, synchronize, reopened]
-
-permissions:
-  contents: read # to fetch code (actions/checkout)
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  fmt:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions-rust-lang/setup-rust-toolchain@v1
-        with:
-          toolchain: nightly
-          components: rustfmt
-
-      - run: cargo fmt --all -- --check
-
-  clippy:
-    permissions:
-      checks: write # to add clippy checks to PR diffs
-
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions-rust-lang/setup-rust-toolchain@v1
-        with: { components: clippy }
-
-      - uses: giraffate/clippy-action@v1
-        with:
-          reporter: 'github-pr-check'
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          clippy_flags: --workspace --all-features --tests --examples --bins -- -Dclippy::todo -Aunknown_lints
-
-  lint-docs:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions-rust-lang/setup-rust-toolchain@v1
-        with: { components: rust-docs }
-
-      - name: Check for broken intra-doc links
-        env: { RUSTDOCFLAGS: "-D warnings" }
-        run: cargo doc --no-deps --all-features --workspace
-
-  public-api-diff:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          ref: ${{ github.base_ref }}
-
-      - uses: actions/checkout@v3
-
-      - uses: actions-rust-lang/setup-rust-toolchain@v1
-        with: { toolchain: nightly-2023-08-25 }
-
-      - uses: taiki-e/cache-cargo-install-action@v1
-        with: { tool: cargo-public-api }
-
-      - name: generate API diff
-        run: |
-          for f in $(find -mindepth 2 -maxdepth 2 -name Cargo.toml); do
-            cargo public-api --manifest-path "$f" diff ${{ github.event.pull_request.base.sha }}..${{ github.sha }}
-          done
.github/workflows/coverage.yml (7 changed lines)

@@ -15,14 +15,15 @@ jobs:
   coverage:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
        with:
          components: llvm-tools-preview

       - name: Install cargo-llvm-cov
-        uses: taiki-e/install-action@v2.13.4
+        uses: taiki-e/install-action@v2.23.0
         with:
           tool: cargo-llvm-cov

.github/workflows/lint.yml (87 changed lines, new file)

@@ -0,0 +1,87 @@
+name: Lint
+
+on:
+  pull_request:
+    types: [opened, synchronize, reopened]
+
+permissions:
+  contents: read
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  fmt:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        with:
+          toolchain: nightly
+          components: rustfmt
+
+      - name: Check with rustfmt
+        run: cargo fmt --all -- --check
+
+  clippy:
+    permissions:
+      contents: read
+      checks: write # to add clippy checks to PR diffs
+
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        with:
+          components: clippy
+
+      - name: Check with Clippy
+        uses: giraffate/clippy-action@v1.0.1
+        with:
+          reporter: github-pr-check
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          clippy_flags: >-
+            --workspace --all-features --tests --examples --bins --
+            -A unknown_lints -D clippy::todo -D clippy::dbg_macro
+
+  lint-docs:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        with:
+          toolchain: nightly
+          components: rust-docs
+
+      - name: Check for broken intra-doc links
+        env:
+          RUSTDOCFLAGS: -D warnings
+        run: cargo +nightly doc --no-deps --workspace --all-features
+
+  public-api-diff:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.base_ref }}
+
+      - uses: actions/checkout@v4
+
+      - uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        with:
+          toolchain: nightly-2023-08-25
+
+      - uses: taiki-e/cache-cargo-install-action@v1.3.0
+        with:
+          tool: cargo-public-api
+
+      - name: generate API diff
+        run: |
+          for f in $(find -mindepth 2 -maxdepth 2 -name Cargo.toml); do
+            cargo public-api --manifest-path "$f" diff ${{ github.event.pull_request.base.sha }}..${{ github.sha }}
+          done
.github/workflows/upload-doc.yml (13 changed lines)

@@ -5,7 +5,7 @@ on:
     branches: [master]

 permissions:
-  contents: read # to fetch code (actions/checkout)
+  contents: read

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -14,14 +14,17 @@ concurrency:
 jobs:
   build:
     permissions:
-      contents: write # to push changes in repo (jamesives/github-pages-deploy-action)
+      contents: write

     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

-      - uses: dtolnay/rust-toolchain@nightly
+      - name: Install Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1.6.0
+        with:
+          toolchain: nightly

       - name: Build Docs
         run: cargo +nightly doc --no-deps --workspace --all-features
@@ -32,7 +35,7 @@ jobs:
         run: echo '<meta http-equiv="refresh" content="0;url=actix_web/index.html">' > target/doc/index.html

       - name: Deploy to GitHub Pages
-        uses: JamesIves/github-pages-deploy-action@v4.4.3
+        uses: JamesIves/github-pages-deploy-action@v4.5.0
         with:
           folder: target/doc
           single-commit: true
@@ -47,4 +47,5 @@ actix-server = { version = "2.2", optional = true } # ensure matching tokio-urin
 actix-rt = "2.7"
 actix-test = "0.1"
 actix-web = "4"
+env_logger = "0.10"
 tempfile = "3.2"

actix-files/examples/guarded-listing.rs (33 changed lines, new file)

@@ -0,0 +1,33 @@
+use actix_files::Files;
+use actix_web::{get, guard, middleware, App, HttpServer, Responder};
+
+const EXAMPLES_DIR: &str = concat![env!("CARGO_MANIFEST_DIR"), "/examples"];
+
+#[get("/")]
+async fn index() -> impl Responder {
+    "Hello world!"
+}
+
+#[actix_web::main]
+async fn main() -> std::io::Result<()> {
+    env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
+
+    log::info!("starting HTTP server at http://localhost:8080");
+
+    HttpServer::new(|| {
+        App::new()
+            .service(index)
+            .service(
+                Files::new("/assets", EXAMPLES_DIR)
+                    .show_files_listing()
+                    .guard(guard::Header("show-listing", "?1")),
+            )
+            .service(Files::new("/assets", EXAMPLES_DIR))
+            .wrap(middleware::Compress::default())
+            .wrap(middleware::Logger::default())
+    })
+    .bind(("127.0.0.1", 8080))?
+    .workers(2)
+    .run()
+    .await
+}
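The new example registers the same directory twice: a guarded Files service that only matches when the request carries a show-listing: ?1 header, and an unguarded fallback without a listing. A rough sketch of exercising that guard with actix-web's test utilities follows; it is not part of the diff, and the directory path and expected status are assumptions.

// Sketch only: drives the guard from the example above via actix-web's test helpers.
use actix_files::Files;
use actix_web::{guard, test, App};

#[actix_web::test]
async fn listing_requires_header() {
    // assumption: serve the crate's own directory just to have files to list
    let dir = env!("CARGO_MANIFEST_DIR");

    let app = test::init_service(
        App::new()
            .service(
                Files::new("/assets", dir)
                    .show_files_listing()
                    .guard(guard::Header("show-listing", "?1")),
            )
            // unguarded fallback: serves files but produces no directory listing
            .service(Files::new("/assets", dir)),
    )
    .await;

    // with the header set, the guarded service matches and returns a listing page
    let req = test::TestRequest::get()
        .uri("/assets")
        .insert_header(("show-listing", "?1"))
        .to_request();
    let res = test::call_service(&app, req).await;
    assert!(res.status().is_success());
}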
@@ -235,7 +235,7 @@ impl Files {
     /// request starts being handled by the file service, it will not be able to back-out and try
     /// the next service, you will simply get a 404 (or 405) error response.
     ///
-    /// To allow `POST` requests to retrieve files, see [`Files::use_guards`].
+    /// To allow `POST` requests to retrieve files, see [`Files::method_guard()`].
     ///
     /// # Examples
     /// ```
@@ -13,7 +13,6 @@

 #![deny(rust_2018_idioms, nonstandard_style)]
 #![warn(future_incompatible, missing_docs, missing_debug_implementations)]
-#![allow(clippy::uninlined_format_args)]
 #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
 #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
@@ -5,7 +5,7 @@ authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
 description = "Various helpers for Actix applications to use during testing"
 keywords = ["http", "web", "framework", "async", "futures"]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 categories = [
   "network-programming",
   "asynchronous",
@@ -2,7 +2,6 @@

 #![deny(rust_2018_idioms, nonstandard_style)]
 #![warn(future_incompatible)]
-#![allow(clippy::uninlined_format_args)]
 #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
 #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
@@ -2,6 +2,17 @@

 ## Unreleased

+## 3.5.0
+
+### Changed
+
+- Implement `From<HeaderMap>` for `http::HeaderMap`.
+- Updated `zstd` dependency to `0.13`.
+
+### Fixed
+
+- Prevent compression of zero-sized response bodies.
+
 ## 3.4.0

 ### Added
@@ -1,14 +1,14 @@
 [package]
 name = "actix-http"
-version = "3.4.0"
+version = "3.5.0"
 authors = [
     "Nikolay Kim <fafhrd91@gmail.com>",
     "Rob Ede <robjtede@icloud.com>",
 ]
-description = "HTTP primitives for the Actix ecosystem"
+description = "HTTP types and services for the Actix ecosystem"
 keywords = ["actix", "http", "framework", "async", "futures"]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 categories = [
   "network-programming",
   "asynchronous",
@@ -103,7 +103,7 @@ actix-tls = { version = "3.1", default-features = false, optional = true }
 # compress-*
 brotli = { version = "3.3.3", optional = true }
 flate2 = { version = "1.0.13", optional = true }
-zstd = { version = "0.12", optional = true }
+zstd = { version = "0.13", optional = true }

 [dev-dependencies]
 actix-http-test = { version = "3", features = ["openssl"] }
@@ -1,22 +1,26 @@
-# actix-http
+# `actix-http`

-> HTTP primitives for the Actix ecosystem.
+> HTTP types and services for the Actix ecosystem.

+<!-- prettier-ignore-start -->
+
 [](https://crates.io/crates/actix-http)
-[](https://docs.rs/actix-http/3.4.0)
+[](https://docs.rs/actix-http/3.5.0)


 <br />
-[](https://deps.rs/crate/actix-http/3.4.0)
+[](https://deps.rs/crate/actix-http/3.5.0)
 [](https://crates.io/crates/actix-http)
 [](https://discord.gg/NWpN5mmg3x)

+<!-- prettier-ignore-end -->
+
 ## Documentation & Resources

 - [API Documentation](https://docs.rs/actix-http)
 - Minimum Supported Rust Version (MSRV): 1.68

-## Example
+## Examples

 ```rust
 use std::{env, io};
@@ -1,5 +1,3 @@
-#![allow(clippy::uninlined_format_args)]
-
 use std::convert::Infallible;

 use actix_http::{encoding::Encoder, ContentEncoding, Request, Response, StatusCode};
@@ -191,7 +191,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },

         #[cfg(feature = "compress-gzip")]
@@ -205,7 +205,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },

         #[cfg(feature = "compress-gzip")]
@@ -218,7 +218,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },

         #[cfg(feature = "compress-zstd")]
@@ -231,7 +231,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },
         }
     }
@@ -250,7 +250,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },

         #[cfg(feature = "compress-gzip")]
@@ -265,7 +265,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },

         #[cfg(feature = "compress-gzip")]
@@ -280,7 +280,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },

         #[cfg(feature = "compress-zstd")]
@@ -295,7 +295,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },
         }
     }
@@ -52,7 +52,7 @@ impl<B: MessageBody> Encoder<B> {

     pub fn response(encoding: ContentEncoding, head: &mut ResponseHead, body: B) -> Self {
         // no need to compress an empty body
-        if matches!(body.size(), BodySize::None) {
+        if matches!(body.size(), BodySize::None | BodySize::Sized(0)) {
             return Self::none();
         }

@@ -532,7 +532,7 @@ impl Decoder for PayloadDecoder {
                 *state = match state.step(src, size, &mut buf) {
                     Poll::Pending => return Ok(None),
                     Poll::Ready(Ok(state)) => state,
-                    Poll::Ready(Err(e)) => return Err(e),
+                    Poll::Ready(Err(err)) => return Err(err),
                 };

                 if *state == ChunkedState::End {
@@ -636,10 +636,17 @@ impl<'a> IntoIterator for &'a HeaderMap {
     }
 }

-/// Convert `http::HeaderMap` to our `HeaderMap`.
+/// Convert a `http::HeaderMap` to our `HeaderMap`.
 impl From<http::HeaderMap> for HeaderMap {
-    fn from(mut map: http::HeaderMap) -> HeaderMap {
-        HeaderMap::from_drain(map.drain())
+    fn from(mut map: http::HeaderMap) -> Self {
+        Self::from_drain(map.drain())
+    }
+}
+
+/// Convert our `HeaderMap` to a `http::HeaderMap`.
+impl From<HeaderMap> for http::HeaderMap {
+    fn from(map: HeaderMap) -> Self {
+        Self::from_iter(map)
     }
 }

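This hunk adds the reverse conversion, so the two header map types can now be converted in both directions. A minimal sketch of the round trip follows; it is not part of the diff and assumes `actix_http::header::HeaderMap` plus the `http` crate as a dependency.

// Sketch only: round-trip between the external `http` crate's map and actix-http's map.
use actix_http::header::HeaderMap;

fn roundtrip(external: http::HeaderMap) -> http::HeaderMap {
    // conversion into actix-http's map already existed before this change
    let ours: HeaderMap = HeaderMap::from(external);
    // the opposite direction is the new `From` impl added in 3.5.0
    http::HeaderMap::from(ours)
}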
@@ -1,6 +1,7 @@
-//! HTTP primitives for the Actix ecosystem.
+//! HTTP types and services for the Actix ecosystem.
 //!
 //! ## Crate Features
+//!
 //! | Feature | Functionality |
 //! | ------------------- | ------------------------------------------- |
 //! | `http2` | HTTP/2 support via [h2]. |
@@ -21,8 +22,7 @@
 #![allow(
     clippy::type_complexity,
     clippy::too_many_arguments,
-    clippy::borrow_interior_mutable_const,
-    clippy::uninlined_format_args
+    clippy::borrow_interior_mutable_const
 )]
 #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
 #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
@@ -93,7 +93,7 @@ impl ResponseBuilder {
                 Ok((key, value)) => {
                     parts.headers.insert(key, value);
                 }
-                Err(e) => self.err = Some(e.into()),
+                Err(err) => self.err = Some(err.into()),
             };
         }

@@ -119,7 +119,7 @@ impl ResponseBuilder {
         if let Some(parts) = self.inner() {
             match header.try_into_pair() {
                 Ok((key, value)) => parts.headers.append(key, value),
-                Err(e) => self.err = Some(e.into()),
+                Err(err) => self.err = Some(err.into()),
             };
         }

@@ -193,7 +193,7 @@ impl ResponseBuilder {
                 Ok(value) => {
                     parts.headers.insert(header::CONTENT_TYPE, value);
                 }
-                Err(e) => self.err = Some(e.into()),
+                Err(err) => self.err = Some(err.into()),
             };
         }
         self
@@ -296,7 +296,7 @@ impl Decoder for Codec {
                     }
                 }
                 Ok(None) => Ok(None),
-                Err(e) => Err(e),
+                Err(err) => Err(err),
             }
         }
     }
@@ -1,5 +1,4 @@
 #![cfg(feature = "openssl")]
-#![allow(clippy::uninlined_format_args)]

 extern crate tls_openssl as openssl;

@@ -147,7 +147,7 @@ async fn chunked_payload() {
         .take_payload()
         .map(|res| match res {
             Ok(pl) => pl,
-            Err(e) => panic!("Error reading payload: {}", e),
+            Err(err) => panic!("Error reading payload: {err}"),
         })
         .fold(0usize, |acc, chunk| ready(acc + chunk.len()))
         .map(|req_size| {
@@ -1,5 +1,3 @@
-#![allow(clippy::uninlined_format_args)]
-
 use std::{
     cell::Cell,
     convert::Infallible,
@@ -5,7 +5,7 @@ authors = ["Jacob Halsey <jacob@jhalsey.com>"]
 description = "Multipart form derive macro for Actix Web"
 keywords = ["http", "web", "framework", "async", "futures"]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 license = "MIT OR Apache-2.0"
 edition = "2021"

@@ -3,11 +3,11 @@
 > The derive macro implementation for actix-multipart-derive.

 [](https://crates.io/crates/actix-multipart-derive)
-[](https://docs.rs/actix-multipart-derive/0.5.0)
+[](https://docs.rs/actix-multipart-derive/0.6.1)


 <br />
-[](https://deps.rs/crate/actix-multipart-derive/0.5.0)
+[](https://deps.rs/crate/actix-multipart-derive/0.6.1)
 [](https://crates.io/crates/actix-multipart-derive)
 [](https://discord.gg/NWpN5mmg3x)

@@ -8,7 +8,7 @@ authors = [
 description = "Multipart form support for Actix Web"
 keywords = ["http", "web", "framework", "async", "futures"]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 license = "MIT OR Apache-2.0"
 edition = "2021"

@@ -2,7 +2,7 @@

 #![deny(rust_2018_idioms, nonstandard_style)]
 #![warn(future_incompatible)]
-#![allow(clippy::borrow_interior_mutable_const, clippy::uninlined_format_args)]
+#![allow(clippy::borrow_interior_mutable_const)]
 #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
 #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
@@ -8,7 +8,7 @@ authors = [
 ]
 description = "Resource path matching and router"
 keywords = ["actix", "router", "routing"]
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 license = "MIT OR Apache-2.0"
 edition = "2021"

actix-router/README.md (16 changed lines, new file)

@@ -0,0 +1,16 @@
+# `actix-router`
+
+[](https://crates.io/crates/actix-router)
+[](https://docs.rs/actix-router/0.5.1)
+
+
+<br />
+[](https://deps.rs/crate/actix-router/0.5.1)
+[](https://crates.io/crates/actix-router)
+[](https://discord.gg/NWpN5mmg3x)
+
+<!-- cargo-rdme start -->
+
+Resource path matching and router.
+
+<!-- cargo-rdme end -->
@@ -1,5 +1,3 @@
-#![allow(clippy::uninlined_format_args)]
-
 use std::{borrow::Cow, fmt::Write as _};

 use criterion::{black_box, criterion_group, criterion_main, Criterion};
@@ -2,7 +2,6 @@

 #![deny(rust_2018_idioms, nonstandard_style)]
 #![warn(future_incompatible)]
-#![allow(clippy::uninlined_format_args)]
 #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
 #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
@@ -3,7 +3,7 @@ use std::{
     ops::{DerefMut, Index},
 };

-use serde::de;
+use serde::{de, Deserialize};

 use crate::{de::PathDeserializer, Resource, ResourcePath};

@@ -24,8 +24,13 @@ impl Default for PathItem {
 /// If resource path contains variable patterns, `Path` stores them.
 #[derive(Debug, Clone, Default)]
 pub struct Path<T> {
+    /// Full path representation.
     path: T,
+
+    /// Number of characters in `path` that have been processed into `segments`.
     pub(crate) skip: u16,
+
+    /// List of processed dynamic segments; name->value pairs.
     pub(crate) segments: Vec<(Cow<'static, str>, PathItem)>,
 }

@@ -83,8 +88,8 @@ impl<T: ResourcePath> Path<T> {
     /// Set new path.
     #[inline]
     pub fn set(&mut self, path: T) {
-        self.skip = 0;
         self.path = path;
+        self.skip = 0;
         self.segments.clear();
     }

@@ -103,7 +108,7 @@ impl<T: ResourcePath> Path<T> {

     pub(crate) fn add(&mut self, name: impl Into<Cow<'static, str>>, value: PathItem) {
         match value {
-            PathItem::Static(s) => self.segments.push((name.into(), PathItem::Static(s))),
+            PathItem::Static(seg) => self.segments.push((name.into(), PathItem::Static(seg))),
             PathItem::Segment(begin, end) => self.segments.push((
                 name.into(),
                 PathItem::Segment(self.skip + begin, self.skip + end),
@@ -168,9 +173,13 @@ impl<T: ResourcePath> Path<T> {
         }
     }

-    /// Try to deserialize matching parameters to a specified type `U`
-    pub fn load<'de, U: serde::Deserialize<'de>>(&'de self) -> Result<U, de::value::Error> {
-        de::Deserialize::deserialize(PathDeserializer::new(self))
+    /// Deserializes matching parameters to a specified type `U`.
+    ///
+    /// # Errors
+    ///
+    /// Returns error when dynamic path segments cannot be deserialized into a `U` type.
+    pub fn load<'de, U: Deserialize<'de>>(&'de self) -> Result<U, de::value::Error> {
+        Deserialize::deserialize(PathDeserializer::new(self))
     }
 }

@@ -193,8 +193,8 @@ const REGEX_FLAGS: &str = "(?s-m)";
 /// # Trailing Slashes
 /// It should be noted that this library takes no steps to normalize intra-path or trailing slashes.
 /// As such, all resource definitions implicitly expect a pre-processing step to normalize paths if
-/// they you wish to accommodate "recoverable" path errors. Below are several examples of
-/// resource-path pairs that would not be compatible.
+/// you wish to accommodate "recoverable" path errors. Below are several examples of resource-path
+/// pairs that would not be compatible.
 ///
 /// ## Examples
 /// ```
@@ -97,6 +97,7 @@ impl<T, U> RouterBuilder<T, U> {
         ctx: U,
     ) -> (&mut ResourceDef, &mut T, &mut U) {
         self.routes.push((rdef, val, ctx));
+        #[allow(clippy::map_identity)] // map is used to distribute &mut-ness to tuple elements
         self.routes
             .last_mut()
             .map(|(rdef, val, ctx)| (rdef, val, ctx))
@@ -186,11 +187,11 @@ mod tests {
         assert_eq!(path.get("file").unwrap(), "file");
         assert_eq!(path.get("ext").unwrap(), "gz");

-        let mut path = Path::new("/vtest/ttt/index.html");
+        let mut path = Path::new("/v2/ttt/index.html");
         let (h, info) = router.recognize_mut(&mut path).unwrap();
         assert_eq!(*h, 14);
         assert_eq!(info, ResourceId(4));
-        assert_eq!(path.get("val").unwrap(), "test");
+        assert_eq!(path.get("val").unwrap(), "2");
         assert_eq!(path.get("val2").unwrap(), "ttt");

         let mut path = Path::new("/v/blah-blah/index.html");
@@ -8,7 +8,7 @@ authors = [
 description = "Integration testing tools for Actix Web applications"
 keywords = ["http", "web", "framework", "async", "futures"]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 categories = [
   "network-programming",
   "asynchronous",
@@ -57,7 +57,6 @@

 #![deny(rust_2018_idioms, nonstandard_style)]
 #![warn(future_incompatible)]
-#![allow(clippy::uninlined_format_args)]
 #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
 #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
@@ -775,10 +775,10 @@ where
                     break;
                 }
                 Poll::Pending => break,
-                Poll::Ready(Some(Err(e))) => {
+                Poll::Ready(Some(Err(err))) => {
                     return Poll::Ready(Some(Err(ProtocolError::Io(io::Error::new(
                         io::ErrorKind::Other,
-                        format!("{}", e),
+                        format!("{err}"),
                     )))));
                 }
             }
@@ -3,7 +3,7 @@ name = "actix-web-codegen"
 version = "4.2.2"
 description = "Routing and runtime macros for Actix Web"
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 authors = [
     "Nikolay Kim <fafhrd91@gmail.com>",
     "Rob Ede <robjtede@icloud.com>",
@@ -2,6 +2,17 @@

 ## Unreleased

+## 4.4.1
+
+### Changed
+
+- Updated `zstd` dependency to `0.13`.
+- Compression middleware now prefers brotli over zstd over gzip.
+
+### Fixed
+
+- Fix validation of `Json` extractor when `JsonConfig::validate_content_type()` is set to false.
+
 ## 4.4.0

 ### Added
@@ -1,11 +1,11 @@
 [package]
 name = "actix-web"
-version = "4.4.0"
+version = "4.4.1"
+description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
 authors = [
     "Nikolay Kim <fafhrd91@gmail.com>",
     "Rob Ede <robjtede@icloud.com>",
 ]
-description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
 keywords = ["actix", "http", "web", "framework", "async"]
 categories = [
   "network-programming",
@@ -14,8 +14,8 @@ categories = [
   "web-programming::websocket"
 ]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
-license = "MIT OR Apache-2.0"
+repository = "https://github.com/actix/actix-web"
+license.workspace = true
 edition.workspace = true
 rust-version.workspace = true

@@ -75,7 +75,7 @@ actix-service = "2"
 actix-utils = "3"
 actix-tls = { version = "3.1", default-features = false, optional = true }

-actix-http = { version = "3.4", features = ["ws"] }
+actix-http = { version = "3.5", features = ["ws"] }
 actix-router = "0.5"
 actix-web-codegen = { version = "4.2", optional = true }

@@ -122,7 +122,7 @@ static_assertions = "1"
 tls-openssl = { package = "openssl", version = "0.10.55" }
 tls-rustls = { package = "rustls", version = "0.21" }
 tokio = { version = "1.24.2", features = ["rt-multi-thread", "macros"] }
-zstd = "0.12"
+zstd = "0.13"

 [[test]]
 name = "test_server"
|
|||||||
<!-- prettier-ignore-start -->
|
<!-- prettier-ignore-start -->
|
||||||
|
|
||||||
[](https://crates.io/crates/actix-web)
|
[](https://crates.io/crates/actix-web)
|
||||||
[](https://docs.rs/actix-web/4.4.0)
|
[](https://docs.rs/actix-web/4.4.1)
|
||||||

|

|
||||||

|

|
||||||
[](https://deps.rs/crate/actix-web/4.4.0)
|
[](https://deps.rs/crate/actix-web/4.4.1)
|
||||||
<br />
|
<br />
|
||||||
[](https://github.com/actix/actix-web/actions/workflows/ci.yml)
|
[](https://github.com/actix/actix-web/actions/workflows/ci.yml)
|
||||||
[](https://codecov.io/gh/actix/actix-web)
|
[](https://codecov.io/gh/actix/actix-web)
|
||||||
|
@@ -1,5 +1,3 @@
-#![allow(clippy::uninlined_format_args)]
-
 use actix_web::{web, App, HttpResponse};
 use awc::Client;
 use criterion::{criterion_group, criterion_main, Criterion};
@@ -1,5 +1,3 @@
-#![allow(clippy::uninlined_format_args)]
-
 use actix_web::{middleware, rt, web, App, HttpRequest, HttpServer};

 async fn index(req: HttpRequest) -> &'static str {
@@ -1,5 +1,3 @@
-#![allow(clippy::uninlined_format_args)]
-
 use actix_web::{get, web, HttpRequest};
 #[cfg(unix)]
 use actix_web::{middleware, App, Error, HttpResponse, HttpServer};
@@ -141,8 +141,8 @@ where
     let fut = data();
     async move {
         match fut.await {
-            Err(e) => {
-                log::error!("Can not construct data instance: {:?}", e);
+            Err(err) => {
+                log::error!("Can not construct data instance: {err:?}");
                 Err(())
             }
             Ok(data) => {
@@ -32,8 +32,8 @@ pub(crate) type FnDataFactory =
 /// Since the Actix Web router layers application data, the returned object will reference the
 /// "closest" instance of the type. For example, if an `App` stores a `u32`, a nested `Scope`
 /// also stores a `u32`, and the delegated request handler falls within that `Scope`, then
-/// extracting a `web::<Data<u32>>` for that handler will return the `Scope`'s instance.
-/// However, using the same router set up and a request that does not get captured by the `Scope`,
+/// extracting a `web::Data<u32>` for that handler will return the `Scope`'s instance. However,
+/// using the same router set up and a request that does not get captured by the `Scope`,
 /// `web::<Data<u32>>` would return the `App`'s instance.
 ///
 /// If route data is not set for a handler, using `Data<T>` extractor would cause a `500 Internal
@@ -175,8 +175,8 @@ where
         let res = ready!(this.fut.poll(cx));
         match res {
             Ok(t) => Poll::Ready(Ok(Some(t))),
-            Err(e) => {
-                log::debug!("Error for Option<T> extractor: {}", e.into());
+            Err(err) => {
+                log::debug!("Error for Option<T> extractor: {}", err.into());
                 Poll::Ready(Ok(None))
             }
         }
@@ -217,8 +217,8 @@ where
 /// /// extract `Thing` from request
 /// async fn index(supplied_thing: Result<Thing>) -> String {
 ///     match supplied_thing {
-///         Ok(thing) => format!("Got thing: {:?}", thing),
-///         Err(e) => format!("Error extracting thing: {}", e)
+///         Ok(thing) => format!("Got thing: {thing:?}"),
+///         Err(err) => format!("Error extracting thing: {err}"),
 ///     }
 /// }
 ///
@@ -355,7 +355,7 @@ mod tuple_from_req {
                     Poll::Ready(Ok(output)) => {
                         let _ = this.$T.as_mut().project_replace(ExtractFuture::Done { output });
                     },
-                    Poll::Ready(Err(e)) => return Poll::Ready(Err(e.into())),
+                    Poll::Ready(Err(err)) => return Poll::Ready(Err(err.into())),
                     Poll::Pending => ready = false,
                 },
                 ExtractProj::Done { .. } => {},
@@ -10,10 +10,12 @@ use crate::{
 /// The interface for request handlers.
 ///
 /// # What Is A Request Handler
+///
 /// In short, a handler is just an async function that receives request-based arguments, in any
 /// order, and returns something that can be converted to a response.
 ///
 /// In particular, a request handler has three requirements:
+///
 /// 1. It is an async function (or a function/closure that returns an appropriate future);
 /// 1. The function parameters (up to 12) implement [`FromRequest`];
 /// 1. The async function (or future) resolves to a type that can be converted into an
@@ -21,11 +23,15 @@ use crate::{
 ///
 ///
 /// # Compiler Errors
+///
 /// If you get the error `the trait Handler<_> is not implemented`, then your handler does not
-/// fulfill the _first_ of the above requirements. Missing other requirements manifest as errors on
-/// implementing [`FromRequest`] and [`Responder`], respectively.
+/// fulfill the _first_ of the above requirements. (It could also mean that you're attempting to use
+/// a macro-routed handler in a manual routing context like `web::get().to(handler)`, which is not
+/// supported). Breaking the other requirements manifests as errors on implementing [`FromRequest`]
+/// and [`Responder`], respectively.
 ///
 /// # How Do Handlers Receive Variable Numbers Of Arguments
+///
 /// Rest assured there is no macro magic here; it's just traits.
 ///
 /// The first thing to note is that [`FromRequest`] is implemented for tuples (up to 12 in length).
@@ -40,6 +46,7 @@ use crate::{
 /// destructures the tuple into its component types and calls your handler function with them.
 ///
 /// In pseudo-code the process looks something like this:
+///
 /// ```ignore
 /// async fn my_handler(body: String, state: web::Data<MyState>) -> impl Responder {
 ///     ...
@ -149,7 +149,7 @@ impl AcceptEncoding {
|
|||||||
|
|
||||||
/// Extracts the most preferable encoding, accounting for [q-factor weighting].
|
/// Extracts the most preferable encoding, accounting for [q-factor weighting].
|
||||||
///
|
///
|
||||||
/// If no q-factors are provided, the first encoding is chosen. Note that items without
|
/// If no q-factors are provided, we prefer brotli > zstd > gzip. Note that items without
|
||||||
/// q-factors are given the maximum preference value.
|
/// q-factors are given the maximum preference value.
|
||||||
///
|
///
|
||||||
/// As per the spec, returns [`Preference::Any`] if acceptable list is empty. Though, if this is
|
/// As per the spec, returns [`Preference::Any`] if acceptable list is empty. Though, if this is
|
||||||
@ -167,6 +167,7 @@ impl AcceptEncoding {
|
|||||||
|
|
||||||
let mut max_item = None;
|
let mut max_item = None;
|
||||||
let mut max_pref = Quality::ZERO;
|
let mut max_pref = Quality::ZERO;
|
||||||
|
let mut max_rank = 0;
|
||||||
|
|
||||||
// uses manual max lookup loop since we want the first occurrence in the case of same
|
// uses manual max lookup loop since we want the first occurrence in the case of same
|
||||||
// preference but `Iterator::max_by_key` would give us the last occurrence
|
// preference but `Iterator::max_by_key` would give us the last occurrence
|
||||||
@ -174,9 +175,13 @@ impl AcceptEncoding {
|
|||||||
for pref in &self.0 {
|
for pref in &self.0 {
|
||||||
// only change if strictly greater
|
// only change if strictly greater
|
||||||
// equal items, even while unsorted, still have higher preference if they appear first
|
// equal items, even while unsorted, still have higher preference if they appear first
|
||||||
if pref.quality > max_pref {
|
|
||||||
|
let rank = encoding_rank(pref);
|
||||||
|
|
||||||
|
if (pref.quality, rank) > (max_pref, max_rank) {
|
||||||
max_pref = pref.quality;
|
max_pref = pref.quality;
|
||||||
max_item = Some(pref.item.clone());
|
max_item = Some(pref.item.clone());
|
||||||
|
max_rank = rank;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -203,6 +208,8 @@ impl AcceptEncoding {
|
|||||||
/// Returns a sorted list of encodings from highest to lowest precedence, accounting
|
/// Returns a sorted list of encodings from highest to lowest precedence, accounting
|
||||||
/// for [q-factor weighting].
|
/// for [q-factor weighting].
|
||||||
///
|
///
|
||||||
|
/// If no q-factors are provided, we prefer brotli > zstd > gzip.
|
||||||
|
///
|
||||||
/// [q-factor weighting]: https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.2
|
/// [q-factor weighting]: https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.2
|
||||||
pub fn ranked(&self) -> Vec<Preference<Encoding>> {
|
pub fn ranked(&self) -> Vec<Preference<Encoding>> {
|
||||||
self.ranked_items().map(|q| q.item).collect()
|
self.ranked_items().map(|q| q.item).collect()
|
||||||
@ -210,21 +217,44 @@ impl AcceptEncoding {
|
|||||||
|
|
||||||
fn ranked_items(&self) -> impl Iterator<Item = QualityItem<Preference<Encoding>>> {
|
fn ranked_items(&self) -> impl Iterator<Item = QualityItem<Preference<Encoding>>> {
|
||||||
if self.0.is_empty() {
|
if self.0.is_empty() {
|
||||||
return vec![].into_iter();
|
return Vec::new().into_iter();
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut types = self.0.clone();
|
let mut types = self.0.clone();
|
||||||
|
|
||||||
// use stable sort so items with equal q-factor retain listed order
|
// use stable sort so items with equal q-factor retain listed order
|
||||||
types.sort_by(|a, b| {
|
types.sort_by(|a, b| {
|
||||||
// sort by q-factor descending
|
// sort by q-factor descending then server ranking descending
|
||||||
b.quality.cmp(&a.quality)
|
|
||||||
|
b.quality
|
||||||
|
.cmp(&a.quality)
|
||||||
|
.then(encoding_rank(b).cmp(&encoding_rank(a)))
|
||||||
});
|
});
|
||||||
|
|
||||||
types.into_iter()
|
types.into_iter()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns server-defined encoding ranking.
|
||||||
|
fn encoding_rank(qv: &QualityItem<Preference<Encoding>>) -> u8 {
|
||||||
|
// ensure that q=0 items are never sorted above identity encoding
|
||||||
|
// invariant: sorting methods calling this fn use first-on-equal approach
|
||||||
|
if qv.quality == Quality::ZERO {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
match qv.item {
|
||||||
|
Preference::Specific(Encoding::Known(ContentEncoding::Brotli)) => 5,
|
||||||
|
Preference::Specific(Encoding::Known(ContentEncoding::Zstd)) => 4,
|
||||||
|
Preference::Specific(Encoding::Known(ContentEncoding::Gzip)) => 3,
|
||||||
|
Preference::Specific(Encoding::Known(ContentEncoding::Deflate)) => 2,
|
||||||
|
Preference::Any => 0,
|
||||||
|
Preference::Specific(Encoding::Known(ContentEncoding::Identity)) => 0,
|
||||||
|
Preference::Specific(Encoding::Known(_)) => 1,
|
||||||
|
Preference::Specific(Encoding::Unknown(_)) => 1,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Returns true if "identity" is an acceptable encoding.
|
/// Returns true if "identity" is an acceptable encoding.
|
||||||
///
|
///
|
||||||
/// Internal algorithm relies on item list being in descending order of quality.
|
/// Internal algorithm relies on item list being in descending order of quality.
|
||||||
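A standalone sketch (simplified types, not the crate's internal API) of the two-key comparison used in the negotiation loop above: the client's q-factor is compared first, the server-defined rank breaks ties, and the strict `>` keeps the first-listed item on a complete tie.

```rust
/// Picks the preferred encoding from `(name, q)` pairs, where `q` is the
/// q-factor scaled to 0..=1000 (1000 being the implicit maximum).
fn negotiate(prefs: &[(&'static str, u16)]) -> Option<&'static str> {
    // server-side ranking; higher is preferred, q=0 items are never promoted
    fn rank(enc: &str, q: u16) -> u8 {
        if q == 0 {
            return 0;
        }
        match enc {
            "br" => 5,
            "zstd" => 4,
            "gzip" => 3,
            "deflate" => 2,
            "identity" | "*" => 0,
            _ => 1,
        }
    }

    let mut best = None;
    let mut best_key = (0u16, 0u8);

    for &(enc, q) in prefs {
        let key = (q, rank(enc, q));
        // strictly greater: on equal keys the earlier (first-listed) item wins
        if key > best_key {
            best_key = key;
            best = Some(enc);
        }
    }

    best
}

fn main() {
    // equal (implicit) q-factors: server ranking breaks the tie in favor of brotli
    assert_eq!(negotiate(&[("gzip", 1000), ("zstd", 1000), ("br", 1000)]), Some("br"));
    // an explicit lower q-factor still loses to a higher one, regardless of rank
    assert_eq!(negotiate(&[("gzip", 1000), ("br", 800)]), Some("gzip"));
}
```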
@ -377,11 +407,11 @@ mod tests {
|
|||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
test.negotiate([Encoding::gzip(), Encoding::brotli(), Encoding::identity()].iter()),
|
test.negotiate([Encoding::gzip(), Encoding::brotli(), Encoding::identity()].iter()),
|
||||||
Some(Encoding::gzip())
|
Some(Encoding::brotli())
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
test.negotiate([Encoding::brotli(), Encoding::gzip(), Encoding::identity()].iter()),
|
test.negotiate([Encoding::brotli(), Encoding::gzip(), Encoding::identity()].iter()),
|
||||||
Some(Encoding::gzip())
|
Some(Encoding::brotli())
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -398,6 +428,9 @@ mod tests {
|
|||||||
|
|
||||||
let test = accept_encoding!("br", "gzip", "*");
|
let test = accept_encoding!("br", "gzip", "*");
|
||||||
assert_eq!(test.ranked(), vec![enc("br"), enc("gzip"), enc("*")]);
|
assert_eq!(test.ranked(), vec![enc("br"), enc("gzip"), enc("*")]);
|
||||||
|
|
||||||
|
let test = accept_encoding!("gzip", "br", "*");
|
||||||
|
assert_eq!(test.ranked(), vec![enc("br"), enc("gzip"), enc("*")]);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -420,5 +453,8 @@ mod tests {
|
|||||||
|
|
||||||
let test = accept_encoding!("br", "gzip", "*");
|
let test = accept_encoding!("br", "gzip", "*");
|
||||||
assert_eq!(test.preference().unwrap(), enc("br"));
|
assert_eq!(test.preference().unwrap(), enc("br"));
|
||||||
|
|
||||||
|
let test = accept_encoding!("gzip", "br", "*");
|
||||||
|
assert_eq!(test.preference().unwrap(), enc("br"));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -69,7 +69,6 @@
|
|||||||
|
|
||||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
#![deny(rust_2018_idioms, nonstandard_style)]
|
||||||
#![warn(future_incompatible)]
|
#![warn(future_incompatible)]
|
||||||
#![allow(clippy::uninlined_format_args)]
|
|
||||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
@ -373,7 +373,7 @@ mod tests {
|
|||||||
.default_service(web::to(move || {
|
.default_service(web::to(move || {
|
||||||
HttpResponse::Ok()
|
HttpResponse::Ok()
|
||||||
.insert_header((header::VARY, "x-test"))
|
.insert_header((header::VARY, "x-test"))
|
||||||
.finish()
|
.body(TEXT_DATA)
|
||||||
}))
|
}))
|
||||||
})
|
})
|
||||||
.await;
|
.await;
|
||||||
@ -429,4 +429,47 @@ mod tests {
|
|||||||
assert_successful_identity_res_with_content_type(&res, "image/jpeg");
|
assert_successful_identity_res_with_content_type(&res, "image/jpeg");
|
||||||
assert_eq!(test::read_body(res).await, TEXT_DATA.as_bytes());
|
assert_eq!(test::read_body(res).await, TEXT_DATA.as_bytes());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn prevents_compression_empty() {
|
||||||
|
let app = test::init_service({
|
||||||
|
App::new()
|
||||||
|
.wrap(Compress::default())
|
||||||
|
.default_service(web::to(move || HttpResponse::Ok().finish()))
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
|
||||||
|
let req = test::TestRequest::default()
|
||||||
|
.insert_header((header::ACCEPT_ENCODING, "gzip"))
|
||||||
|
.to_request();
|
||||||
|
let res = test::call_service(&app, req).await;
|
||||||
|
assert_eq!(res.status(), StatusCode::OK);
|
||||||
|
assert!(!res.headers().contains_key(header::CONTENT_ENCODING));
|
||||||
|
assert!(test::read_body(res).await.is_empty());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "compress-brotli")]
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests_brotli {
|
||||||
|
use super::*;
|
||||||
|
use crate::{test, web, App};
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn prevents_compression_empty() {
|
||||||
|
let app = test::init_service({
|
||||||
|
App::new()
|
||||||
|
.wrap(Compress::default())
|
||||||
|
.default_service(web::to(move || HttpResponse::Ok().finish()))
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
|
||||||
|
let req = test::TestRequest::default()
|
||||||
|
.insert_header((header::ACCEPT_ENCODING, "br"))
|
||||||
|
.to_request();
|
||||||
|
let res = test::call_service(&app, req).await;
|
||||||
|
assert_eq!(res.status(), StatusCode::OK);
|
||||||
|
assert!(!res.headers().contains_key(header::CONTENT_ENCODING));
|
||||||
|
assert!(test::read_body(res).await.is_empty());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -356,7 +356,7 @@ where
|
|||||||
|
|
||||||
let res = match ready!(this.fut.poll(cx)) {
|
let res = match ready!(this.fut.poll(cx)) {
|
||||||
Ok(res) => res,
|
Ok(res) => res,
|
||||||
Err(e) => return Poll::Ready(Err(e)),
|
Err(err) => return Poll::Ready(Err(err)),
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some(error) = res.response().error() {
|
if let Some(error) = res.response().error() {
|
||||||
|
@ -72,7 +72,7 @@
|
|||||||
//! processes the request as well and passes it to `MiddlewareA`, which then passes it to the
|
//! processes the request as well and passes it to `MiddlewareA`, which then passes it to the
|
||||||
//! [`Service`]. In the [`Service`], the extractors will run first. They don't pass the request on,
|
//! [`Service`]. In the [`Service`], the extractors will run first. They don't pass the request on,
|
||||||
//! but only view it (see [`FromRequest`]). After the [`Service`] responds to the request, the
|
//! but only view it (see [`FromRequest`]). After the [`Service`] responds to the request, the
|
||||||
//! response it passed back through `MiddlewareA`, `MiddlewareB`, and `MiddlewareC`.
|
//! response is passed back through `MiddlewareA`, `MiddlewareB`, and `MiddlewareC`.
|
||||||
//!
|
//!
|
||||||
//! As you register middleware using [`wrap`][crate::App::wrap] and [`wrap_fn`][crate::App::wrap_fn]
|
//! As you register middleware using [`wrap`][crate::App::wrap] and [`wrap_fn`][crate::App::wrap_fn]
|
||||||
//! in the [`App`] builder, imagine wrapping layers around an inner [`App`]. The first middleware
|
//! in the [`App`] builder, imagine wrapping layers around an inner [`App`]. The first middleware
|
||||||
|
@ -171,7 +171,7 @@ impl Responder for Redirect {
|
|||||||
} else {
|
} else {
|
||||||
log::error!(
|
log::error!(
|
||||||
"redirect target location can not be converted to header value: {:?}",
|
"redirect target location can not be converted to header value: {:?}",
|
||||||
self.to
|
self.to,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -64,7 +64,7 @@ impl HttpResponseBuilder {
|
|||||||
Ok((key, value)) => {
|
Ok((key, value)) => {
|
||||||
parts.headers.insert(key, value);
|
parts.headers.insert(key, value);
|
||||||
}
|
}
|
||||||
Err(e) => self.error = Some(e.into()),
|
Err(err) => self.error = Some(err.into()),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -86,7 +86,7 @@ impl HttpResponseBuilder {
|
|||||||
if let Some(parts) = self.inner() {
|
if let Some(parts) = self.inner() {
|
||||||
match header.try_into_pair() {
|
match header.try_into_pair() {
|
||||||
Ok((key, value)) => parts.headers.append(key, value),
|
Ok((key, value)) => parts.headers.append(key, value),
|
||||||
Err(e) => self.error = Some(e.into()),
|
Err(err) => self.error = Some(err.into()),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -210,7 +210,7 @@ impl HttpResponseBuilder {
|
|||||||
Ok(value) => {
|
Ok(value) => {
|
||||||
parts.headers.insert(header::CONTENT_TYPE, value);
|
parts.headers.insert(header::CONTENT_TYPE, value);
|
||||||
}
|
}
|
||||||
Err(e) => self.error = Some(e.into()),
|
Err(err) => self.error = Some(err.into()),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
self
|
self
|
||||||
|
@ -5,8 +5,6 @@ mod responder;
|
|||||||
#[allow(clippy::module_inception)]
|
#[allow(clippy::module_inception)]
|
||||||
mod response;
|
mod response;
|
||||||
|
|
||||||
#[cfg(feature = "cookies")]
|
|
||||||
pub use self::response::CookieIter;
|
|
||||||
pub use self::{
|
pub use self::{
|
||||||
builder::HttpResponseBuilder, customize_responder::CustomizeResponder, responder::Responder,
|
builder::HttpResponseBuilder, customize_responder::CustomizeResponder, responder::Responder,
|
||||||
response::HttpResponse,
|
response::HttpResponse,
|
||||||
|
@ -5,6 +5,7 @@
|
|||||||
//! architecture in [`actix-rt`]'s docs.
|
//! architecture in [`actix-rt`]'s docs.
|
||||||
//!
|
//!
|
||||||
//! # Running Actix Web Without Macros
|
//! # Running Actix Web Without Macros
|
||||||
|
//!
|
||||||
//! ```no_run
|
//! ```no_run
|
||||||
//! use actix_web::{middleware, rt, web, App, HttpRequest, HttpServer};
|
//! use actix_web::{middleware, rt, web, App, HttpRequest, HttpServer};
|
||||||
//!
|
//!
|
||||||
@ -25,6 +26,7 @@
|
|||||||
//! ```
|
//! ```
|
||||||
//!
|
//!
|
||||||
//! # Running Actix Web Using `#[tokio::main]`
|
//! # Running Actix Web Using `#[tokio::main]`
|
||||||
|
//!
|
||||||
//! If you need to run something that uses Tokio's work stealing functionality alongside Actix Web,
|
//! If you need to run something that uses Tokio's work stealing functionality alongside Actix Web,
|
||||||
//! you can run Actix Web under `#[tokio::main]`. The [`Server`](crate::dev::Server) object returned
|
//! you can run Actix Web under `#[tokio::main]`. The [`Server`](crate::dev::Server) object returned
|
||||||
//! from [`HttpServer::run`](crate::HttpServer::run) can also be [`spawn`]ed, if preferred.
|
//! from [`HttpServer::run`](crate::HttpServer::run) can also be [`spawn`]ed, if preferred.
|
||||||
@ -32,6 +34,10 @@
|
|||||||
//! Note that `actix` actor support (and therefore WebSocket support through `actix-web-actors`)
|
//! Note that `actix` actor support (and therefore WebSocket support through `actix-web-actors`)
|
||||||
//! still require `#[actix_web::main]` since they require a [`System`] to be set up.
|
//! still require `#[actix_web::main]` since they require a [`System`] to be set up.
|
||||||
//!
|
//!
|
||||||
|
//! Also note that calls to this module's [`spawn()`] re-export require an `#[actix_web::main]`
|
||||||
|
//! runtime (or a manually configured `LocalSet`) since it makes calls into the current thread's
|
||||||
|
//! `LocalSet`, which `#[tokio::main]` does not set up.
|
||||||
|
//!
|
||||||
//! ```no_run
|
//! ```no_run
|
||||||
//! use actix_web::{get, middleware, rt, web, App, HttpRequest, HttpServer};
|
//! use actix_web::{get, middleware, rt, web, App, HttpRequest, HttpServer};
|
||||||
//!
|
//!
|
||||||
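A hedged sketch of that note (assuming the `tokio` crate with the `rt` and `macros` features is available): under a plain `#[tokio::main]` runtime, enter a `LocalSet` manually before calling the `spawn()` re-export.

```rust
use tokio::task::LocalSet;

#[tokio::main]
async fn main() {
    // Without this LocalSet, actix_web::rt::spawn would panic here because
    // #[tokio::main] does not set one up on the current thread.
    let local = LocalSet::new();

    local
        .run_until(async {
            actix_web::rt::spawn(async {
                println!("runs on this thread's LocalSet");
            })
            .await
            .expect("spawned task panicked");
        })
        .await;
}
```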
|
@ -99,6 +99,12 @@ where
|
|||||||
B: MessageBody + 'static,
|
B: MessageBody + 'static,
|
||||||
{
|
{
|
||||||
/// Create new HTTP server with application factory
|
/// Create new HTTP server with application factory
|
||||||
|
///
|
||||||
|
/// # Worker Count
|
||||||
|
///
|
||||||
|
/// The `factory` will be instantiated multiple times in most configurations. See
|
||||||
|
/// [`bind()`](Self::bind()) docs for more on how worker count and bind address resolution
|
||||||
|
/// cause multiple server factory instantiations.
|
||||||
pub fn new(factory: F) -> Self {
|
pub fn new(factory: F) -> Self {
|
||||||
HttpServer {
|
HttpServer {
|
||||||
factory,
|
factory,
|
||||||
@ -119,7 +125,18 @@ where
|
|||||||
|
|
||||||
/// Sets number of workers to start (per bind address).
|
/// Sets number of workers to start (per bind address).
|
||||||
///
|
///
|
||||||
/// By default, the number of available physical CPUs is used as the worker count.
|
/// The default worker count is determined by [`std::thread::available_parallelism()`]. See
|
||||||
|
/// its documentation to determine what behavior you should expect when the server is run.
|
||||||
|
///
|
||||||
|
/// Note that the server factory passed to [`new`](Self::new()) will be instantiated **at least
|
||||||
|
/// once per worker**. See [`bind()`](Self::bind()) docs for more on how worker count and bind
|
||||||
|
/// address resolution cause multiple server factory instantiations.
|
||||||
|
///
|
||||||
|
/// `num` must be greater than 0.
|
||||||
|
///
|
||||||
|
/// # Panics
|
||||||
|
///
|
||||||
|
/// Panics if `num` is 0.
|
||||||
pub fn workers(mut self, num: usize) -> Self {
|
pub fn workers(mut self, num: usize) -> Self {
|
||||||
self.builder = self.builder.workers(num);
|
self.builder = self.builder.workers(num);
|
||||||
self
|
self
|
||||||
@ -319,23 +336,41 @@ where
|
|||||||
/// Resolves socket address(es) and binds server to created listener(s).
|
/// Resolves socket address(es) and binds server to created listener(s).
|
||||||
///
|
///
|
||||||
/// # Hostname Resolution
|
/// # Hostname Resolution
|
||||||
/// When `addr` includes a hostname, it is possible for this method to bind to both the IPv4 and
|
///
|
||||||
/// IPv6 addresses that result from a DNS lookup. You can test this by passing `localhost:8080`
|
/// When `addrs` includes a hostname, it is possible for this method to bind to both the IPv4
|
||||||
/// and noting that the server binds to `127.0.0.1:8080` _and_ `[::1]:8080`. To bind additional
|
/// and IPv6 addresses that result from a DNS lookup. You can test this by passing
|
||||||
/// addresses, call this method multiple times.
|
/// `localhost:8080` and noting that the server binds to `127.0.0.1:8080` _and_ `[::1]:8080`. To
|
||||||
|
/// bind additional addresses, call this method multiple times.
|
||||||
///
|
///
|
||||||
/// Note that, if a DNS lookup is required, resolving hostnames is a blocking operation.
|
/// Note that, if a DNS lookup is required, resolving hostnames is a blocking operation.
|
||||||
///
|
///
|
||||||
|
/// # Worker Count
|
||||||
|
///
|
||||||
|
/// The `factory` will be instantiated multiple times in most scenarios. The number of
|
||||||
|
/// instantiations is the number of [`workers`](Self::workers()) × the number of sockets resolved by
|
||||||
|
/// `addrs`.
|
||||||
|
///
|
||||||
|
/// For example, if you've manually set [`workers`](Self::workers()) to 2, and use `127.0.0.1`
|
||||||
|
/// as the bind `addrs`, then `factory` will be instantiated twice. However, using `localhost`
|
||||||
|
/// as the bind `addrs` can often resolve to both `127.0.0.1` (IPv4) _and_ `::1` (IPv6), causing
|
||||||
|
/// the `factory` to be instantiated 4 times (2 workers × 2 bind addresses).
|
||||||
|
///
|
||||||
|
/// Using a bind address of `0.0.0.0`, which signals to use all interfaces, may also multiply
|
||||||
|
/// the number of instantiations in a similar way.
|
||||||
|
///
|
||||||
/// # Typical Usage
|
/// # Typical Usage
|
||||||
|
///
|
||||||
/// In general, use `127.0.0.1:<port>` when testing locally and `0.0.0.0:<port>` when deploying
|
/// In general, use `127.0.0.1:<port>` when testing locally and `0.0.0.0:<port>` when deploying
|
||||||
/// (with or without a reverse proxy or load balancer) so that the server is accessible.
|
/// (with or without a reverse proxy or load balancer) so that the server is accessible.
|
||||||
///
|
///
|
||||||
/// # Errors
|
/// # Errors
|
||||||
|
///
|
||||||
/// Returns an `io::Error` if:
|
/// Returns an `io::Error` if:
|
||||||
/// - `addrs` cannot be resolved into one or more socket addresses;
|
/// - `addrs` cannot be resolved into one or more socket addresses;
|
||||||
/// - all the resolved socket addresses are already bound.
|
/// - all the resolved socket addresses are already bound.
|
||||||
///
|
///
|
||||||
/// # Example
|
/// # Example
|
||||||
|
///
|
||||||
/// ```
|
/// ```
|
||||||
/// # use actix_web::{App, HttpServer};
|
/// # use actix_web::{App, HttpServer};
|
||||||
/// # fn inner() -> std::io::Result<()> {
|
/// # fn inner() -> std::io::Result<()> {
|
||||||
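To make the arithmetic concrete, a short sketch (worker count and address are illustrative): with 2 workers and a hostname that resolves to both an IPv4 and an IPv6 address, the factory closure below is instantiated 4 times.

```rust
use actix_web::{web, App, HttpResponse, HttpServer, Responder};

async fn index() -> impl Responder {
    HttpResponse::Ok().body("hello")
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        // the "factory": instantiated once per worker per bound socket
        App::new().route("/", web::get().to(index))
    })
    .workers(2) // panics if set to 0
    // `localhost` commonly resolves to 127.0.0.1 and [::1], i.e. two sockets
    .bind("localhost:8080")?
    .run()
    .await
}
```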
@ -356,6 +391,8 @@ where
|
|||||||
|
|
||||||
/// Resolves socket address(es) and binds server to created listener(s) for plaintext HTTP/1.x
|
/// Resolves socket address(es) and binds server to created listener(s) for plaintext HTTP/1.x
|
||||||
/// or HTTP/2 connections.
|
/// or HTTP/2 connections.
|
||||||
|
///
|
||||||
|
/// See [`bind()`](Self::bind()) for more details on `addrs` argument.
|
||||||
#[cfg(feature = "http2")]
|
#[cfg(feature = "http2")]
|
||||||
pub fn bind_auto_h2c<A: net::ToSocketAddrs>(mut self, addrs: A) -> io::Result<Self> {
|
pub fn bind_auto_h2c<A: net::ToSocketAddrs>(mut self, addrs: A) -> io::Result<Self> {
|
||||||
let sockets = bind_addrs(addrs, self.backlog)?;
|
let sockets = bind_addrs(addrs, self.backlog)?;
|
||||||
@ -370,7 +407,7 @@ where
|
|||||||
/// Resolves socket address(es) and binds server to created listener(s) for TLS connections
|
/// Resolves socket address(es) and binds server to created listener(s) for TLS connections
|
||||||
/// using Rustls v0.20.
|
/// using Rustls v0.20.
|
||||||
///
|
///
|
||||||
/// See [`bind()`](Self::bind) for more details on `addrs` argument.
|
/// See [`bind()`](Self::bind()) for more details on `addrs` argument.
|
||||||
///
|
///
|
||||||
/// ALPN protocols "h2" and "http/1.1" are added to any configured ones.
|
/// ALPN protocols "h2" and "http/1.1" are added to any configured ones.
|
||||||
#[cfg(feature = "rustls-0_20")]
|
#[cfg(feature = "rustls-0_20")]
|
||||||
@ -389,7 +426,7 @@ where
|
|||||||
/// Resolves socket address(es) and binds server to created listener(s) for TLS connections
|
/// Resolves socket address(es) and binds server to created listener(s) for TLS connections
|
||||||
/// using Rustls v0.21.
|
/// using Rustls v0.21.
|
||||||
///
|
///
|
||||||
/// See [`bind()`](Self::bind) for more details on `addrs` argument.
|
/// See [`bind()`](Self::bind()) for more details on `addrs` argument.
|
||||||
///
|
///
|
||||||
/// ALPN protocols "h2" and "http/1.1" are added to any configured ones.
|
/// ALPN protocols "h2" and "http/1.1" are added to any configured ones.
|
||||||
#[cfg(feature = "rustls-0_21")]
|
#[cfg(feature = "rustls-0_21")]
|
||||||
@ -408,7 +445,7 @@ where
|
|||||||
/// Resolves socket address(es) and binds server to created listener(s) for TLS connections
|
/// Resolves socket address(es) and binds server to created listener(s) for TLS connections
|
||||||
/// using OpenSSL.
|
/// using OpenSSL.
|
||||||
///
|
///
|
||||||
/// See [`bind()`](Self::bind) for more details on `addrs` argument.
|
/// See [`bind()`](Self::bind()) for more details on `addrs` argument.
|
||||||
///
|
///
|
||||||
/// ALPN protocols "h2" and "http/1.1" are added to any configured ones.
|
/// ALPN protocols "h2" and "http/1.1" are added to any configured ones.
|
||||||
#[cfg(feature = "openssl")]
|
#[cfg(feature = "openssl")]
|
||||||
@ -853,7 +890,7 @@ fn bind_addrs(addrs: impl net::ToSocketAddrs, backlog: u32) -> io::Result<Vec<ne
|
|||||||
success = true;
|
success = true;
|
||||||
sockets.push(lst);
|
sockets.push(lst);
|
||||||
}
|
}
|
||||||
Err(e) => err = Some(e),
|
Err(error) => err = Some(error),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
use std::{fmt, ops};
|
use std::{fmt, ops};
|
||||||
|
|
||||||
use actix_utils::future::{err, ok, Ready};
|
use actix_utils::future::{ready, Ready};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
dev::Payload, error::ParseError, extract::FromRequest, http::header::Header as ParseHeader,
|
dev::Payload, error::ParseError, extract::FromRequest, http::header::Header as ParseHeader,
|
||||||
@ -66,8 +66,8 @@ where
|
|||||||
#[inline]
|
#[inline]
|
||||||
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
|
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
|
||||||
match ParseHeader::parse(req) {
|
match ParseHeader::parse(req) {
|
||||||
Ok(header) => ok(Header(header)),
|
Ok(header) => ready(Ok(Header(header))),
|
||||||
Err(e) => err(e),
|
Err(err) => ready(Err(err)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -328,14 +328,19 @@ impl<T: DeserializeOwned> JsonBody<T> {
|
|||||||
ctype_required: bool,
|
ctype_required: bool,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
// check content-type
|
// check content-type
|
||||||
let can_parse_json = if let Ok(Some(mime)) = req.mime_type() {
|
let can_parse_json = match (ctype_required, req.mime_type()) {
|
||||||
mime.subtype() == mime::JSON
|
(true, Ok(Some(mime))) => {
|
||||||
|| mime.suffix() == Some(mime::JSON)
|
mime.subtype() == mime::JSON
|
||||||
|| ctype_fn.map_or(false, |predicate| predicate(mime))
|
|| mime.suffix() == Some(mime::JSON)
|
||||||
} else {
|
|| ctype_fn.map_or(false, |predicate| predicate(mime))
|
||||||
// if `ctype_required` is false, assume payload is
|
}
|
||||||
// json even when content-type header is missing
|
|
||||||
!ctype_required
|
// if content-type is expected but not parsable as mime type, bail
|
||||||
|
(true, _) => false,
|
||||||
|
|
||||||
|
// if content-type validation is disabled, assume payload is JSON
|
||||||
|
// even when content-type header is missing or invalid mime type
|
||||||
|
(false, _) => true,
|
||||||
};
|
};
|
||||||
|
|
||||||
if !can_parse_json {
|
if !can_parse_json {
|
||||||
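For reference, a short sketch of how this relaxed check is enabled from application code (the struct and route are illustrative; assumes `serde` with the `derive` feature): with `content_type_required(false)`, the `Json` extractor attempts deserialization even when the `Content-Type` header is missing or not a JSON mime type.

```rust
use actix_web::{web, App, HttpResponse, HttpServer, Responder};
use serde::Deserialize;

#[derive(Deserialize)]
struct Info {
    name: String,
}

async fn index(info: web::Json<Info>) -> impl Responder {
    HttpResponse::Ok().body(info.into_inner().name)
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // disable content-type validation for the Json extractor
            .app_data(web::JsonConfig::default().content_type_required(false))
            .route("/", web::post().to(index))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```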
@ -725,6 +730,25 @@ mod tests {
|
|||||||
assert!(s.is_ok())
|
assert!(s.is_ok())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn test_json_ignoring_content_type() {
|
||||||
|
let (req, mut pl) = TestRequest::default()
|
||||||
|
.insert_header((
|
||||||
|
header::CONTENT_LENGTH,
|
||||||
|
header::HeaderValue::from_static("16"),
|
||||||
|
))
|
||||||
|
.insert_header((
|
||||||
|
header::CONTENT_TYPE,
|
||||||
|
header::HeaderValue::from_static("invalid/value"),
|
||||||
|
))
|
||||||
|
.set_payload(Bytes::from_static(b"{\"name\": \"test\"}"))
|
||||||
|
.app_data(JsonConfig::default().content_type_required(false))
|
||||||
|
.to_http_parts();
|
||||||
|
|
||||||
|
let s = Json::<MyObject>::from_request(&req, &mut pl).await;
|
||||||
|
assert!(s.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn test_with_config_in_data_wrapper() {
|
async fn test_with_config_in_data_wrapper() {
|
||||||
let (req, mut pl) = TestRequest::default()
|
let (req, mut pl) = TestRequest::default()
|
||||||
|
@ -96,7 +96,7 @@ async fn negotiate_encoding_gzip() {
|
|||||||
|
|
||||||
let req = srv
|
let req = srv
|
||||||
.post("/static")
|
.post("/static")
|
||||||
.insert_header((header::ACCEPT_ENCODING, "gzip,br,zstd"))
|
.insert_header((header::ACCEPT_ENCODING, "gzip, br;q=0.8, zstd;q=0.5"))
|
||||||
.send();
|
.send();
|
||||||
|
|
||||||
let mut res = req.await.unwrap();
|
let mut res = req.await.unwrap();
|
||||||
@ -109,7 +109,7 @@ async fn negotiate_encoding_gzip() {
|
|||||||
let mut res = srv
|
let mut res = srv
|
||||||
.post("/static")
|
.post("/static")
|
||||||
.no_decompress()
|
.no_decompress()
|
||||||
.insert_header((header::ACCEPT_ENCODING, "gzip,br,zstd"))
|
.insert_header((header::ACCEPT_ENCODING, "gzip, br;q=0.8, zstd;q=0.5"))
|
||||||
.send()
|
.send()
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
@ -123,9 +123,11 @@ async fn negotiate_encoding_gzip() {
|
|||||||
async fn negotiate_encoding_br() {
|
async fn negotiate_encoding_br() {
|
||||||
let srv = test_server!();
|
let srv = test_server!();
|
||||||
|
|
||||||
|
// check that brotli content-encoding header is returned
|
||||||
|
|
||||||
let req = srv
|
let req = srv
|
||||||
.post("/static")
|
.post("/static")
|
||||||
.insert_header((header::ACCEPT_ENCODING, "br,zstd,gzip"))
|
.insert_header((header::ACCEPT_ENCODING, "br, zstd, gzip"))
|
||||||
.send();
|
.send();
|
||||||
|
|
||||||
let mut res = req.await.unwrap();
|
let mut res = req.await.unwrap();
|
||||||
@ -135,10 +137,26 @@ async fn negotiate_encoding_br() {
|
|||||||
let bytes = res.body().await.unwrap();
|
let bytes = res.body().await.unwrap();
|
||||||
assert_eq!(bytes, Bytes::from_static(LOREM));
|
assert_eq!(bytes, Bytes::from_static(LOREM));
|
||||||
|
|
||||||
|
// check that brotli is preferred even when later in (q-less) list
|
||||||
|
|
||||||
|
let req = srv
|
||||||
|
.post("/static")
|
||||||
|
.insert_header((header::ACCEPT_ENCODING, "gzip, zstd, br"))
|
||||||
|
.send();
|
||||||
|
|
||||||
|
let mut res = req.await.unwrap();
|
||||||
|
assert_eq!(res.status(), StatusCode::OK);
|
||||||
|
assert_eq!(res.headers().get(header::CONTENT_ENCODING).unwrap(), "br");
|
||||||
|
|
||||||
|
let bytes = res.body().await.unwrap();
|
||||||
|
assert_eq!(bytes, Bytes::from_static(LOREM));
|
||||||
|
|
||||||
|
// check that returned content is actually brotli encoded
|
||||||
|
|
||||||
let mut res = srv
|
let mut res = srv
|
||||||
.post("/static")
|
.post("/static")
|
||||||
.no_decompress()
|
.no_decompress()
|
||||||
.insert_header((header::ACCEPT_ENCODING, "br,zstd,gzip"))
|
.insert_header((header::ACCEPT_ENCODING, "br, zstd, gzip"))
|
||||||
.send()
|
.send()
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
@ -154,7 +172,7 @@ async fn negotiate_encoding_zstd() {
|
|||||||
|
|
||||||
let req = srv
|
let req = srv
|
||||||
.post("/static")
|
.post("/static")
|
||||||
.insert_header((header::ACCEPT_ENCODING, "zstd,gzip,br"))
|
.insert_header((header::ACCEPT_ENCODING, "zstd, gzip, br;q=0.8"))
|
||||||
.send();
|
.send();
|
||||||
|
|
||||||
let mut res = req.await.unwrap();
|
let mut res = req.await.unwrap();
|
||||||
@ -167,7 +185,7 @@ async fn negotiate_encoding_zstd() {
|
|||||||
let mut res = srv
|
let mut res = srv
|
||||||
.post("/static")
|
.post("/static")
|
||||||
.no_decompress()
|
.no_decompress()
|
||||||
.insert_header((header::ACCEPT_ENCODING, "zstd,gzip,br"))
|
.insert_header((header::ACCEPT_ENCODING, "zstd, gzip, br;q=0.8"))
|
||||||
.send()
|
.send()
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
@ -207,7 +225,7 @@ async fn gzip_no_decompress() {
|
|||||||
// don't decompress response body
|
// don't decompress response body
|
||||||
.no_decompress()
|
.no_decompress()
|
||||||
// signal that we want a compressed body
|
// signal that we want a compressed body
|
||||||
.insert_header((header::ACCEPT_ENCODING, "gzip,br,zstd"))
|
.insert_header((header::ACCEPT_ENCODING, "gzip, br;q=0.8, zstd;q=0.5"))
|
||||||
.send();
|
.send();
|
||||||
|
|
||||||
let mut res = req.await.unwrap();
|
let mut res = req.await.unwrap();
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
#![allow(clippy::uninlined_format_args)]
|
|
||||||
|
|
||||||
#[cfg(feature = "openssl")]
|
#[cfg(feature = "openssl")]
|
||||||
extern crate tls_openssl as openssl;
|
extern crate tls_openssl as openssl;
|
||||||
|
|
||||||
|
@ -2,6 +2,11 @@
|
|||||||
|
|
||||||
## Unreleased
|
## Unreleased
|
||||||
|
|
||||||
|
## 3.3.0
|
||||||
|
|
||||||
|
- Update `trust-dns-resolver` dependency to `0.23`.
|
||||||
|
- Update `zstd` dependency to `0.13`.
|
||||||
|
|
||||||
## 3.2.0
|
## 3.2.0
|
||||||
|
|
||||||
- Add `awc::Connector::rustls_021()` method for Rustls v0.21 support behind new `rustls-0_21` crate feature.
|
- Add `awc::Connector::rustls_021()` method for Rustls v0.21 support behind new `rustls-0_21` crate feature.
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "awc"
|
name = "awc"
|
||||||
version = "3.2.0"
|
version = "3.3.0"
|
||||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||||
description = "Async HTTP and WebSocket client library"
|
description = "Async HTTP and WebSocket client library"
|
||||||
keywords = ["actix", "http", "framework", "async", "web"]
|
keywords = ["actix", "http", "framework", "async", "web"]
|
||||||
@ -11,7 +11,7 @@ categories = [
|
|||||||
"web-programming::websocket",
|
"web-programming::websocket",
|
||||||
]
|
]
|
||||||
homepage = "https://actix.rs"
|
homepage = "https://actix.rs"
|
||||||
repository = "https://github.com/actix/actix-web.git"
|
repository = "https://github.com/actix/actix-web"
|
||||||
license = "MIT OR Apache-2.0"
|
license = "MIT OR Apache-2.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
@ -61,7 +61,7 @@ dangerous-h2c = []
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
actix-codec = "0.5"
|
actix-codec = "0.5"
|
||||||
actix-service = "2"
|
actix-service = "2"
|
||||||
actix-http = { version = "3.4", features = ["http2", "ws"] }
|
actix-http = { version = "3.5", features = ["http2", "ws"] }
|
||||||
actix-rt = { version = "2.1", default-features = false }
|
actix-rt = { version = "2.1", default-features = false }
|
||||||
actix-tls = { version = "3.1", features = ["connect", "uri"] }
|
actix-tls = { version = "3.1", features = ["connect", "uri"] }
|
||||||
actix-utils = "3"
|
actix-utils = "3"
|
||||||
@ -91,10 +91,10 @@ tls-openssl = { package = "openssl", version = "0.10.55", optional = true }
|
|||||||
tls-rustls-0_20 = { package = "rustls", version = "0.20", optional = true, features = ["dangerous_configuration"] }
|
tls-rustls-0_20 = { package = "rustls", version = "0.20", optional = true, features = ["dangerous_configuration"] }
|
||||||
tls-rustls-0_21 = { package = "rustls", version = "0.21", optional = true, features = ["dangerous_configuration"] }
|
tls-rustls-0_21 = { package = "rustls", version = "0.21", optional = true, features = ["dangerous_configuration"] }
|
||||||
|
|
||||||
trust-dns-resolver = { version = "0.22", optional = true }
|
trust-dns-resolver = { version = "0.23", optional = true }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
actix-http = { version = "3.4", features = ["openssl"] }
|
actix-http = { version = "3.5", features = ["openssl"] }
|
||||||
actix-http-test = { version = "3", features = ["openssl"] }
|
actix-http-test = { version = "3", features = ["openssl"] }
|
||||||
actix-server = "2"
|
actix-server = "2"
|
||||||
actix-test = { version = "0.1", features = ["openssl", "rustls-0_21"] }
|
actix-test = { version = "0.1", features = ["openssl", "rustls-0_21"] }
|
||||||
@ -111,7 +111,7 @@ static_assertions = "1.1"
|
|||||||
rcgen = "0.11"
|
rcgen = "0.11"
|
||||||
rustls-pemfile = "1"
|
rustls-pemfile = "1"
|
||||||
tokio = { version = "1.24.2", features = ["rt-multi-thread", "macros"] }
|
tokio = { version = "1.24.2", features = ["rt-multi-thread", "macros"] }
|
||||||
zstd = "0.12"
|
zstd = "0.13"
|
||||||
|
|
||||||
[[example]]
|
[[example]]
|
||||||
name = "client"
|
name = "client"
|
||||||
|
@ -3,9 +3,9 @@
|
|||||||
> Async HTTP and WebSocket client library.
|
> Async HTTP and WebSocket client library.
|
||||||
|
|
||||||
[](https://crates.io/crates/awc)
|
[](https://crates.io/crates/awc)
|
||||||
[](https://docs.rs/awc/3.2.0)
|
[](https://docs.rs/awc/3.3.0)
|
||||||

|

|
||||||
[](https://deps.rs/crate/awc/3.2.0)
|
[](https://deps.rs/crate/awc/3.3.0)
|
||||||
[](https://discord.gg/NWpN5mmg3x)
|
[](https://discord.gg/NWpN5mmg3x)
|
||||||
|
|
||||||
## Documentation & Resources
|
## Documentation & Resources
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
#![allow(clippy::uninlined_format_args)]
|
|
||||||
|
|
||||||
use std::error::Error as StdError;
|
use std::error::Error as StdError;
|
||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
|
@ -909,13 +909,13 @@ mod resolver {
|
|||||||
None => {
|
None => {
|
||||||
let (cfg, opts) = match read_system_conf() {
|
let (cfg, opts) = match read_system_conf() {
|
||||||
Ok((cfg, opts)) => (cfg, opts),
|
Ok((cfg, opts)) => (cfg, opts),
|
||||||
Err(e) => {
|
Err(err) => {
|
||||||
log::error!("TRust-DNS can not load system config: {}", e);
|
log::error!("Trust-DNS can not load system config: {err}");
|
||||||
(ResolverConfig::default(), ResolverOpts::default())
|
(ResolverConfig::default(), ResolverOpts::default())
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let resolver = TokioAsyncResolver::tokio(cfg, opts).unwrap();
|
let resolver = TokioAsyncResolver::tokio(cfg, opts);
|
||||||
|
|
||||||
// box trust dns resolver and put it in thread local.
|
// box trust dns resolver and put it in thread local.
|
||||||
let resolver = Resolver::custom(TrustDnsResolver(resolver));
|
let resolver = Resolver::custom(TrustDnsResolver(resolver));
|
||||||
|
@ -56,7 +56,7 @@ where
|
|||||||
headers.insert(HOST, value);
|
headers.insert(HOST, value);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
Err(e) => log::error!("Can not set HOST header {}", e),
|
Err(err) => log::error!("Can not set HOST header {err}"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -106,9 +106,9 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
let res = poll_fn(|cx| io.poll_ready(cx)).await;
|
let res = poll_fn(|cx| io.poll_ready(cx)).await;
|
||||||
if let Err(e) = res {
|
if let Err(err) = res {
|
||||||
io.on_release(e.is_io());
|
io.on_release(err.is_io());
|
||||||
return Err(SendRequestError::from(e));
|
return Err(SendRequestError::from(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
let resp = match io.send_request(req, eof) {
|
let resp = match io.send_request(req, eof) {
|
||||||
@ -120,9 +120,9 @@ where
|
|||||||
}
|
}
|
||||||
fut.await.map_err(SendRequestError::from)?
|
fut.await.map_err(SendRequestError::from)?
|
||||||
}
|
}
|
||||||
Err(e) => {
|
Err(err) => {
|
||||||
io.on_release(e.is_io());
|
io.on_release(err.is_io());
|
||||||
return Err(e.into());
|
return Err(err.into());
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -169,8 +169,8 @@ where
|
|||||||
let len = b.len();
|
let len = b.len();
|
||||||
let bytes = b.split_to(std::cmp::min(cap, len));
|
let bytes = b.split_to(std::cmp::min(cap, len));
|
||||||
|
|
||||||
if let Err(e) = send.send_data(bytes, false) {
|
if let Err(err) = send.send_data(bytes, false) {
|
||||||
return Err(e.into());
|
return Err(err.into());
|
||||||
}
|
}
|
||||||
if !b.is_empty() {
|
if !b.is_empty() {
|
||||||
send.reserve_capacity(b.len());
|
send.reserve_capacity(b.len());
|
||||||
@ -179,7 +179,7 @@ where
|
|||||||
}
|
}
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
Some(Err(e)) => return Err(e.into()),
|
Some(Err(err)) => return Err(err.into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -105,8 +105,7 @@
|
|||||||
#![allow(
|
#![allow(
|
||||||
clippy::type_complexity,
|
clippy::type_complexity,
|
||||||
clippy::borrow_interior_mutable_const,
|
clippy::borrow_interior_mutable_const,
|
||||||
clippy::needless_doctest_main,
|
clippy::needless_doctest_main
|
||||||
clippy::uninlined_format_args
|
|
||||||
)]
|
)]
|
||||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||||
|
@ -83,7 +83,7 @@ impl ClientRequest {
|
|||||||
{
|
{
|
||||||
match Uri::try_from(uri) {
|
match Uri::try_from(uri) {
|
||||||
Ok(uri) => self.head.uri = uri,
|
Ok(uri) => self.head.uri = uri,
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
}
|
}
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@ -152,7 +152,7 @@ impl ClientRequest {
|
|||||||
Ok((key, value)) => {
|
Ok((key, value)) => {
|
||||||
self.head.headers.insert(key, value);
|
self.head.headers.insert(key, value);
|
||||||
}
|
}
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
};
|
};
|
||||||
|
|
||||||
self
|
self
|
||||||
@ -166,7 +166,7 @@ impl ClientRequest {
|
|||||||
self.head.headers.insert(key, value);
|
self.head.headers.insert(key, value);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
};
|
};
|
||||||
|
|
||||||
self
|
self
|
||||||
@ -185,7 +185,7 @@ impl ClientRequest {
|
|||||||
pub fn append_header(mut self, header: impl TryIntoHeaderPair) -> Self {
|
pub fn append_header(mut self, header: impl TryIntoHeaderPair) -> Self {
|
||||||
match header.try_into_pair() {
|
match header.try_into_pair() {
|
||||||
Ok((key, value)) => self.head.headers.append(key, value),
|
Ok((key, value)) => self.head.headers.append(key, value),
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
};
|
};
|
||||||
|
|
||||||
self
|
self
|
||||||
@ -217,7 +217,7 @@ impl ClientRequest {
|
|||||||
Ok(value) => {
|
Ok(value) => {
|
||||||
self.head.headers.insert(header::CONTENT_TYPE, value);
|
self.head.headers.insert(header::CONTENT_TYPE, value);
|
||||||
}
|
}
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
}
|
}
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@ -299,7 +299,7 @@ impl ClientRequest {
|
|||||||
|
|
||||||
match Uri::from_parts(parts) {
|
match Uri::from_parts(parts) {
|
||||||
Ok(uri) => self.head.uri = uri,
|
Ok(uri) => self.head.uri = uri,
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -311,7 +311,7 @@ impl ClientRequest {
|
|||||||
pub fn freeze(self) -> Result<FrozenClientRequest, FreezeRequestError> {
|
pub fn freeze(self) -> Result<FrozenClientRequest, FreezeRequestError> {
|
||||||
let slf = match self.prep_for_sending() {
|
let slf = match self.prep_for_sending() {
|
||||||
Ok(slf) => slf,
|
Ok(slf) => slf,
|
||||||
Err(e) => return Err(e.into()),
|
Err(err) => return Err(err.into()),
|
||||||
};
|
};
|
||||||
|
|
||||||
let request = FrozenClientRequest {
|
let request = FrozenClientRequest {
|
||||||
@ -332,7 +332,7 @@ impl ClientRequest {
|
|||||||
{
|
{
|
||||||
let slf = match self.prep_for_sending() {
|
let slf = match self.prep_for_sending() {
|
||||||
Ok(slf) => slf,
|
Ok(slf) => slf,
|
||||||
Err(e) => return e.into(),
|
Err(err) => return err.into(),
|
||||||
};
|
};
|
||||||
|
|
||||||
RequestSender::Owned(slf.head).send_body(
|
RequestSender::Owned(slf.head).send_body(
|
||||||
@ -348,7 +348,7 @@ impl ClientRequest {
|
|||||||
pub fn send_json<T: Serialize>(self, value: &T) -> SendClientRequest {
|
pub fn send_json<T: Serialize>(self, value: &T) -> SendClientRequest {
|
||||||
let slf = match self.prep_for_sending() {
|
let slf = match self.prep_for_sending() {
|
||||||
Ok(slf) => slf,
|
Ok(slf) => slf,
|
||||||
Err(e) => return e.into(),
|
Err(err) => return err.into(),
|
||||||
};
|
};
|
||||||
|
|
||||||
RequestSender::Owned(slf.head).send_json(
|
RequestSender::Owned(slf.head).send_json(
|
||||||
@ -366,7 +366,7 @@ impl ClientRequest {
|
|||||||
pub fn send_form<T: Serialize>(self, value: &T) -> SendClientRequest {
|
pub fn send_form<T: Serialize>(self, value: &T) -> SendClientRequest {
|
||||||
let slf = match self.prep_for_sending() {
|
let slf = match self.prep_for_sending() {
|
||||||
Ok(slf) => slf,
|
Ok(slf) => slf,
|
||||||
Err(e) => return e.into(),
|
Err(err) => return err.into(),
|
||||||
};
|
};
|
||||||
|
|
||||||
RequestSender::Owned(slf.head).send_form(
|
RequestSender::Owned(slf.head).send_form(
|
||||||
@ -386,7 +386,7 @@ impl ClientRequest {
|
|||||||
{
|
{
|
||||||
let slf = match self.prep_for_sending() {
|
let slf = match self.prep_for_sending() {
|
||||||
Ok(slf) => slf,
|
Ok(slf) => slf,
|
||||||
Err(e) => return e.into(),
|
Err(err) => return err.into(),
|
||||||
};
|
};
|
||||||
|
|
||||||
RequestSender::Owned(slf.head).send_stream(
|
RequestSender::Owned(slf.head).send_stream(
|
||||||
@ -402,7 +402,7 @@ impl ClientRequest {
|
|||||||
pub fn send(self) -> SendClientRequest {
|
pub fn send(self) -> SendClientRequest {
|
||||||
let slf = match self.prep_for_sending() {
|
let slf = match self.prep_for_sending() {
|
||||||
Ok(slf) => slf,
|
Ok(slf) => slf,
|
||||||
Err(e) => return e.into(),
|
Err(err) => return err.into(),
|
||||||
};
|
};
|
||||||
|
|
||||||
RequestSender::Owned(slf.head).send(
|
RequestSender::Owned(slf.head).send(
|
||||||
|
@ -122,8 +122,8 @@ impl Future for SendClientRequest {
|
|||||||
|
|
||||||
Poll::Ready(res)
|
Poll::Ready(res)
|
||||||
}
|
}
|
||||||
SendClientRequest::Err(ref mut e) => match e.take() {
|
SendClientRequest::Err(ref mut err) => match err.take() {
|
||||||
Some(e) => Poll::Ready(Err(e)),
|
Some(err) => Poll::Ready(Err(err)),
|
||||||
None => panic!("Attempting to call completed future"),
|
None => panic!("Attempting to call completed future"),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@ -147,8 +147,8 @@ impl Future for SendClientRequest {
|
|||||||
.poll(cx)
|
.poll(cx)
|
||||||
.map_ok(|res| res.into_client_response()._timeout(delay.take()))
|
.map_ok(|res| res.into_client_response()._timeout(delay.take()))
|
||||||
}
|
}
|
||||||
SendClientRequest::Err(ref mut e) => match e.take() {
|
SendClientRequest::Err(ref mut err) => match err.take() {
|
||||||
Some(e) => Poll::Ready(Err(e)),
|
Some(err) => Poll::Ready(Err(err)),
|
||||||
None => panic!("Attempting to call completed future"),
|
None => panic!("Attempting to call completed future"),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@ -219,8 +219,8 @@ impl RequestSender {
|
|||||||
Err(err) => return PrepForSendingError::Json(err).into(),
|
Err(err) => return PrepForSendingError::Json(err).into(),
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Err(e) = self.set_header_if_none(header::CONTENT_TYPE, "application/json") {
|
if let Err(err) = self.set_header_if_none(header::CONTENT_TYPE, "application/json") {
|
||||||
return e.into();
|
return err.into();
|
||||||
}
|
}
|
||||||
|
|
||||||
self.send_body(addr, response_decompress, timeout, config, body)
|
self.send_body(addr, response_decompress, timeout, config, body)
|
||||||
@ -291,7 +291,7 @@ impl RequestSender {
|
|||||||
Ok(value) => {
|
Ok(value) => {
|
||||||
head.headers.insert(key, value);
|
head.headers.insert(key, value);
|
||||||
}
|
}
|
||||||
Err(e) => return Err(e.into()),
|
Err(err) => return Err(err.into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -304,7 +304,7 @@ impl RequestSender {
|
|||||||
let h = extra_headers.get_or_insert(HeaderMap::new());
|
let h = extra_headers.get_or_insert(HeaderMap::new());
|
||||||
h.insert(key, v)
|
h.insert(key, v)
|
||||||
}
|
}
|
||||||
Err(e) => return Err(e.into()),
|
Err(err) => return Err(err.into()),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -64,7 +64,7 @@ pub struct WebsocketsRequest {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl WebsocketsRequest {
|
impl WebsocketsRequest {
|
||||||
/// Create new WebSocket connection
|
/// Create new WebSocket connection.
|
||||||
pub(crate) fn new<U>(uri: U, config: ClientConfig) -> Self
|
pub(crate) fn new<U>(uri: U, config: ClientConfig) -> Self
|
||||||
where
|
where
|
||||||
Uri: TryFrom<U>,
|
Uri: TryFrom<U>,
|
||||||
@ -82,7 +82,7 @@ impl WebsocketsRequest {
|
|||||||
|
|
||||||
match Uri::try_from(uri) {
|
match Uri::try_from(uri) {
|
||||||
Ok(uri) => head.uri = uri,
|
Ok(uri) => head.uri = uri,
|
||||||
Err(e) => err = Some(e.into()),
|
Err(error) => err = Some(error.into()),
|
||||||
}
|
}
|
||||||
|
|
||||||
WebsocketsRequest {
|
WebsocketsRequest {
|
||||||
@ -143,7 +143,7 @@ impl WebsocketsRequest {
|
|||||||
{
|
{
|
||||||
match HeaderValue::try_from(origin) {
|
match HeaderValue::try_from(origin) {
|
||||||
Ok(value) => self.origin = Some(value),
|
Ok(value) => self.origin = Some(value),
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
}
|
}
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@ -177,9 +177,9 @@ impl WebsocketsRequest {
|
|||||||
Ok(value) => {
|
Ok(value) => {
|
||||||
self.head.headers.append(key, value);
|
self.head.headers.append(key, value);
|
||||||
}
|
}
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
},
|
},
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
}
|
}
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@ -196,9 +196,9 @@ impl WebsocketsRequest {
|
|||||||
Ok(value) => {
|
Ok(value) => {
|
||||||
self.head.headers.insert(key, value);
|
self.head.headers.insert(key, value);
|
||||||
}
|
}
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
},
|
},
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
}
|
}
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@ -217,11 +217,11 @@ impl WebsocketsRequest {
|
|||||||
Ok(value) => {
|
Ok(value) => {
|
||||||
self.head.headers.insert(key, value);
|
self.head.headers.insert(key, value);
|
||||||
}
|
}
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
}
|
}
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
#![allow(clippy::uninlined_format_args)]
|
|
||||||
|
|
||||||
use std::{
|
use std::{
|
||||||
collections::HashMap,
|
collections::HashMap,
|
||||||
convert::Infallible,
|
convert::Infallible,
|
||||||
|
@ -5,7 +5,7 @@
|
|||||||
|
|
||||||
# requires github cli tool for automatic release draft creation
|
# requires github cli tool for automatic release draft creation
|
||||||
|
|
||||||
set -euo pipefail
|
set -eEuo pipefail
|
||||||
|
|
||||||
DIR=$1
|
DIR=$1
|
||||||
|
|
||||||
@ -93,9 +93,12 @@ fi
|
|||||||
|
|
||||||
# done; remove backup files
|
# done; remove backup files
|
||||||
rm -f $CARGO_MANIFEST.bak
|
rm -f $CARGO_MANIFEST.bak
|
||||||
rm -f $CHANGELOG_FILE.bak
|
|
||||||
rm -f $README_FILE.bak
|
rm -f $README_FILE.bak
|
||||||
|
|
||||||
|
if [ -n "${CHANGELOG_FILE-}" ]; then
|
||||||
|
rm -f $CHANGELOG_FILE.bak
|
||||||
|
fi
|
||||||
|
|
||||||
echo "manifest, changelog, and readme updated"
|
echo "manifest, changelog, and readme updated"
|
||||||
echo
|
echo
|
||||||
echo "check other references:"
|
echo "check other references:"
|
||||||
@ -129,6 +132,7 @@ fi
|
|||||||
|
|
||||||
if [ $MACOS ]; then
|
if [ $MACOS ]; then
|
||||||
printf "chore($PACKAGE_NAME): prepare release $NEW_VERSION" | pbcopy
|
printf "chore($PACKAGE_NAME): prepare release $NEW_VERSION" | pbcopy
|
||||||
|
echo "placed the recommended commit message on the clipboard"
|
||||||
else
|
else
|
||||||
echo
|
echo
|
||||||
echo "commit message:"
|
echo "commit message:"
|
||||||
|