Mirror of https://github.com/fafhrd91/actix-web (synced 2024-12-03 20:02:12 +01:00)

Commit dda31217db: Merge branch 'master' into pr/topenkoff/2948
@@ -1,17 +1,10 @@
 [alias]
-lint = "clippy --workspace --tests --examples --bins -- -Dclippy::todo"
-lint-all = "clippy --workspace --all-features --tests --examples --bins -- -Dclippy::todo"
+lint = "clippy --workspace --all-targets -- -Dclippy::todo"
+lint-all = "clippy --workspace --all-features --all-targets -- -Dclippy::todo"

 # lib checking
 ci-check-min = "hack --workspace check --no-default-features"
 ci-check-default = "hack --workspace check"
 ci-check-default-tests = "check --workspace --tests"
-ci-check-all-feature-powerset="hack --workspace --feature-powerset --skip=__compress,experimental-io-uring check"
-ci-check-all-feature-powerset-linux="hack --workspace --feature-powerset --skip=__compress check"
+ci-check-all-feature-powerset="hack --workspace --feature-powerset --depth=4 --skip=__compress,experimental-io-uring check"
+ci-check-all-feature-powerset-linux="hack --workspace --feature-powerset --depth=4 --skip=__compress check"
-
-# testing
-ci-doctest-default = "test --workspace --doc --no-fail-fast -- --nocapture"
-ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture"
-
-# compile docs as docs.rs would
-# RUSTDOCFLAGS="--cfg=docsrs" cargo +nightly doc --no-deps --workspace
.github/ISSUE_TEMPLATE/bug_report.md (10 lines changed)

@@ -3,34 +3,40 @@ name: Bug Report
 about: Create a bug report.
 ---

-Your issue may already be reported!
-Please search on the [Actix Web issue tracker](https://github.com/actix/actix-web/issues) before creating one.
+Your issue may already be reported! Please search on the [Actix Web issue tracker](https://github.com/actix/actix-web/issues) before creating one.

 ## Expected Behavior

 <!--- If you're describing a bug, tell us what should happen -->
 <!--- If you're suggesting a change/improvement, tell us how it should work -->

 ## Current Behavior

 <!--- If describing a bug, tell us what happens instead of the expected behavior -->
 <!--- If suggesting a change/improvement, explain the difference from current behavior -->

 ## Possible Solution

 <!--- Not obligatory, but suggest a fix/reason for the bug, -->
 <!--- or ideas how to implement the addition or change -->

 ## Steps to Reproduce (for bugs)

 <!--- Provide a link to a live example, or an unambiguous set of steps to -->
 <!--- reproduce this bug. Include code to reproduce, if relevant -->

 1.
 2.
 3.
 4.

 ## Context

 <!--- How has this issue affected you? What are you trying to accomplish? -->
 <!--- Providing context helps us come up with a solution that is most useful in the real world -->

 ## Your Environment

 <!--- Include as many relevant details about the environment you experienced the bug in -->

 - Rust Version (I.e, output of `rustc -V`):
.github/PULL_REQUEST_TEMPLATE.md (7 lines changed; the lines shown are unchanged context)

@@ -2,12 +2,14 @@
 <!-- Please fill out the following to get your PR reviewed quicker. -->

 ## PR Type

 <!-- What kind of change does this PR make? -->
 <!-- Bug Fix / Feature / Refactor / Code Style / Other -->

 PR_TYPE

 ## PR Checklist

 <!-- Check your PR fulfills the following items. -->
 <!-- For draft PRs check the boxes as you complete them. -->

@@ -17,11 +19,10 @@ PR_TYPE
 - [ ] Format code with the latest stable rustfmt.
 - [ ] (Team) Label with affected crates and semver status.

 ## Overview

 <!-- Describe the current and new behavior. -->
 <!-- Emphasize any breaking changes. -->

 <!-- If this PR fixes or closes an issue, reference it here. -->
 <!-- Closes #000 -->
.github/dependabot.yml (new file, 10 lines)

@@ -0,0 +1,10 @@
+version: 2
+updates:
+  - package-ecosystem: cargo
+    directory: /
+    schedule:
+      interval: weekly
+  - package-ecosystem: github-actions
+    directory: /
+    schedule:
+      interval: weekly
.github/workflows/bench.yml (25 lines changed)

@@ -2,28 +2,27 @@ name: Benchmark

 on:
   push:
-    branches:
-      - master
+    branches: [master]

 permissions:
-  contents: read # to fetch code (actions/checkout)
+  contents: read
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true

 jobs:
   check_benchmark:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

       - name: Install Rust
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: nightly
-          profile: minimal
-          override: true
+        run: |
+          rustup set profile minimal
+          rustup install nightly
+          rustup override set nightly

       - name: Check benchmark
-        uses: actions-rs/cargo@v1
-        with:
-          command: bench
-          args: --bench=server -- --sample-size=15
+        run: cargo bench --bench=server -- --sample-size=15
.github/workflows/ci-post-merge.yml (129 lines changed)

@@ -5,132 +5,87 @@ on:
     branches: [master]

 permissions:
-  contents: read # to fetch code (actions/checkout)
+  contents: read
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true

 jobs:
   build_and_test_nightly:
     strategy:
       fail-fast: false
       matrix:
+        # prettier-ignore
         target:
           - { name: Linux, os: ubuntu-latest, triple: x86_64-unknown-linux-gnu }
           - { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
-          - { name: Windows, os: windows-2022, triple: x86_64-pc-windows-msvc }
+          - { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc }
         version:
-          - nightly
+          - { name: nightly, version: nightly }

-    name: ${{ matrix.target.name }} / ${{ matrix.version }}
+    name: ${{ matrix.target.name }} / ${{ matrix.version.name }}
     runs-on: ${{ matrix.target.os }}

-    env:
-      CI: 1
-      CARGO_INCREMENTAL: 0
-      VCPKGRS_DYNAMIC: 1
-      CARGO_UNSTABLE_SPARSE_REGISTRY: true
-
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

+      - name: Install nasm
+        if: matrix.target.os == 'windows-latest'
+        uses: ilammy/setup-nasm@v1.5.1
+
-      # install OpenSSL on Windows
-      # TODO: GitHub actions docs state that OpenSSL is
-      # already installed on these Windows machines somewhere
-      - name: Set vcpkg root
-        if: matrix.target.triple == 'x86_64-pc-windows-msvc'
-        run: echo "VCPKG_ROOT=$env:VCPKG_INSTALLATION_ROOT" | Out-File -FilePath $env:GITHUB_ENV -Append
       - name: Install OpenSSL
-        if: matrix.target.triple == 'x86_64-pc-windows-msvc'
-        run: vcpkg install openssl:x64-windows
+        if: matrix.target.os == 'windows-latest'
+        shell: bash
+        run: |
+          set -e
+          choco install openssl --version=1.1.1.2100 -y --no-progress
+          echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' >> $GITHUB_ENV
+          echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV

-      - name: Install ${{ matrix.version }}
-        uses: actions-rs/toolchain@v1
+      - name: Install Rust (${{ matrix.version.name }})
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
         with:
-          toolchain: ${{ matrix.version }}-${{ matrix.target.triple }}
-          profile: minimal
-          override: true
+          toolchain: ${{ matrix.version.version }}

-      - name: Install cargo-hack
-        uses: taiki-e/install-action@cargo-hack
-
-      - name: Generate Cargo.lock
-        uses: actions-rs/cargo@v1
-        with: { command: generate-lockfile }
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@v1.2.0
+      - name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
+        uses: taiki-e/install-action@v2.38.0
+        with:
+          tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean

       - name: check minimal
-        uses: actions-rs/cargo@v1
-        with: { command: ci-check-min }
+        run: cargo ci-check-min

       - name: check default
-        uses: actions-rs/cargo@v1
-        with: { command: ci-check-default }
+        run: cargo ci-check-default

       - name: tests
         timeout-minutes: 60
-        run: |
-          cargo test --lib --tests -p=actix-router --all-features
-          cargo test --lib --tests -p=actix-http --all-features
-          cargo test --lib --tests -p=actix-web --features=rustls,openssl -- --skip=test_reading_deflate_encoding_large_random_rustls
-          cargo test --lib --tests -p=actix-web-codegen --all-features
-          cargo test --lib --tests -p=awc --all-features
-          cargo test --lib --tests -p=actix-http-test --all-features
-          cargo test --lib --tests -p=actix-test --all-features
-          cargo test --lib --tests -p=actix-files
-          cargo test --lib --tests -p=actix-multipart --all-features
-          cargo test --lib --tests -p=actix-web-actors --all-features
+        run: just test

-      - name: Clear the cargo caches
-        run: |
-          cargo install cargo-cache --version 0.8.2 --no-default-features --features ci-autoclean
-          cargo-cache
+      - name: CI cache clean
+        run: cargo-ci-cache-clean

   ci_feature_powerset_check:
     name: Verify Feature Combinations
     runs-on: ubuntu-latest

-    env:
-      CI: 1
-      CARGO_INCREMENTAL: 0
-
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

-      - uses: dtolnay/rust-toolchain@stable
+      - name: Free Disk Space
+        run: ./scripts/free-disk-space.sh
+
+      - name: Install Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0

       - name: Install cargo-hack
-        uses: taiki-e/install-action@cargo-hack
-
-      - name: Generate Cargo.lock
-        run: cargo generate-lockfile
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@v1.2.0
+        uses: taiki-e/install-action@v2.38.0
+        with:
+          tool: cargo-hack

       - name: check feature combinations
         run: cargo ci-check-all-feature-powerset

       - name: check feature combinations
         run: cargo ci-check-all-feature-powerset-linux
-
-  nextest:
-    name: nextest
-    runs-on: ubuntu-latest
-
-    env:
-      CI: 1
-      CARGO_INCREMENTAL: 0
-
-    steps:
-      - uses: actions/checkout@v2
-
-      - uses: dtolnay/rust-toolchain@stable
-
-      - name: Install nextest
-        uses: taiki-e/install-action@nextest
-
-      - name: Generate Cargo.lock
-        run: cargo generate-lockfile
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@v1.3.0
-
-      - name: Test with cargo-nextest
-        run: cargo nextest run
.github/workflows/ci.yml (144 lines changed)

@@ -3,135 +3,119 @@ name: CI
 on:
   pull_request:
     types: [opened, synchronize, reopened]
+  merge_group:
+    types: [checks_requested]
   push:
     branches: [master]

 permissions:
-  contents: read # to fetch code (actions/checkout)
+  contents: read
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true

 jobs:
+  read_msrv:
+    name: Read MSRV
+    uses: actions-rust-lang/msrv/.github/workflows/msrv.yml@v0.1.0
+
   build_and_test:
+    needs: read_msrv
+
     strategy:
       fail-fast: false
       matrix:
+        # prettier-ignore
         target:
           - { name: Linux, os: ubuntu-latest, triple: x86_64-unknown-linux-gnu }
           - { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
-          - { name: Windows, os: windows-2022, triple: x86_64-pc-windows-msvc }
+          - { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc }
         version:
-          - 1.59.0 # MSRV
-          - stable
+          - { name: msrv, version: "${{ needs.read_msrv.outputs.msrv }}" }
+          - { name: stable, version: stable }

-    name: ${{ matrix.target.name }} / ${{ matrix.version }}
+    name: ${{ matrix.target.name }} / ${{ matrix.version.name }}
     runs-on: ${{ matrix.target.os }}

-    env:
-      CI: 1
-      CARGO_INCREMENTAL: 0
-      VCPKGRS_DYNAMIC: 1
-
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

+      - name: Install nasm
+        if: matrix.target.os == 'windows-latest'
+        uses: ilammy/setup-nasm@v1.5.1
+
-      # install OpenSSL on Windows
-      # TODO: GitHub actions docs state that OpenSSL is
-      # already installed on these Windows machines somewhere
-      - name: Set vcpkg root
-        if: matrix.target.triple == 'x86_64-pc-windows-msvc'
-        run: echo "VCPKG_ROOT=$env:VCPKG_INSTALLATION_ROOT" | Out-File -FilePath $env:GITHUB_ENV -Append
       - name: Install OpenSSL
-        if: matrix.target.triple == 'x86_64-pc-windows-msvc'
-        run: vcpkg install openssl:x64-windows
+        if: matrix.target.os == 'windows-latest'
+        shell: bash
+        run: |
+          set -e
+          choco install openssl --version=1.1.1.2100 -y --no-progress
+          echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' >> $GITHUB_ENV
+          echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV

-      - name: Install ${{ matrix.version }}
-        uses: actions-rs/toolchain@v1
+      - name: Setup mold linker
+        if: matrix.target.os == 'ubuntu-latest'
+        uses: rui314/setup-mold@v1
+
+      - name: Install Rust (${{ matrix.version.name }})
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
         with:
-          toolchain: ${{ matrix.version }}-${{ matrix.target.triple }}
-          profile: minimal
-          override: true
+          toolchain: ${{ matrix.version.version }}

-      - name: Install cargo-hack
-        uses: taiki-e/install-action@cargo-hack
+      - name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
+        uses: taiki-e/install-action@v2.38.0
+        with:
+          tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean

       - name: workaround MSRV issues
-        if: matrix.version != 'stable'
-        run: |
-          cargo install cargo-edit --version=0.8.0
-          cargo add const-str@0.3 --dev -p=actix-web
-          cargo add const-str@0.3 --dev -p=awc
-
-      - name: Generate Cargo.lock
-        uses: actions-rs/cargo@v1
-        with: { command: generate-lockfile }
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@v1.2.0
-
-      - name: workaround MSRV issues
-        if: matrix.version != 'stable'
-        run: |
-          cargo update -p=zstd-sys --precise=2.0.1+zstd.1.5.2
+        if: matrix.version.name == 'msrv'
+        run: just downgrade-for-msrv

       - name: check minimal
-        uses: actions-rs/cargo@v1
-        with: { command: ci-check-min }
+        run: cargo ci-check-min

       - name: check default
-        uses: actions-rs/cargo@v1
-        with: { command: ci-check-default }
+        run: cargo ci-check-default

       - name: tests
         timeout-minutes: 60
-        run: |
-          cargo test --lib --tests -p=actix-router --all-features
-          cargo test --lib --tests -p=actix-http --all-features
-          cargo test --lib --tests -p=actix-web --features=rustls,openssl -- --skip=test_reading_deflate_encoding_large_random_rustls
-          cargo test --lib --tests -p=actix-web-codegen --all-features
-          cargo test --lib --tests -p=awc --all-features
-          cargo test --lib --tests -p=actix-http-test --all-features
-          cargo test --lib --tests -p=actix-test --all-features
-          cargo test --lib --tests -p=actix-files
-          cargo test --lib --tests -p=actix-multipart --all-features
-          cargo test --lib --tests -p=actix-web-actors --all-features
+        run: just test

-      - name: Clear the cargo caches
-        run: |
-          cargo install cargo-cache --version 0.8.2 --no-default-features --features ci-autoclean
-          cargo-cache
+      - name: CI cache clean
+        run: cargo-ci-cache-clean

   io-uring:
     name: io-uring tests
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

-      - uses: dtolnay/rust-toolchain@stable
+      - name: Install Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+        with:
+          toolchain: nightly

-      - name: Generate Cargo.lock
-        run: cargo generate-lockfile
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@v1.3.0
-
       - name: tests (io-uring)
         timeout-minutes: 60
         run: >
-          sudo bash -c "ulimit -Sl 512
-          && ulimit -Hl 512
-          && PATH=$PATH:/usr/share/rust/.cargo/bin
-          && RUSTUP_TOOLCHAIN=stable cargo test --lib --tests -p=actix-files --all-features"
+          sudo bash -c "ulimit -Sl 512 && ulimit -Hl 512 && PATH=$PATH:/usr/share/rust/.cargo/bin && RUSTUP_TOOLCHAIN=stable cargo test --lib --tests -p=actix-files --all-features"

   rustdoc:
     name: doc tests
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

-      - uses: dtolnay/rust-toolchain@nightly
+      - name: Install Rust (nightly)
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+        with:
+          toolchain: nightly

-      - name: Generate Cargo.lock
-        run: cargo generate-lockfile
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@v1.3.0
+      - name: Install just
+        uses: taiki-e/install-action@v2.38.0
+        with:
+          tool: just

       - name: doc tests
-        run: cargo ci-doctest
-        timeout-minutes: 60
+        run: just test-docs
.github/workflows/clippy-fmt.yml (deleted, 49 lines)

@@ -1,49 +0,0 @@
-name: Lint
-
-on:
-  pull_request:
-    types: [opened, synchronize, reopened]
-
-jobs:
-  fmt:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: dtolnay/rust-toolchain@nightly
-        with: { components: rustfmt }
-      - run: cargo fmt --all -- --check
-
-  clippy:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-
-      - uses: dtolnay/rust-toolchain@stable
-        with: { components: clippy }
-
-      - name: Generate Cargo.lock
-        run: cargo generate-lockfile
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@v1.2.0
-
-      - name: Check with Clippy
-        uses: actions-rs/clippy-check@v1
-        with:
-          args: --workspace --tests --examples --all-features
-          token: ${{ secrets.GITHUB_TOKEN }}
-
-  lint-docs:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-
-      - uses: dtolnay/rust-toolchain@stable
-        with: { components: rust-docs }
-
-      - name: Check for broken intra-doc links
-        uses: actions-rs/cargo@v1
-        env:
-          RUSTDOCFLAGS: "-D warnings"
-        with:
-          command: doc
-          args: --no-deps --all-features --workspace
.github/workflows/coverage.yml (48 lines changed)

@@ -1,36 +1,40 @@
-# disabled because `cargo tarpaulin` currently segfaults
-
 name: Coverage

 on:
   push:
     branches: [master]

+permissions:
+  contents: read
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
-  # job currently (1st Feb 2022) segfaults
   coverage:
-    name: coverage
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

-      - name: Install stable
-        uses: actions-rs/toolchain@v1
+      - name: Install Rust (nightly)
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
         with:
-          toolchain: stable-x86_64-unknown-linux-gnu
-          profile: minimal
-          override: true
+          toolchain: nightly
+          components: llvm-tools

-      - name: Generate Cargo.lock
-        uses: actions-rs/cargo@v1
-        with: { command: generate-lockfile }
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@v1.2.0
+      - name: Install just, cargo-llvm-cov, cargo-nextest
+        uses: taiki-e/install-action@v2.38.0
+        with:
+          tool: just,cargo-llvm-cov,cargo-nextest

-      - name: Generate coverage file
-        run: |
-          cargo install cargo-tarpaulin --vers "^0.13"
-          cargo tarpaulin --workspace --features=rustls,openssl --out Xml --verbose
-      - name: Upload to Codecov
-        uses: codecov/codecov-action@v1
-        with: { file: cobertura.xml }
+      - name: Generate code coverage
+        run: just test-coverage-codecov
+
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v4.4.1
+        with:
+          files: codecov.json
+          fail_ci_if_error: true
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/lint.yml (new file, 116 lines)

@@ -0,0 +1,116 @@
+name: Lint
+
+on:
+  pull_request:
+    types: [opened, synchronize, reopened]
+
+permissions:
+  contents: read
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  fmt:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rust (nightly)
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+        with:
+          toolchain: nightly
+          components: rustfmt
+
+      - name: Check with Rustfmt
+        run: cargo fmt --all -- --check
+
+  clippy:
+    permissions:
+      contents: read
+      checks: write # to add clippy checks to PR diffs
+
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+        with:
+          components: clippy
+
+      - name: Check with Clippy
+        uses: giraffate/clippy-action@v1.0.1
+        with:
+          reporter: github-pr-check
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          clippy_flags: >-
+            --workspace --all-features --tests --examples --bins --
+            -A unknown_lints -D clippy::todo -D clippy::dbg_macro
+
+  lint-docs:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rust (nightly)
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+        with:
+          toolchain: nightly
+          components: rust-docs
+
+      - name: Check for broken intra-doc links
+        env:
+          RUSTDOCFLAGS: -D warnings
+        run: cargo +nightly doc --no-deps --workspace --all-features
+
+  check-external-types:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rust (nightly-2024-05-01)
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+        with:
+          toolchain: nightly-2024-05-01
+
+      - name: Install just
+        uses: taiki-e/install-action@v2.38.0
+        with:
+          tool: just
+
+      - name: Install cargo-check-external-types
+        uses: taiki-e/cache-cargo-install-action@v1.2.2
+        with:
+          tool: cargo-check-external-types
+
+      - name: check external types
+        run: just check-external-types-all +nightly-2024-05-01
+
+  public-api-diff:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout main branch
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ github.base_ref }}
+
+      - name: Checkout PR branch
+        uses: actions/checkout@v4
+
+      - name: Install Rust (nightly-2024-06-07)
+        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+        with:
+          toolchain: nightly-2024-06-07
+
+      - name: Install cargo-public-api
+        uses: taiki-e/install-action@v2.38.0
+        with:
+          tool: cargo-public-api
+
+      - name: Generate API diff
+        run: |
+          for f in $(find -mindepth 2 -maxdepth 2 -name Cargo.toml); do
+            cargo public-api --manifest-path "$f" --simplified diff ${{ github.event.pull_request.base.sha }}..${{ github.sha }}
+          done
.github/workflows/upload-doc.yml (deleted, 32 lines)

@@ -1,32 +0,0 @@
-name: Upload Documentation
-
-on:
-  push:
-    branches: [master]
-
-permissions: {}
-jobs:
-  build:
-    permissions:
-      contents: write # to push changes in repo (jamesives/github-pages-deploy-action)
-
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v2
-
-      - uses: dtolnay/rust-toolchain@nightly
-
-      - name: Build Docs
-        run: cargo +nightly doc --no-deps --workspace --all-features
-        env:
-          RUSTDOCFLAGS: --cfg=docsrs
-
-      - name: Tweak HTML
-        run: echo '<meta http-equiv="refresh" content="0;url=actix_web/index.html">' > target/doc/index.html
-
-      - name: Deploy to GitHub Pages
-        uses: JamesIves/github-pages-deploy-action@v4.4.1
-        with:
-          folder: target/doc
-          single-commit: true
.gitignore (4 lines changed)

@@ -19,3 +19,7 @@ guide/build/

 # Configuration directory generated by VSCode
 .vscode
+
+# code coverage
+/lcov.info
+/codecov.json
.prettierrc (deleted, 3 lines)

@@ -1,3 +0,0 @@
-{
-  "proseWrap": "never"
-}
.prettierrc.yml (new file, 5 lines)

@@ -0,0 +1,5 @@
+overrides:
+  - files: "*.md"
+    options:
+      printWidth: 9999
+      proseWrap: never
.rustfmt.toml (new file, 3 lines)

@@ -0,0 +1,3 @@
+group_imports = "StdExternalCrate"
+imports_granularity = "Crate"
+use_field_init_shorthand = true
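For reference, a small sketch of what these rustfmt settings produce (hypothetical items, not part of this commit): `group_imports = "StdExternalCrate"` orders `use` items into three blocks (std/core/alloc, then external crates, then `crate::`/`self::`/`super::`), `imports_granularity = "Crate"` merges imports from the same crate into one `use` item, and `use_field_init_shorthand = true` prefers `Point { x, y }` over `Point { x: x, y: y }`.

```rust
// Illustrative only: with imports_granularity = "Crate", rustfmt merges
// `use std::fmt::Write;` and `use std::path::PathBuf;` into one item.
use std::{fmt::Write as _, path::PathBuf};

struct Point {
    x: i32,
    y: i32,
}

fn make_point(x: i32, y: i32) -> Point {
    // Field init shorthand: `x` instead of `x: x`.
    Point { x, y }
}

fn main() {
    let p = make_point(1, 2);
    let mut out = String::new();
    write!(out, "{}", PathBuf::from("/tmp").display()).unwrap();
    println!("({}, {}) {}", p.x, p.y, out);
}
```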
@@ -5,6 +5,7 @@ members = [
   "actix-http-test",
   "actix-http",
   "actix-multipart",
+  "actix-multipart-derive",
   "actix-router",
   "actix-test",
   "actix-web-actors",
@@ -13,6 +14,13 @@ members = [
   "awc",
 ]

+[workspace.package]
+homepage = "https://actix.rs"
+repository = "https://github.com/actix/actix-web"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.72"
+
 [profile.dev]
 # Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.
 debug = 0
@@ -27,6 +35,7 @@ actix-files = { path = "actix-files" }
 actix-http = { path = "actix-http" }
 actix-http-test = { path = "actix-http-test" }
 actix-multipart = { path = "actix-multipart" }
+actix-multipart-derive = { path = "actix-multipart-derive" }
 actix-router = { path = "actix-router" }
 actix-test = { path = "actix-test" }
 actix-web = { path = "actix-web" }

@@ -1,19 +1,38 @@
 # Changes

-## Unreleased - 2022-xx-xx
+## Unreleased
+
+## 0.6.6
+
+- Update `tokio-uring` dependency to `0.4`.
+- Minimum supported Rust version (MSRV) is now 1.72.
+
+## 0.6.5
+
+- Fix handling of special characters in filenames.
+
+## 0.6.4
+
+- Fix handling of newlines in filenames.
+- Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.
+
+## 0.6.3

 - XHTML files now use `Content-Disposition: inline` instead of `attachment`. [#2903]
 - Minimum supported Rust version (MSRV) is now 1.59 due to transitive `time` dependency.
+- Update `tokio-uring` dependency to `0.4`.

 [#2903]: https://github.com/actix/actix-web/pull/2903

-## 0.6.2 - 2022-07-23
+## 0.6.2

 - Allow partial range responses for video content to start streaming sooner. [#2817]
 - Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.

 [#2817]: https://github.com/actix/actix-web/pull/2817

-## 0.6.1 - 2022-06-11
+## 0.6.1

 - Add `NamedFile::{modified, metadata, content_type, content_disposition, encoding}()` getters. [#2021]
 - Update `tokio-uring` dependency to `0.3`.
 - Audio files now use `Content-Disposition: inline` instead of `attachment`. [#2645]
@@ -22,46 +41,46 @@
 [#2021]: https://github.com/actix/actix-web/pull/2021
 [#2645]: https://github.com/actix/actix-web/pull/2645

-## 0.6.0 - 2022-02-25
+## 0.6.0

 - No significant changes since `0.6.0-beta.16`.

-## 0.6.0-beta.16 - 2022-01-31
+## 0.6.0-beta.16

 - No significant changes since `0.6.0-beta.15`.

-## 0.6.0-beta.15 - 2022-01-21
+## 0.6.0-beta.15

 - No significant changes since `0.6.0-beta.14`.

-## 0.6.0-beta.14 - 2022-01-14
+## 0.6.0-beta.14

 - The `prefer_utf8` option introduced in `0.4.0` is now true by default. [#2583]

 [#2583]: https://github.com/actix/actix-web/pull/2583

-## 0.6.0-beta.13 - 2022-01-04
+## 0.6.0-beta.13

 - The `Files` service now rejects requests with URL paths that include `%2F` (decoded: `/`). [#2398]
 - The `Files` service now correctly decodes `%25` in the URL path to `%` for the file path. [#2398]
 - Minimum supported Rust version (MSRV) is now 1.54.

 [#2398]: https://github.com/actix/actix-web/pull/2398

-## 0.6.0-beta.12 - 2021-12-29
+## 0.6.0-beta.12

 - No significant changes since `0.6.0-beta.11`.

-## 0.6.0-beta.11 - 2021-12-27
+## 0.6.0-beta.11

 - No significant changes since `0.6.0-beta.10`.

-## 0.6.0-beta.10 - 2021-12-11
+## 0.6.0-beta.10

 - No significant changes since `0.6.0-beta.9`.

-## 0.6.0-beta.9 - 2021-11-22
+## 0.6.0-beta.9

 - Add crate feature `experimental-io-uring`, enabling async file I/O to be utilized. This feature is only available on Linux OSes with recent kernel versions. This feature is semver-exempt. [#2408]
 - Add `NamedFile::open_async`. [#2408]
 - Fix 304 Not Modified responses to omit the Content-Length header, as per the spec. [#2453]
@@ -72,24 +91,24 @@
 [#2408]: https://github.com/actix/actix-web/pull/2408
 [#2453]: https://github.com/actix/actix-web/pull/2453

-## 0.6.0-beta.8 - 2021-10-20
+## 0.6.0-beta.8

 - Minimum supported Rust version (MSRV) is now 1.52.

-## 0.6.0-beta.7 - 2021-09-09
+## 0.6.0-beta.7

 - Minimum supported Rust version (MSRV) is now 1.51.

-## 0.6.0-beta.6 - 2021-06-26
+## 0.6.0-beta.6

 - Added `Files::path_filter()`. [#2274]
 - `Files::show_files_listing()` can now be used with `Files::index_file()` to show files listing as a fallback when the index file is not found. [#2228]

 [#2274]: https://github.com/actix/actix-web/pull/2274
 [#2228]: https://github.com/actix/actix-web/pull/2228

-## 0.6.0-beta.5 - 2021-06-17
+## 0.6.0-beta.5

 - `NamedFile` now implements `ServiceFactory` and `HttpServiceFactory` making it much more useful in routing. For example, it can be used directly as a default service. [#2135]
 - For symbolic links, `Content-Disposition` header no longer shows the filename of the original file. [#2156]
 - `Files::redirect_to_slash_directory()` now works as expected when used with `Files::show_files_listing()`. [#2225]
@@ -100,58 +119,58 @@
 [#2225]: https://github.com/actix/actix-web/pull/2225
 [#2257]: https://github.com/actix/actix-web/pull/2257

-## 0.6.0-beta.4 - 2021-04-02
+## 0.6.0-beta.4

 - Add support for `.guard` in `Files` to selectively filter `Files` services. [#2046]

 [#2046]: https://github.com/actix/actix-web/pull/2046

-## 0.6.0-beta.3 - 2021-03-09
+## 0.6.0-beta.3

 - No notable changes.

-## 0.6.0-beta.2 - 2021-02-10
+## 0.6.0-beta.2

 - Fix If-Modified-Since and If-Unmodified-Since to not compare using sub-second timestamps. [#1887]
 - Replace `v_htmlescape` with `askama_escape`. [#1953]

 [#1887]: https://github.com/actix/actix-web/pull/1887
 [#1953]: https://github.com/actix/actix-web/pull/1953

-## 0.6.0-beta.1 - 2021-01-07
+## 0.6.0-beta.1

 - `HttpRange::parse` now has its own error type.
 - Update `bytes` to `1.0`. [#1813]

 [#1813]: https://github.com/actix/actix-web/pull/1813

-## 0.5.0 - 2020-12-26
+## 0.5.0

 - Optionally support hidden files/directories. [#1811]

 [#1811]: https://github.com/actix/actix-web/pull/1811

-## 0.4.1 - 2020-11-24
+## 0.4.1

 - Clarify order of parameters in `Files::new` and improve docs.

-## 0.4.0 - 2020-10-06
+## 0.4.0

 - Add `Files::prefer_utf8` option that adds UTF-8 charset on certain response types. [#1714]

 [#1714]: https://github.com/actix/actix-web/pull/1714

-## 0.3.0 - 2020-09-11
+## 0.3.0

 - No significant changes from 0.3.0-beta.1.

-## 0.3.0-beta.1 - 2020-07-15
+## 0.3.0-beta.1

 - Update `v_htmlescape` to 0.10
 - Update `actix-web` and `actix-http` dependencies to beta.1

-## 0.3.0-alpha.1 - 2020-05-23
+## 0.3.0-alpha.1

 - Update `actix-web` and `actix-http` dependencies to alpha
 - Fix some typos in the docs
 - Bump minimum supported Rust version to 1.40
@@ -159,73 +178,73 @@
 [#1384]: https://github.com/actix/actix-web/pull/1384

-## 0.2.1 - 2019-12-22
+## 0.2.1

 - Use the same format for file URLs regardless of platforms

-## 0.2.0 - 2019-12-20
+## 0.2.0

 - Fix BodyEncoding trait import #1220

-## 0.2.0-alpha.1 - 2019-12-07
+## 0.2.0-alpha.1

 - Migrate to `std::future`

-## 0.1.7 - 2019-11-06
-
-- Add an additional `filename*` param in the `Content-Disposition` header of
-  `actix_files::NamedFile` to be more compatible. (#1151)
+## 0.1.7
+
+- Add an additional `filename*` param in the `Content-Disposition` header of `actix_files::NamedFile` to be more compatible. (#1151)

-## 0.1.6 - 2019-10-14
+## 0.1.6

 - Add option to redirect to a slash-ended path `Files` #1132

-## 0.1.5 - 2019-10-08
+## 0.1.5

 - Bump up `mime_guess` crate version to 2.0.1
 - Bump up `percent-encoding` crate version to 2.1
 - Allow user defined request guards for `Files` #1113

-## 0.1.4 - 2019-07-20
+## 0.1.4

 - Allow to disable `Content-Disposition` header #686

-## 0.1.3 - 2019-06-28
+## 0.1.3

 - Do not set `Content-Length` header, let actix-http set it #930

-## 0.1.2 - 2019-06-13
+## 0.1.2

 - Content-Length is 0 for NamedFile HEAD request #914
 - Fix ring dependency from actix-web default features for #741

-## 0.1.1 - 2019-06-01
+## 0.1.1

 - Static files are incorrectly served as both chunked and with length #812

-## 0.1.0 - 2019-05-25
+## 0.1.0

 - NamedFile last-modified check always fails due to nano-seconds in file modified date #820

-## 0.1.0-beta.4 - 2019-05-12
+## 0.1.0-beta.4

 - Update actix-web to beta.4

-## 0.1.0-beta.1 - 2019-04-20
+## 0.1.0-beta.1

 - Update actix-web to beta.1

-## 0.1.0-alpha.6 - 2019-04-14
+## 0.1.0-alpha.6

 - Update actix-web to alpha6

-## 0.1.0-alpha.4 - 2019-04-08
+## 0.1.0-alpha.4

 - Update actix-web to alpha4

-## 0.1.0-alpha.2 - 2019-04-02
+## 0.1.0-alpha.2

 - Add default handler support

-## 0.1.0-alpha.1 - 2019-03-28
+## 0.1.0-alpha.1

 - Initial impl

@@ -1,6 +1,6 @@
 [package]
 name = "actix-files"
-version = "0.6.2"
+version = "0.6.6"
 authors = [
   "Nikolay Kim <fafhrd91@gmail.com>",
   "Rob Ede <robjtede@icloud.com>",
@@ -11,11 +11,16 @@ homepage = "https://actix.rs"
 repository = "https://github.com/actix/actix-web"
 categories = ["asynchronous", "web-programming::http-server"]
 license = "MIT OR Apache-2.0"
-edition = "2018"
+edition = "2021"

-[lib]
-name = "actix_files"
-path = "src/lib.rs"
+[package.metadata.cargo_check_external_types]
+allowed_external_types = [
+  "actix_http::*",
+  "actix_service::*",
+  "actix_web::*",
+  "http::*",
+  "mime::*",
+]

 [features]
 experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]
@@ -26,25 +31,26 @@ actix-service = "2"
 actix-utils = "3"
 actix-web = { version = "4", default-features = false }

-bitflags = "1"
+bitflags = "2"
 bytes = "1"
 derive_more = "0.99.5"
 futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
 http-range = "0.1.4"
 log = "0.4"
-mime = "0.3"
+mime = "0.3.9"
 mime_guess = "2.0.1"
 percent-encoding = "2.1"
 pin-project-lite = "0.2.7"
-v_htmlescape= "0.15"
+v_htmlescape = "0.15.5"

 # experimental-io-uring
 [target.'cfg(target_os = "linux")'.dependencies]
-tokio-uring = { version = "0.3", optional = true, features = ["bytes"] }
-actix-server = { version = "2.1", optional = true } # ensure matching tokio-uring versions
+tokio-uring = { version = "0.5", optional = true, features = ["bytes"] }
+actix-server = { version = "2.4", optional = true } # ensure matching tokio-uring versions

 [dev-dependencies]
 actix-rt = "2.7"
 actix-test = "0.1"
 actix-web = "4"
+env_logger = "0.11"
 tempfile = "3.2"

@@ -1,18 +1,32 @@
-# actix-files
+# `actix-files`

-> Static file serving for Actix Web
+<!-- prettier-ignore-start -->

 [![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files)
-[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.2)](https://docs.rs/actix-files/0.6.2)
-![Version](https://img.shields.io/badge/rustc-1.59+-ab6000.svg)
+[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.6)](https://docs.rs/actix-files/0.6.6)
+![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
 ![License](https://img.shields.io/crates/l/actix-files.svg)
 <br />
-[![dependency status](https://deps.rs/crate/actix-files/0.6.2/status.svg)](https://deps.rs/crate/actix-files/0.6.2)
+[![dependency status](https://deps.rs/crate/actix-files/0.6.6/status.svg)](https://deps.rs/crate/actix-files/0.6.6)
 [![Download](https://img.shields.io/crates/d/actix-files.svg)](https://crates.io/crates/actix-files)
 [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)

-## Documentation & Resources
-
-- [API Documentation](https://docs.rs/actix-files)
-- [Example Project](https://github.com/actix/examples/tree/master/basics/static-files)
-- Minimum Supported Rust Version (MSRV): 1.54
+<!-- prettier-ignore-end -->
+
+<!-- cargo-rdme start -->
+
+Static file serving for Actix Web.
+
+Provides a non-blocking service for serving static files from disk.
+
+## Examples
+
+```rust
+use actix_web::App;
+use actix_files::Files;
+
+let app = App::new()
+    .service(Files::new("/static", ".").prefer_utf8(true));
+```
+
+<!-- cargo-rdme end -->

actix-files/examples/guarded-listing.rs (new file, 33 lines)

@@ -0,0 +1,33 @@
+use actix_files::Files;
+use actix_web::{get, guard, middleware, App, HttpServer, Responder};
+
+const EXAMPLES_DIR: &str = concat![env!("CARGO_MANIFEST_DIR"), "/examples"];
+
+#[get("/")]
+async fn index() -> impl Responder {
+    "Hello world!"
+}
+
+#[actix_web::main]
+async fn main() -> std::io::Result<()> {
+    env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
+
+    log::info!("starting HTTP server at http://localhost:8080");
+
+    HttpServer::new(|| {
+        App::new()
+            .service(index)
+            .service(
+                Files::new("/assets", EXAMPLES_DIR)
+                    .show_files_listing()
+                    .guard(guard::Header("show-listing", "?1")),
+            )
+            .service(Files::new("/assets", EXAMPLES_DIR))
+            .wrap(middleware::Compress::default())
+            .wrap(middleware::Logger::default())
+    })
+    .bind(("127.0.0.1", 8080))?
+    .workers(2)
+    .run()
+    .await
+}
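As a rough illustration (not part of this commit), the header guard in the example above could be exercised with actix-web's test utilities: requests carrying `show-listing: ?1` match the guarded listing service, while others fall through to the second, listing-free `Files` service. The directory used here and the expected 404 fallback behaviour are assumptions for the sketch.

```rust
use actix_files::Files;
use actix_web::{guard, test, App};

#[actix_web::test]
async fn listing_only_with_header() {
    // Serve this crate's own source directory just to have some files on disk.
    let dir = env!("CARGO_MANIFEST_DIR");

    let app = test::init_service(
        App::new()
            .service(
                Files::new("/assets", dir)
                    .show_files_listing()
                    .guard(guard::Header("show-listing", "?1")),
            )
            .service(Files::new("/assets", dir)),
    )
    .await;

    // With the header, the guarded service matches and renders a directory listing.
    let req = test::TestRequest::get()
        .uri("/assets")
        .insert_header(("show-listing", "?1"))
        .to_request();
    let res = test::call_service(&app, req).await;
    assert!(res.status().is_success());

    // Without the header, the request falls through to the second `Files` service,
    // which has no listing and no index file, so it should answer with a 404.
    let req = test::TestRequest::get().uri("/assets").to_request();
    let res = test::call_service(&app, req).await;
    assert!(res.status().is_client_error());
}
```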
@@ -7,11 +7,10 @@ use std::{
 };

 use actix_web::{error::Error, web::Bytes};
-use futures_core::{ready, Stream};
-use pin_project_lite::pin_project;
-
 #[cfg(feature = "experimental-io-uring")]
 use bytes::BytesMut;
+use futures_core::{ready, Stream};
+use pin_project_lite::pin_project;

 use super::named::File;
@@ -1,4 +1,9 @@
use std::{
    fmt::Write,
    fs::DirEntry,
    io,
    path::{Path, PathBuf},
};

use actix_web::{dev::ServiceResponse, HttpRequest, HttpResponse};
use percent_encoding::{utf8_percent_encode, CONTROLS};
@@ -4,46 +4,45 @@ use derive_more::Display;
/// Errors which can occur when serving static files.
#[derive(Debug, PartialEq, Eq, Display)]
pub enum FilesError {
    /// Path is not a directory.
    #[allow(dead_code)]
    #[display(fmt = "path is not a directory. Unable to serve static files")]
    IsNotDirectory,

    /// Cannot render directory.
    #[display(fmt = "unable to render directory without index file")]
    IsDirectory,
}

impl ResponseError for FilesError {
    /// Returns `404 Not Found`.
    fn status_code(&self) -> StatusCode {
        StatusCode::NOT_FOUND
    }
}

#[derive(Debug, PartialEq, Eq, Display)]
#[non_exhaustive]
pub enum UriSegmentError {
    /// Segment started with the wrapped invalid character.
    #[display(fmt = "segment started with invalid character: ('{_0}')")]
    BadStart(char),

    /// Segment contained the wrapped invalid character.
    #[display(fmt = "segment contained invalid character ('{_0}')")]
    BadChar(char),

    /// Segment ended with the wrapped invalid character.
    #[display(fmt = "segment ended with invalid character: ('{_0}')")]
    BadEnd(char),

    /// Path is not a valid UTF-8 string after percent-decoding.
    #[display(fmt = "path is not a valid UTF-8 string after percent-decoding")]
    NotValidUtf8,
}

impl ResponseError for UriSegmentError {
    /// Returns `400 Bad Request`.
    fn status_code(&self) -> StatusCode {
        StatusCode::BAD_REQUEST
    }
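These error types appear to stay crate-internal, but the `ResponseError` pattern they follow works just as well for application-defined errors. The sketch below is an assumption-laden illustration (the error name, message, and status mapping are invented), not part of this diff:

```rust
use actix_web::{http::StatusCode, HttpResponse, ResponseError};
use derive_more::Display;

/// Hypothetical application error mapped to a fixed status code.
#[derive(Debug, Display)]
#[display(fmt = "asset could not be located")]
struct AssetNotFound;

impl ResponseError for AssetNotFound {
    fn status_code(&self) -> StatusCode {
        StatusCode::NOT_FOUND
    }

    fn error_response(&self) -> HttpResponse {
        // Render the Display message with the status chosen above.
        HttpResponse::build(self.status_code()).body(self.to_string())
    }
}
```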
@@ -8,8 +8,7 @@ use std::{
use actix_service::{boxed, IntoServiceFactory, ServiceFactory, ServiceFactoryExt};
use actix_web::{
    dev::{
        AppService, HttpServiceFactory, RequestHead, ResourceDef, ServiceRequest, ServiceResponse,
    },
    error::Error,
    guard::Guard,

@@ -142,7 +141,7 @@ impl Files {
        self
    }

    /// Set custom directory renderer.
    pub fn files_listing_renderer<F>(mut self, f: F) -> Self
    where
        for<'r, 's> F:

@@ -152,7 +151,7 @@ impl Files {
        self
    }

    /// Specifies MIME override callback.
    pub fn mime_override<F>(mut self, f: F) -> Self
    where
        F: Fn(&mime::Name<'_>) -> DispositionType + 'static,

@@ -236,7 +235,7 @@ impl Files {
    /// request starts being handled by the file service, it will not be able to back-out and try
    /// the next service, you will simply get a 404 (or 405) error response.
    ///
    /// To allow `POST` requests to retrieve files, see [`Files::method_guard()`].
    ///
    /// # Examples
    /// ```

@@ -301,12 +300,8 @@ impl Files {
    pub fn default_handler<F, U>(mut self, f: F) -> Self
    where
        F: IntoServiceFactory<U, ServiceRequest>,
        U: ServiceFactory<ServiceRequest, Config = (), Response = ServiceResponse, Error = Error>
            + 'static,
    {
        // create and configure default resource
        self.default = Rc::new(RefCell::new(Some(Rc::new(boxed::factory(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use actix_web::{
|
||||||
|
http::StatusCode,
|
||||||
|
test::{self, TestRequest},
|
||||||
|
App, HttpResponse,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[actix_web::test]
|
||||||
|
async fn custom_files_listing_renderer() {
|
||||||
|
let srv = test::init_service(
|
||||||
|
App::new().service(
|
||||||
|
Files::new("/", "./tests")
|
||||||
|
.show_files_listing()
|
||||||
|
.files_listing_renderer(|dir, req| {
|
||||||
|
Ok(ServiceResponse::new(
|
||||||
|
req.clone(),
|
||||||
|
HttpResponse::Ok().body(dir.path.to_str().unwrap().to_owned()),
|
||||||
|
))
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
|
let req = TestRequest::with_uri("/").to_request();
|
||||||
|
let res = test::call_service(&srv, req).await;
|
||||||
|
|
||||||
|
assert_eq!(res.status(), StatusCode::OK);
|
||||||
|
let body = test::read_body(res).await;
|
||||||
|
let body_str = std::str::from_utf8(&body).unwrap();
|
||||||
|
let actual_path = Path::new(&body_str);
|
||||||
|
let expected_path = Path::new("actix-files/tests");
|
||||||
|
assert!(
|
||||||
|
actual_path.ends_with(expected_path),
|
||||||
|
"body {:?} does not end with {:?}",
|
||||||
|
actual_path,
|
||||||
|
expected_path
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@@ -13,7 +13,11 @@
#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible, missing_docs, missing_debug_implementations)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]

use std::path::Path;

use actix_service::boxed::{BoxService, BoxServiceFactory};
use actix_web::{

@@ -22,7 +26,6 @@ use actix_web::{
    http::header::DispositionType,
};
use mime_guess::from_ext;

mod chunked;
mod directory;

@@ -34,16 +37,15 @@ mod path_buf;
mod range;
mod service;

pub use self::{
    chunked::ChunkedReadFile, directory::Directory, files::Files, named::NamedFile,
    range::HttpRange, service::FilesService,
};
use self::{
    directory::{directory_listing, DirectoryRenderer},
    error::FilesError,
    path_buf::PathBufWrap,
};

type HttpService = BoxService<ServiceRequest, ServiceResponse, Error>;
type HttpNewService = BoxServiceFactory<(), ServiceRequest, ServiceResponse, Error, ()>;
||||||
@@ -63,6 +65,7 @@ type PathFilter = dyn Fn(&Path, &RequestHead) -> bool;
#[cfg(test)]
mod tests {
    use std::{
        fmt::Write as _,
        fs::{self},
        ops::Add,
        time::{Duration, SystemTime},

@@ -72,7 +75,7 @@ mod tests {
        dev::ServiceFactory,
        guard,
        http::{
            header::{self, ContentDisposition, DispositionParam},
            Method, StatusCode,
        },
        middleware::Compress,
@ -551,10 +554,9 @@ mod tests {
|
|||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn test_static_files_with_spaces() {
|
async fn test_static_files_with_spaces() {
|
||||||
let srv = test::init_service(
|
let srv =
|
||||||
App::new().service(Files::new("/", ".").index_file("Cargo.toml")),
|
test::init_service(App::new().service(Files::new("/", ".").index_file("Cargo.toml")))
|
||||||
)
|
.await;
|
||||||
.await;
|
|
||||||
let request = TestRequest::get()
|
let request = TestRequest::get()
|
||||||
.uri("/tests/test%20space.binary")
|
.uri("/tests/test%20space.binary")
|
||||||
.to_request();
|
.to_request();
|
||||||
@ -566,6 +568,30 @@ mod tests {
|
|||||||
assert_eq!(bytes, data);
|
assert_eq!(bytes, data);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(not(target_os = "windows"))]
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn test_static_files_with_special_characters() {
|
||||||
|
// Create the file we want to test against ad-hoc. We can't check it in as otherwise
|
||||||
|
// Windows can't even checkout this repository.
|
||||||
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
|
let file_with_newlines = temp_dir.path().join("test\n\x0B\x0C\rnewline.text");
|
||||||
|
fs::write(&file_with_newlines, "Look at my newlines").unwrap();
|
||||||
|
|
||||||
|
let srv = test::init_service(
|
||||||
|
App::new().service(Files::new("/", temp_dir.path()).index_file("Cargo.toml")),
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
let request = TestRequest::get()
|
||||||
|
.uri("/test%0A%0B%0C%0Dnewline.text")
|
||||||
|
.to_request();
|
||||||
|
let response = test::call_service(&srv, request).await;
|
||||||
|
assert_eq!(response.status(), StatusCode::OK);
|
||||||
|
|
||||||
|
let bytes = test::read_body(response).await;
|
||||||
|
let data = web::Bytes::from(fs::read(file_with_newlines).unwrap());
|
||||||
|
assert_eq!(bytes, data);
|
||||||
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn test_files_not_allowed() {
|
async fn test_files_not_allowed() {
|
||||||
let srv = test::init_service(App::new().service(Files::new("/", "."))).await;
|
let srv = test::init_service(App::new().service(Files::new("/", "."))).await;
|
||||||
@ -664,8 +690,7 @@ mod tests {
|
|||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn test_static_files() {
|
async fn test_static_files() {
|
||||||
let srv =
|
let srv =
|
||||||
test::init_service(App::new().service(Files::new("/", ".").show_files_listing()))
|
test::init_service(App::new().service(Files::new("/", ".").show_files_listing())).await;
|
||||||
.await;
|
|
||||||
let req = TestRequest::with_uri("/missing").to_request();
|
let req = TestRequest::with_uri("/missing").to_request();
|
||||||
|
|
||||||
let resp = test::call_service(&srv, req).await;
|
let resp = test::call_service(&srv, req).await;
|
||||||
@ -678,8 +703,7 @@ mod tests {
|
|||||||
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
|
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
|
||||||
|
|
||||||
let srv =
|
let srv =
|
||||||
test::init_service(App::new().service(Files::new("/", ".").show_files_listing()))
|
test::init_service(App::new().service(Files::new("/", ".").show_files_listing())).await;
|
||||||
.await;
|
|
||||||
let req = TestRequest::with_uri("/tests").to_request();
|
let req = TestRequest::with_uri("/tests").to_request();
|
||||||
let resp = test::call_service(&srv, req).await;
|
let resp = test::call_service(&srv, req).await;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -840,19 +864,21 @@ mod tests {
|
|||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn test_percent_encoding_2() {
|
async fn test_percent_encoding_2() {
|
||||||
let tmpdir = tempfile::tempdir().unwrap();
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
let filename = match cfg!(unix) {
|
let filename = match cfg!(unix) {
|
||||||
true => "ض:?#[]{}<>()@!$&'`|*+,;= %20.test",
|
true => "ض:?#[]{}<>()@!$&'`|*+,;= %20\n.test",
|
||||||
false => "ض#[]{}()@!$&'`+,;= %20.test",
|
false => "ض#[]{}()@!$&'`+,;= %20.test",
|
||||||
};
|
};
|
||||||
let filename_encoded = filename
|
let filename_encoded = filename
|
||||||
.as_bytes()
|
.as_bytes()
|
||||||
.iter()
|
.iter()
|
||||||
.map(|c| format!("%{:02X}", c))
|
.fold(String::new(), |mut buf, c| {
|
||||||
.collect::<String>();
|
write!(&mut buf, "%{:02X}", c).unwrap();
|
||||||
std::fs::File::create(tmpdir.path().join(filename)).unwrap();
|
buf
|
||||||
|
});
|
||||||
|
std::fs::File::create(temp_dir.path().join(filename)).unwrap();
|
||||||
|
|
||||||
let srv = test::init_service(App::new().service(Files::new("", tmpdir.path()))).await;
|
let srv = test::init_service(App::new().service(Files::new("/", temp_dir.path()))).await;
|
||||||
|
|
||||||
let req = TestRequest::get()
|
let req = TestRequest::get()
|
||||||
.uri(&format!("/{}", filename_encoded))
|
.uri(&format!("/{}", filename_encoded))
|
||||||
|
@@ -8,13 +8,13 @@ use std::{
use actix_web::{
    body::{self, BoxBody, SizedStream},
    dev::{
        self, AppService, HttpServiceFactory, ResourceDef, Service, ServiceFactory, ServiceRequest,
        ServiceResponse,
    },
    http::{
        header::{
            self, Charset, ContentDisposition, ContentEncoding, DispositionParam, DispositionType,
            ExtendedValue, HeaderValue,
        },
        StatusCode,
    },

@@ -24,11 +24,11 @@ use bitflags::bitflags;
use derive_more::{Deref, DerefMut};
use futures_core::future::LocalBoxFuture;
use mime::Mime;

use crate::{encoding::equiv_utf8_text, range::HttpRange};

bitflags! {
    #[derive(Debug, Clone, Copy)]
    pub(crate) struct Flags: u8 {
        const ETAG = 0b0000_0001;
        const LAST_MD = 0b0000_0010;

@@ -84,6 +84,7 @@ pub struct NamedFile {

#[cfg(not(feature = "experimental-io-uring"))]
pub(crate) use std::fs::File;

#[cfg(feature = "experimental-io-uring")]
pub(crate) use tokio_uring::fs::File;

@@ -126,7 +127,7 @@ impl NamedFile {
            }
        };

        let ct = mime_guess::from_path(&path).first_or_octet_stream();

        let disposition = match ct.type_() {
            mime::IMAGE | mime::TEXT | mime::AUDIO | mime::VIDEO => DispositionType::Inline,

@@ -138,8 +139,13 @@ impl NamedFile {
            _ => DispositionType::Attachment,
        };

        // replace special characters in filenames which could occur on some filesystems
        let filename_s = filename
            .replace('\n', "%0A") // \n line break
            .replace('\x0B', "%0B") // \v vertical tab
            .replace('\x0C', "%0C") // \f form feed
            .replace('\r', "%0D"); // \r carriage return
        let mut parameters = vec![DispositionParam::Filename(filename_s)];

        if !filename.is_ascii() {
            parameters.push(DispositionParam::FilenameExt(ExtendedValue {
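Since `NamedFile` is part of the crate's public API, a short, hedged sketch of overriding the disposition computed above is shown here; the file path and the choice of attachment are assumptions for illustration only:

```rust
use actix_files::NamedFile;
use actix_web::http::header::{ContentDisposition, DispositionType};

// Open a file asynchronously and force a download prompt instead of the
// disposition that would otherwise be inferred from its MIME type.
async fn download_report() -> std::io::Result<NamedFile> {
    Ok(NamedFile::open_async("./static/report.pdf")
        .await?
        .set_content_disposition(ContentDisposition {
            disposition: DispositionType::Attachment,
            parameters: vec![],
        }))
}
```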
|
@ -30,7 +30,7 @@ impl PathBufWrap {
|
|||||||
let mut segment_count = path.matches('/').count() + 1;
|
let mut segment_count = path.matches('/').count() + 1;
|
||||||
|
|
||||||
// we can decode the whole path here (instead of per-segment decoding)
|
// we can decode the whole path here (instead of per-segment decoding)
|
||||||
// because we will reject `%2F` in paths using `segement_count`.
|
// because we will reject `%2F` in paths using `segment_count`.
|
||||||
let path = percent_encoding::percent_decode_str(path)
|
let path = percent_encoding::percent_decode_str(path)
|
||||||
.decode_utf8()
|
.decode_utf8()
|
||||||
.map_err(|_| UriSegmentError::NotValidUtf8)?;
|
.map_err(|_| UriSegmentError::NotValidUtf8)?;
|
||||||
@ -97,8 +97,6 @@ impl FromRequest for PathBufWrap {
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use std::iter::FromIterator;
|
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@@ -1,4 +1,36 @@
use std::fmt;

use derive_more::Error;

/// Copy of `http_range::HttpRangeParseError`.
#[derive(Debug, Clone)]
enum HttpRangeParseError {
    InvalidRange,
    NoOverlap,
}

impl From<http_range::HttpRangeParseError> for HttpRangeParseError {
    fn from(err: http_range::HttpRangeParseError) -> Self {
        match err {
            http_range::HttpRangeParseError::InvalidRange => Self::InvalidRange,
            http_range::HttpRangeParseError::NoOverlap => Self::NoOverlap,
        }
    }
}

#[derive(Debug, Clone, Error)]
#[non_exhaustive]
pub struct ParseRangeErr(#[error(not(source))] HttpRangeParseError);

impl fmt::Display for ParseRangeErr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("invalid Range header: ")?;
        f.write_str(match self.0 {
            HttpRangeParseError::InvalidRange => "invalid syntax",
            HttpRangeParseError::NoOverlap => "range starts after end of content",
        })
    }
}

/// HTTP Range header representation.
#[derive(Debug, Clone, Copy)]

@@ -10,26 +42,22 @@ pub struct HttpRange {
    pub length: u64,
}

impl HttpRange {
    /// Parses Range HTTP header string as per RFC 2616.
    ///
    /// `header` is HTTP Range header (e.g. `bytes=bytes=0-9`).
    /// `size` is full size of response (file).
    pub fn parse(header: &str, size: u64) -> Result<Vec<HttpRange>, ParseRangeErr> {
        let ranges =
            http_range::HttpRange::parse(header, size).map_err(|err| ParseRangeErr(err.into()))?;

        Ok(ranges
            .iter()
            .map(|range| HttpRange {
                start: range.start,
                length: range.length,
            })
            .collect())
    }
}
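For orientation, a minimal usage sketch of the re-exported `HttpRange::parse` helper follows; the header string, total size, and assertions are illustrative assumptions rather than part of this change:

```rust
use actix_files::HttpRange;

fn main() {
    // A single range over a 100-byte resource: bytes 0 through 9 inclusive.
    let ranges = HttpRange::parse("bytes=0-9", 100).expect("header should parse");

    assert_eq!(ranges.len(), 1);
    assert_eq!(ranges[0].start, 0);
    assert_eq!(ranges[0].length, 10);
}
```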
|
@@ -62,11 +62,7 @@ impl FilesService {
        }
    }

    fn serve_named_file(&self, req: ServiceRequest, mut named_file: NamedFile) -> ServiceResponse {
        if let Some(ref mime_override) = self.mime_override {
            let new_disposition = mime_override(&named_file.content_type.type_());
            named_file.content_disposition.disposition = new_disposition;

@@ -120,13 +116,11 @@ impl Service<ServiceRequest> for FilesService {
                ));
            }

            let path_on_disk =
                match PathBufWrap::parse_path(req.match_info().unprocessed(), this.hidden_files) {
                    Ok(item) => item,
                    Err(err) => return Ok(req.error_response(err)),
                };

            if let Some(filter) = &this.path_filter {
                if !filter(path_on_disk.as_ref(), req.head()) {

@@ -177,8 +171,7 @@ impl Service<ServiceRequest> for FilesService {
            match NamedFile::open_async(&path).await {
                Ok(mut named_file) => {
                    if let Some(ref mime_override) = this.mime_override {
                        let new_disposition = mime_override(&named_file.content_type.type_());
                        named_file.content_disposition.disposition = new_disposition;
                    }
                    named_file.flags = this.file_flags;

@@ -24,8 +24,7 @@ async fn test_utf8_file_contents() {

    // disable UTF-8 attribute
    let srv =
        test::init_service(App::new().service(Files::new("/", "./tests").prefer_utf8(false))).await;

    let req = TestRequest::with_uri("/utf8.txt").to_request();
    let res = test::call_service(&srv, req).await;

@@ -12,9 +12,7 @@ async fn test_guard_filter() {
    let srv = test::init_service(
        App::new()
            .service(Files::new("/", "./tests/fixtures/guards/first").guard(Host("first.com")))
            .service(Files::new("/", "./tests/fixtures/guards/second").guard(Host("second.com"))),
    )
    .await;

@@ -9,8 +9,7 @@ use actix_web::{
async fn test_directory_traversal_prevention() {
    let srv = test::init_service(App::new().service(Files::new("/", "./tests"))).await;

    let req = TestRequest::with_uri("/../../../../../../../../../../../etc/passwd").to_request();
    let res = test::call_service(&srv, req).await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
|
@@ -1,10 +1,19 @@
# Changes

## Unreleased

- Minimum supported Rust version (MSRV) is now 1.72.

## 3.2.0

- Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.

## 3.1.0

- Minimum supported Rust version (MSRV) is now 1.59.

## 3.0.0

- `TestServer::stop` is now async and will wait for the server and system to shutdown. [#2442]
- Added `TestServer::client_headers` method. [#2097]
- Update `actix-server` dependency to `2`.

@@ -16,78 +25,79 @@
[#2097]: https://github.com/actix/actix-web/pull/2097
[#1813]: https://github.com/actix/actix-web/pull/1813

<details>
<summary>3.0.0 Pre-Releases</summary>

## 3.0.0-beta.13

- No significant changes since `3.0.0-beta.12`.

## 3.0.0-beta.12

- No significant changes since `3.0.0-beta.11`.

## 3.0.0-beta.11

- Minimum supported Rust version (MSRV) is now 1.54.

## 3.0.0-beta.10

- Update `actix-server` to `2.0.0-rc.2`. [#2550]

[#2550]: https://github.com/actix/actix-web/pull/2550

## 3.0.0-beta.9

- No significant changes since `3.0.0-beta.8`.

## 3.0.0-beta.8

- Update `actix-tls` to `3.0.0-rc.1`. [#2474]

[#2474]: https://github.com/actix/actix-web/pull/2474

## 3.0.0-beta.7

- Fix compatibility with experimental `io-uring` feature of `actix-rt`. [#2408]

[#2408]: https://github.com/actix/actix-web/pull/2408

## 3.0.0-beta.6

- `TestServer::stop` is now async and will wait for the server and system to shutdown. [#2442]
- Update `actix-server` to `2.0.0-beta.9`. [#2442]
- Minimum supported Rust version (MSRV) is now 1.52.

[#2442]: https://github.com/actix/actix-web/pull/2442

## 3.0.0-beta.5

- Minimum supported Rust version (MSRV) is now 1.51.

## 3.0.0-beta.4

- Added `TestServer::client_headers` method. [#2097]

[#2097]: https://github.com/actix/actix-web/pull/2097

## 3.0.0-beta.3

- No notable changes.

## 3.0.0-beta.2

- No notable changes.

## 3.0.0-beta.1

- Update `bytes` to `1.0`. [#1813]

[#1813]: https://github.com/actix/actix-web/pull/1813

</details>

## 2.1.0

- Add ability to set address for `TestServer`. [#1645]
- Upgrade `base64` to `0.13`.
- Upgrade `serde_urlencoded` to `0.7`. [#1773]

@@ -95,12 +105,12 @@
[#1773]: https://github.com/actix/actix-web/pull/1773
[#1645]: https://github.com/actix/actix-web/pull/1645

## 2.0.0

- Update actix-codec and actix-utils dependencies.

## 2.0.0-alpha.1

- Update the `time` dependency to 0.2.7
- Update `actix-connect` dependency to 2.0.0-alpha.2
- Make `test_server` `async` fn.

@@ -109,56 +119,57 @@
- Update `base64` dependency to 0.12
- Update `env_logger` dependency to 0.7

## 1.0.0

- Replaced `TestServer::start()` with `test_server()`

## 1.0.0-alpha.3

- Migrate to `std::future`

## 0.2.5

- Update serde_urlencoded to "0.6.1"
- Increase TestServerRuntime timeouts from 500ms to 3000ms
- Do not override current `System`

## 0.2.4

- Update actix-server to 0.6

## 0.2.3

- Add `delete`, `options`, `patch` methods to `TestServerRunner`

## 0.2.2

- Add .put() and .sput() methods

## 0.2.1

- Add license files

## 0.2.0

- Update awc and actix-http deps

## 0.1.1

- Always make new connection for http client

## 0.1.0

- No changes

## 0.1.0-alpha.3

- Request functions accept path #743

## 0.1.0-alpha.2

- Added TestServerRuntime::load_body() method
- Update actix-http and awc libraries

## 0.1.0-alpha.1

- Initial impl
|
@@ -1,11 +1,11 @@
[package]
name = "actix-http-test"
version = "3.2.0"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Various helpers for Actix applications to use during testing"
keywords = ["http", "web", "framework", "async", "futures"]
homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
categories = [
  "network-programming",
  "asynchronous",

@@ -13,14 +13,22 @@ categories = [
  "web-programming::websocket",
]
license = "MIT OR Apache-2.0"
edition = "2021"

[package.metadata.docs.rs]
features = []

[package.metadata.cargo_check_external_types]
allowed_external_types = [
  "actix_codec::*",
  "actix_http::*",
  "actix_server::*",
  "awc::*",
  "bytes::*",
  "futures_core::*",
  "http::*",
  "tokio::*",
]

[features]
default = []

@@ -37,19 +45,17 @@ actix-rt = "2.2"
actix-server = "2"
awc = { version = "3", default-features = false }

bytes = "1"
futures-core = { version = "0.3.17", default-features = false }
http = "0.2.7"
log = "0.4"
socket2 = "0.5"
serde = "1"
serde_json = "1"
slab = "0.4"
serde_urlencoded = "0.7"
tls-openssl = { version = "0.10.55", package = "openssl", optional = true }
tokio = { version = "1.24.2", features = ["sync"] }

[dev-dependencies]
actix-http = "3"
|
@@ -1,17 +1,20 @@
# `actix-http-test`

<!-- prettier-ignore-start -->

[![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test)
[![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.2.0)](https://docs.rs/actix-http-test/3.2.0)
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http-test)
<br>
[![Dependency Status](https://deps.rs/crate/actix-http-test/3.2.0/status.svg)](https://deps.rs/crate/actix-http-test/3.2.0)
[![Download](https://img.shields.io/crates/d/actix-http-test.svg)](https://crates.io/crates/actix-http-test)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)

<!-- prettier-ignore-end -->

<!-- cargo-rdme start -->

Various helpers for Actix applications to use during testing.

<!-- cargo-rdme end -->
|
@@ -2,9 +2,9 @@

#![deny(rust_2018_idioms, nonstandard_style)]
#![warn(future_incompatible)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]

#[cfg(feature = "openssl")]
extern crate tls_openssl as openssl;

@@ -30,27 +30,31 @@ use tokio::sync::mpsc;
/// for HTTP applications.
///
/// # Examples
///
/// ```
/// use actix_http::{HttpService, Response, Error, StatusCode};
/// use actix_http_test::test_server;
/// use actix_service::{fn_service, map_config, ServiceFactoryExt as _};
///
/// #[actix_rt::test]
/// # async fn hidden_test() {}
/// async fn test_example() {
///     let srv = test_server(|| {
///         HttpService::build()
///             .h1(fn_service(|req| async move {
///                 Ok::<_, Error>(Response::ok())
///             }))
///             .tcp()
///             .map_err(|_| ())
///     })
///     .await;
///
///     let req = srv.get("/");
///     let response = req.send().await.unwrap();
///
///     assert_eq!(response.status(), StatusCode::OK);
/// }
/// # actix_rt::System::new().block_on(test_example());
/// ```
pub async fn test_server<F: ServerServiceFactory<TcpStream>>(factory: F) -> TestServer {
    let tcp = net::TcpListener::bind("127.0.0.1:0").unwrap();
File diff suppressed because it is too large
@ -1,30 +1,59 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "actix-http"
|
name = "actix-http"
|
||||||
version = "3.2.2"
|
version = "3.7.0"
|
||||||
authors = [
|
authors = [
|
||||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||||
"Rob Ede <robjtede@icloud.com>",
|
"Rob Ede <robjtede@icloud.com>",
|
||||||
]
|
]
|
||||||
description = "HTTP primitives for the Actix ecosystem"
|
description = "HTTP types and services for the Actix ecosystem"
|
||||||
keywords = ["actix", "http", "framework", "async", "futures"]
|
keywords = ["actix", "http", "framework", "async", "futures"]
|
||||||
homepage = "https://actix.rs"
|
homepage = "https://actix.rs"
|
||||||
repository = "https://github.com/actix/actix-web.git"
|
repository = "https://github.com/actix/actix-web"
|
||||||
categories = [
|
categories = [
|
||||||
"network-programming",
|
"network-programming",
|
||||||
"asynchronous",
|
"asynchronous",
|
||||||
"web-programming::http-server",
|
"web-programming::http-server",
|
||||||
"web-programming::websocket",
|
"web-programming::websocket",
|
||||||
]
|
]
|
||||||
license = "MIT OR Apache-2.0"
|
license.workspace = true
|
||||||
edition = "2018"
|
edition.workspace = true
|
||||||
|
rust-version.workspace = true
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
[package.metadata.docs.rs]
|
||||||
# features that docs.rs will build with
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
features = ["http2", "ws", "openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"]
|
features = [
|
||||||
|
"http2",
|
||||||
|
"ws",
|
||||||
|
"openssl",
|
||||||
|
"rustls-0_20",
|
||||||
|
"rustls-0_21",
|
||||||
|
"rustls-0_22",
|
||||||
|
"rustls-0_23",
|
||||||
|
"compress-brotli",
|
||||||
|
"compress-gzip",
|
||||||
|
"compress-zstd",
|
||||||
|
]
|
||||||
|
|
||||||
[lib]
|
[package.metadata.cargo_check_external_types]
|
||||||
name = "actix_http"
|
allowed_external_types = [
|
||||||
path = "src/lib.rs"
|
"actix_codec::*",
|
||||||
|
"actix_service::*",
|
||||||
|
"actix_tls::*",
|
||||||
|
"actix_utils::*",
|
||||||
|
"bytes::*",
|
||||||
|
"bytestring::*",
|
||||||
|
"encoding_rs::*",
|
||||||
|
"futures_core::*",
|
||||||
|
"h2::*",
|
||||||
|
"http::*",
|
||||||
|
"httparse::*",
|
||||||
|
"language_tags::*",
|
||||||
|
"mime::*",
|
||||||
|
"openssl::*",
|
||||||
|
"rustls::*",
|
||||||
|
"tokio_util::*",
|
||||||
|
"tokio::*",
|
||||||
|
]
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = []
|
default = []
|
||||||
@ -43,15 +72,27 @@ ws = [
|
|||||||
# TLS via OpenSSL
|
# TLS via OpenSSL
|
||||||
openssl = ["actix-tls/accept", "actix-tls/openssl"]
|
openssl = ["actix-tls/accept", "actix-tls/openssl"]
|
||||||
|
|
||||||
# TLS via Rustls
|
# TLS via Rustls v0.20
|
||||||
rustls = ["actix-tls/accept", "actix-tls/rustls"]
|
rustls = ["rustls-0_20"]
|
||||||
|
|
||||||
|
# TLS via Rustls v0.20
|
||||||
|
rustls-0_20 = ["actix-tls/accept", "actix-tls/rustls-0_20"]
|
||||||
|
|
||||||
|
# TLS via Rustls v0.21
|
||||||
|
rustls-0_21 = ["actix-tls/accept", "actix-tls/rustls-0_21"]
|
||||||
|
|
||||||
|
# TLS via Rustls v0.22
|
||||||
|
rustls-0_22 = ["actix-tls/accept", "actix-tls/rustls-0_22"]
|
||||||
|
|
||||||
|
# TLS via Rustls v0.23
|
||||||
|
rustls-0_23 = ["actix-tls/accept", "actix-tls/rustls-0_23"]
|
||||||
|
|
||||||
# Compression codecs
|
# Compression codecs
|
||||||
compress-brotli = ["__compress", "brotli"]
|
compress-brotli = ["__compress", "brotli"]
|
||||||
compress-gzip = ["__compress", "flate2"]
|
compress-gzip = ["__compress", "flate2"]
|
||||||
compress-zstd = ["__compress", "zstd"]
|
compress-zstd = ["__compress", "zstd"]
|
||||||
|
|
||||||
# Internal (PRIVATE!) features used to aid testing and cheking feature status.
|
# Internal (PRIVATE!) features used to aid testing and checking feature status.
|
||||||
# Don't rely on these whatsoever. They are semver-exempt and may disappear at anytime.
|
# Don't rely on these whatsoever. They are semver-exempt and may disappear at anytime.
|
||||||
__compress = []
|
__compress = []
|
||||||
|
|
||||||
@ -61,82 +102,80 @@ actix-codec = "0.5"
|
|||||||
actix-utils = "3"
|
actix-utils = "3"
|
||||||
actix-rt = { version = "2.2", default-features = false }
|
actix-rt = { version = "2.2", default-features = false }
|
||||||
|
|
||||||
ahash = "0.7"
|
ahash = "0.8"
|
||||||
bitflags = "1.2"
|
bitflags = "2"
|
||||||
bytes = "1"
|
bytes = "1"
|
||||||
bytestring = "1"
|
bytestring = "1"
|
||||||
derive_more = "0.99.5"
|
derive_more = "0.99.5"
|
||||||
encoding_rs = "0.8"
|
encoding_rs = "0.8"
|
||||||
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||||
http = "0.2.5"
|
http = "0.2.7"
|
||||||
httparse = "1.5.1"
|
httparse = "1.5.1"
|
||||||
httpdate = "1.0.1"
|
httpdate = "1.0.1"
|
||||||
itoa = "1"
|
itoa = "1"
|
||||||
language-tags = "0.3"
|
language-tags = "0.3"
|
||||||
mime = "0.3"
|
mime = "0.3.4"
|
||||||
percent-encoding = "2.1"
|
percent-encoding = "2.1"
|
||||||
pin-project-lite = "0.2"
|
pin-project-lite = "0.2"
|
||||||
smallvec = "1.6.1"
|
smallvec = "1.6.1"
|
||||||
tokio = { version = "1.18.4", features = [] }
|
tokio = { version = "1.24.2", features = [] }
|
||||||
tokio-util = { version = "0.7", features = ["io", "codec"] }
|
tokio-util = { version = "0.7", features = ["io", "codec"] }
|
||||||
tracing = { version = "0.1.30", default-features = false, features = ["log"] }
|
tracing = { version = "0.1.30", default-features = false, features = ["log"] }
|
||||||
|
|
||||||
# http2
|
# http2
|
||||||
h2 = { version = "0.3.9", optional = true }
|
h2 = { version = "0.3.26", optional = true }
|
||||||
|
|
||||||
# websockets
|
# websockets
|
||||||
local-channel = { version = "0.1", optional = true }
|
local-channel = { version = "0.1", optional = true }
|
||||||
base64 = { version = "0.13", optional = true }
|
base64 = { version = "0.22", optional = true }
|
||||||
rand = { version = "0.8", optional = true }
|
rand = { version = "0.8", optional = true }
|
||||||
sha1 = { version = "0.10", optional = true }
|
sha1 = { version = "0.10", optional = true }
|
||||||
|
|
||||||
# openssl/rustls
|
# openssl/rustls
|
||||||
actix-tls = { version = "3", default-features = false, optional = true }
|
actix-tls = { version = "3.4", default-features = false, optional = true }
|
||||||
|
|
||||||
# compress-*
|
# compress-*
|
||||||
brotli = { version = "3.3.3", optional = true }
|
brotli = { version = "6", optional = true }
|
||||||
flate2 = { version = "1.0.13", optional = true }
|
flate2 = { version = "1.0.13", optional = true }
|
||||||
zstd = { version = "0.12", optional = true }
|
zstd = { version = "0.13", optional = true }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
actix-http-test = { version = "3", features = ["openssl"] }
|
actix-http-test = { version = "3", features = ["openssl"] }
|
||||||
actix-server = "2"
|
actix-server = "2"
|
||||||
actix-tls = { version = "3", features = ["openssl"] }
|
actix-tls = { version = "3.4", features = ["openssl", "rustls-0_23-webpki-roots"] }
|
||||||
actix-web = "4"
|
actix-web = "4"
|
||||||
|
|
||||||
async-stream = "0.3"
|
async-stream = "0.3"
|
||||||
criterion = { version = "0.4", features = ["html_reports"] }
|
criterion = { version = "0.5", features = ["html_reports"] }
|
||||||
env_logger = "0.9"
|
divan = "0.1.8"
|
||||||
|
env_logger = "0.11"
|
||||||
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||||
memchr = "2.4"
|
memchr = "2.4"
|
||||||
once_cell = "1.9"
|
once_cell = "1.9"
|
||||||
rcgen = "0.9"
|
rcgen = "0.13"
|
||||||
regex = "1.3"
|
regex = "1.3"
|
||||||
rustversion = "1"
|
rustversion = "1"
|
||||||
rustls-pemfile = "1"
|
rustls-pemfile = "2"
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
serde_json = "1.0"
|
serde_json = "1.0"
|
||||||
static_assertions = "1"
|
static_assertions = "1"
|
||||||
tls-openssl = { package = "openssl", version = "0.10.9" }
|
tls-openssl = { package = "openssl", version = "0.10.55" }
|
||||||
tls-rustls = { package = "rustls", version = "0.20.0" }
|
tls-rustls_023 = { package = "rustls", version = "0.23" }
|
||||||
tokio = { version = "1.18.4", features = ["net", "rt", "macros"] }
|
tokio = { version = "1.24.2", features = ["net", "rt", "macros"] }
|
||||||
|
|
||||||
[[example]]
|
[[example]]
|
||||||
name = "ws"
|
name = "ws"
|
||||||
required-features = ["ws", "rustls"]
|
required-features = ["ws", "rustls-0_23"]
|
||||||
|
|
||||||
|
[[example]]
|
||||||
|
name = "tls_rustls"
|
||||||
|
required-features = ["http2", "rustls-0_23"]
|
||||||
|
|
||||||
[[bench]]
|
[[bench]]
|
||||||
name = "write-camel-case"
|
name = "response-body-compression"
|
||||||
harness = false
|
harness = false
|
||||||
|
required-features = ["compress-brotli", "compress-gzip", "compress-zstd"]
|
||||||
|
|
||||||
[[bench]]
|
[[bench]]
|
||||||
name = "status-line"
|
name = "date-formatting"
|
||||||
harness = false
|
|
||||||
|
|
||||||
[[bench]]
|
|
||||||
name = "uninit-headers"
|
|
||||||
harness = false
|
|
||||||
|
|
||||||
[[bench]]
|
|
||||||
name = "quality-value"
|
|
||||||
harness = false
|
harness = false
|
||||||
|
@ -1,22 +1,21 @@
|
|||||||
# actix-http
|
# `actix-http`
|
||||||
|
|
||||||
> HTTP primitives for the Actix ecosystem.
|
> HTTP types and services for the Actix ecosystem.
|
||||||
|
|
||||||
|
<!-- prettier-ignore-start -->
|
||||||
|
|
||||||
[![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http)
|
[![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http)
|
||||||
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.2.2)](https://docs.rs/actix-http/3.2.2)
|
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.7.0)](https://docs.rs/actix-http/3.7.0)
|
||||||
![Version](https://img.shields.io/badge/rustc-1.59+-ab6000.svg)
|
![Version](https://img.shields.io/badge/rustc-1.72+-ab6000.svg)
|
||||||
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
|
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
|
||||||
<br />
|
<br />
|
||||||
[![dependency status](https://deps.rs/crate/actix-http/3.2.2/status.svg)](https://deps.rs/crate/actix-http/3.2.2)
|
[![dependency status](https://deps.rs/crate/actix-http/3.7.0/status.svg)](https://deps.rs/crate/actix-http/3.7.0)
|
||||||
[![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http)
|
[![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http)
|
||||||
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
|
||||||
|
|
||||||
## Documentation & Resources
|
<!-- prettier-ignore-end -->
|
||||||
|
|
||||||
- [API Documentation](https://docs.rs/actix-http)
|
## Examples
|
||||||
- Minimum Supported Rust Version (MSRV): 1.54
|
|
||||||
|
|
||||||
## Example
|
|
||||||
|
|
||||||
```rust
|
```rust
|
||||||
use std::{env, io};
|
use std::{env, io};
|
||||||
@ -49,18 +48,3 @@ async fn main() -> io::Result<()> {
|
|||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
## License
|
|
||||||
|
|
||||||
This project is licensed under either of
|
|
||||||
|
|
||||||
- Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0))
|
|
||||||
- MIT license ([LICENSE-MIT](LICENSE-MIT) or [http://opensource.org/licenses/MIT](http://opensource.org/licenses/MIT))
|
|
||||||
|
|
||||||
at your option.
|
|
||||||
|
|
||||||
## Code of Conduct
|
|
||||||
|
|
||||||
Contribution to the actix-http crate is organized under the terms of the
|
|
||||||
Contributor Covenant, the maintainer of actix-http, @fafhrd91, promises to
|
|
||||||
intervene to uphold that code of conduct.
|
|
||||||
|
actix-http/benches/date-formatting.rs (new file, 20 lines)
@@ -0,0 +1,20 @@
use std::time::SystemTime;

use actix_http::header::HttpDate;
use divan::{black_box, AllocProfiler, Bencher};

#[global_allocator]
static ALLOC: AllocProfiler = AllocProfiler::system();

#[divan::bench]
fn date_formatting(b: Bencher<'_, '_>) {
    let now = SystemTime::now();

    b.bench(|| {
        black_box(HttpDate::from(black_box(now)).to_string());
    })
}

fn main() {
    divan::main();
}
|
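For context, the allocation-profiled divan benchmark above measures only `HttpDate` formatting. A rough, hypothetical way to spot-check the same operation without any bench harness is a plain `Instant` timing loop; the iteration count below is an arbitrary choice, not part of the patch.

```rust
use std::time::{Instant, SystemTime};

use actix_http::header::HttpDate;

fn main() {
    let now = SystemTime::now();
    let start = Instant::now();

    // Format the same timestamp repeatedly; 100_000 iterations is arbitrary.
    for _ in 0..100_000 {
        let formatted = HttpDate::from(now).to_string();
        std::hint::black_box(formatted);
    }

    println!("elapsed: {:?}", start.elapsed());
}
```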
Deleted benchmark `quality-value` (97 lines; its `[[bench]]` entry is removed in the Cargo.toml hunk above):
@@ -1,97 +0,0 @@
#![allow(clippy::uninlined_format_args)]

use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};

const CODES: &[u16] = &[0, 1000, 201, 800, 550];

fn bench_quality_display_impls(c: &mut Criterion) {
    let mut group = c.benchmark_group("quality value display impls");

    for i in CODES.iter() {
        group.bench_with_input(BenchmarkId::new("New (fast?)", i), i, |b, &i| {
            b.iter(|| _new::Quality(i).to_string())
        });

        group.bench_with_input(BenchmarkId::new("Naive", i), i, |b, &i| {
            b.iter(|| _naive::Quality(i).to_string())
        });
    }

    group.finish();
}

criterion_group!(benches, bench_quality_display_impls);
criterion_main!(benches);

mod _new {
    use std::fmt;

    pub struct Quality(pub(crate) u16);

    impl fmt::Display for Quality {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match self.0 {
                0 => f.write_str("0"),
                1000 => f.write_str("1"),

                // some number in the range 1–999
                x => {
                    f.write_str("0.")?;

                    // this implementation avoids string allocation otherwise required
                    // for `.trim_end_matches('0')`

                    if x < 10 {
                        f.write_str("00")?;
                        // 0 is handled so it's not possible to have a trailing 0, we can just return
                        itoa_fmt(f, x)
                    } else if x < 100 {
                        f.write_str("0")?;
                        if x % 10 == 0 {
                            // trailing 0, divide by 10 and write
                            itoa_fmt(f, x / 10)
                        } else {
                            itoa_fmt(f, x)
                        }
                    } else {
                        // x is in range 101–999

                        if x % 100 == 0 {
                            // two trailing 0s, divide by 100 and write
                            itoa_fmt(f, x / 100)
                        } else if x % 10 == 0 {
                            // one trailing 0, divide by 10 and write
                            itoa_fmt(f, x / 10)
                        } else {
                            itoa_fmt(f, x)
                        }
                    }
                }
            }
        }
    }

    pub fn itoa_fmt<W: fmt::Write, V: itoa::Integer>(mut wr: W, value: V) -> fmt::Result {
        let mut buf = itoa::Buffer::new();
        wr.write_str(buf.format(value))
    }
}

mod _naive {
    use std::fmt;

    pub struct Quality(pub(crate) u16);

    impl fmt::Display for Quality {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match self.0 {
                0 => f.write_str("0"),
                1000 => f.write_str("1"),

                x => {
                    write!(f, "{}", format!("{:03}", x).trim_end_matches('0'))
                }
            }
        }
    }
}
actix-http/benches/response-body-compression.rs (new file, 88 lines)
@@ -0,0 +1,88 @@
use std::convert::Infallible;

use actix_http::{encoding::Encoder, ContentEncoding, Request, Response, StatusCode};
use actix_service::{fn_service, Service as _};
use criterion::{black_box, criterion_group, criterion_main, Criterion};

static BODY: &[u8] = include_bytes!("../Cargo.toml");

fn compression_responses(c: &mut Criterion) {
    let mut group = c.benchmark_group("compression responses");

    group.bench_function("identity", |b| {
        let rt = actix_rt::Runtime::new().unwrap();

        let identity_svc = fn_service(|_: Request| async move {
            let mut res = Response::with_body(StatusCode::OK, ());
            let body = black_box(Encoder::response(
                ContentEncoding::Identity,
                res.head_mut(),
                BODY,
            ));
            Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
        });

        b.iter(|| {
            rt.block_on(identity_svc.call(Request::new())).unwrap();
        });
    });

    group.bench_function("gzip", |b| {
        let rt = actix_rt::Runtime::new().unwrap();

        let identity_svc = fn_service(|_: Request| async move {
            let mut res = Response::with_body(StatusCode::OK, ());
            let body = black_box(Encoder::response(
                ContentEncoding::Gzip,
                res.head_mut(),
                BODY,
            ));
            Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
        });

        b.iter(|| {
            rt.block_on(identity_svc.call(Request::new())).unwrap();
        });
    });

    group.bench_function("br", |b| {
        let rt = actix_rt::Runtime::new().unwrap();

        let identity_svc = fn_service(|_: Request| async move {
            let mut res = Response::with_body(StatusCode::OK, ());
            let body = black_box(Encoder::response(
                ContentEncoding::Brotli,
                res.head_mut(),
                BODY,
            ));
            Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
        });

        b.iter(|| {
            rt.block_on(identity_svc.call(Request::new())).unwrap();
        });
    });

    group.bench_function("zstd", |b| {
        let rt = actix_rt::Runtime::new().unwrap();

        let identity_svc = fn_service(|_: Request| async move {
            let mut res = Response::with_body(StatusCode::OK, ());
            let body = black_box(Encoder::response(
                ContentEncoding::Zstd,
                res.head_mut(),
                BODY,
            ));
            Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
        });

        b.iter(|| {
            rt.block_on(identity_svc.call(Request::new())).unwrap();
        });
    });

    group.finish();
}

criterion_group!(benches, compression_responses);
criterion_main!(benches);
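The four bench closures above differ only in the `ContentEncoding` passed to `Encoder::response`. A possible, untested way to cut the repetition is a small helper that takes the encoding as a parameter; `bench_encoding` is a hypothetical name and is not part of the patch.

```rust
use std::convert::Infallible;

use actix_http::{encoding::Encoder, ContentEncoding, Request, Response, StatusCode};
use actix_service::{fn_service, Service as _};
use criterion::{black_box, measurement::WallTime, BenchmarkGroup};

static BODY: &[u8] = include_bytes!("../Cargo.toml");

// Hypothetical helper: registers one benchmark per content encoding.
fn bench_encoding(group: &mut BenchmarkGroup<'_, WallTime>, name: &str, encoding: ContentEncoding) {
    group.bench_function(name, |b| {
        let rt = actix_rt::Runtime::new().unwrap();

        let svc = fn_service(move |_: Request| async move {
            let mut res = Response::with_body(StatusCode::OK, ());
            let body = black_box(Encoder::response(encoding, res.head_mut(), BODY));
            Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
        });

        b.iter(|| {
            rt.block_on(svc.call(Request::new())).unwrap();
        });
    });
}
```

With such a helper, `compression_responses` would reduce to four one-line calls for `identity`, `gzip`, `br`, and `zstd`.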
Deleted benchmark `status-line` (214 lines; its `[[bench]]` entry is removed in the Cargo.toml hunk above):
@@ -1,214 +0,0 @@
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};

use bytes::BytesMut;
use http::Version;

const CODES: &[u16] = &[201, 303, 404, 515];

fn bench_write_status_line_11(c: &mut Criterion) {
    let mut group = c.benchmark_group("write_status_line v1.1");

    let version = Version::HTTP_11;

    for i in CODES.iter() {
        group.bench_with_input(BenchmarkId::new("Original (unsafe)", i), i, |b, &i| {
            b.iter(|| {
                let mut b = BytesMut::with_capacity(35);
                _original::write_status_line(version, i, &mut b);
            })
        });

        group.bench_with_input(BenchmarkId::new("New (safe)", i), i, |b, &i| {
            b.iter(|| {
                let mut b = BytesMut::with_capacity(35);
                _new::write_status_line(version, i, &mut b);
            })
        });

        group.bench_with_input(BenchmarkId::new("Naive", i), i, |b, &i| {
            b.iter(|| {
                let mut b = BytesMut::with_capacity(35);
                _naive::write_status_line(version, i, &mut b);
            })
        });
    }

    group.finish();
}

fn bench_write_status_line_10(c: &mut Criterion) {
    let mut group = c.benchmark_group("write_status_line v1.0");

    let version = Version::HTTP_10;

    for i in CODES.iter() {
        group.bench_with_input(BenchmarkId::new("Original (unsafe)", i), i, |b, &i| {
            b.iter(|| {
                let mut b = BytesMut::with_capacity(35);
                _original::write_status_line(version, i, &mut b);
            })
        });

        group.bench_with_input(BenchmarkId::new("New (safe)", i), i, |b, &i| {
            b.iter(|| {
                let mut b = BytesMut::with_capacity(35);
                _new::write_status_line(version, i, &mut b);
            })
        });

        group.bench_with_input(BenchmarkId::new("Naive", i), i, |b, &i| {
            b.iter(|| {
                let mut b = BytesMut::with_capacity(35);
                _naive::write_status_line(version, i, &mut b);
            })
        });
    }

    group.finish();
}

fn bench_write_status_line_09(c: &mut Criterion) {
    let mut group = c.benchmark_group("write_status_line v0.9");

    let version = Version::HTTP_09;

    for i in CODES.iter() {
        group.bench_with_input(BenchmarkId::new("Original (unsafe)", i), i, |b, &i| {
            b.iter(|| {
                let mut b = BytesMut::with_capacity(35);
                _original::write_status_line(version, i, &mut b);
            })
        });

        group.bench_with_input(BenchmarkId::new("New (safe)", i), i, |b, &i| {
            b.iter(|| {
                let mut b = BytesMut::with_capacity(35);
                _new::write_status_line(version, i, &mut b);
            })
        });

        group.bench_with_input(BenchmarkId::new("Naive", i), i, |b, &i| {
            b.iter(|| {
                let mut b = BytesMut::with_capacity(35);
                _naive::write_status_line(version, i, &mut b);
            })
        });
    }

    group.finish();
}

criterion_group!(
    benches,
    bench_write_status_line_11,
    bench_write_status_line_10,
    bench_write_status_line_09
);
criterion_main!(benches);

mod _naive {
    use bytes::{BufMut, BytesMut};
    use http::Version;

    pub(crate) fn write_status_line(version: Version, n: u16, bytes: &mut BytesMut) {
        match version {
            Version::HTTP_11 => bytes.put_slice(b"HTTP/1.1 "),
            Version::HTTP_10 => bytes.put_slice(b"HTTP/1.0 "),
            Version::HTTP_09 => bytes.put_slice(b"HTTP/0.9 "),
            _ => {
                // other HTTP version handlers do not use this method
            }
        }

        bytes.put_slice(n.to_string().as_bytes());
    }
}

mod _new {
    use bytes::{BufMut, BytesMut};
    use http::Version;

    const DIGITS_START: u8 = b'0';

    pub(crate) fn write_status_line(version: Version, n: u16, bytes: &mut BytesMut) {
        match version {
            Version::HTTP_11 => bytes.put_slice(b"HTTP/1.1 "),
            Version::HTTP_10 => bytes.put_slice(b"HTTP/1.0 "),
            Version::HTTP_09 => bytes.put_slice(b"HTTP/0.9 "),
            _ => {
                // other HTTP version handlers do not use this method
            }
        }

        let d100 = (n / 100) as u8;
        let d10 = ((n / 10) % 10) as u8;
        let d1 = (n % 10) as u8;

        bytes.put_u8(DIGITS_START + d100);
        bytes.put_u8(DIGITS_START + d10);
        bytes.put_u8(DIGITS_START + d1);

        bytes.put_u8(b' ');
    }
}

mod _original {
    use std::ptr;

    use bytes::{BufMut, BytesMut};
    use http::Version;

    const DEC_DIGITS_LUT: &[u8] = b"0001020304050607080910111213141516171819\
          2021222324252627282930313233343536373839\
          4041424344454647484950515253545556575859\
          6061626364656667686970717273747576777879\
          8081828384858687888990919293949596979899";

    pub(crate) const STATUS_LINE_BUF_SIZE: usize = 13;

    pub(crate) fn write_status_line(version: Version, mut n: u16, bytes: &mut BytesMut) {
        let mut buf: [u8; STATUS_LINE_BUF_SIZE] = *b"HTTP/1.1     ";

        match version {
            Version::HTTP_2 => buf[5] = b'2',
            Version::HTTP_10 => buf[7] = b'0',
            Version::HTTP_09 => {
                buf[5] = b'0';
                buf[7] = b'9';
            }
            _ => {}
        }

        let mut curr: isize = 12;
        let buf_ptr = buf.as_mut_ptr();
        let lut_ptr = DEC_DIGITS_LUT.as_ptr();
        let four = n > 999;

        // decode 2 more chars, if > 2 chars
        let d1 = (n % 100) << 1;
        n /= 100;
        curr -= 2;
        unsafe {
            ptr::copy_nonoverlapping(lut_ptr.offset(d1 as isize), buf_ptr.offset(curr), 2);
        }

        // decode last 1 or 2 chars
        if n < 10 {
            curr -= 1;
            unsafe {
                *buf_ptr.offset(curr) = (n as u8) + b'0';
            }
        } else {
            let d1 = n << 1;
            curr -= 2;
            unsafe {
                ptr::copy_nonoverlapping(lut_ptr.offset(d1 as isize), buf_ptr.offset(curr), 2);
            }
        }

        bytes.put_slice(&buf);
        if four {
            bytes.put_u8(b' ');
        }
    }
}
Deleted benchmark `uninit-headers` (135 lines; its `[[bench]]` entry is removed in the Cargo.toml hunk above):
@@ -1,135 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};

use bytes::BytesMut;

// A Miri run detects UB, seen on this playground:
// https://play.rust-lang.org/?version=stable&mode=debug&edition=2018&gist=f5d9aa166aa48df8dca05fce2b6c3915

fn bench_header_parsing(c: &mut Criterion) {
    c.bench_function("Original (Unsound) [short]", |b| {
        b.iter(|| {
            let mut buf = BytesMut::from(REQ_SHORT);
            _original::parse_headers(&mut buf);
        })
    });

    c.bench_function("New (safe) [short]", |b| {
        b.iter(|| {
            let mut buf = BytesMut::from(REQ_SHORT);
            _new::parse_headers(&mut buf);
        })
    });

    c.bench_function("Original (Unsound) [realistic]", |b| {
        b.iter(|| {
            let mut buf = BytesMut::from(REQ);
            _original::parse_headers(&mut buf);
        })
    });

    c.bench_function("New (safe) [realistic]", |b| {
        b.iter(|| {
            let mut buf = BytesMut::from(REQ);
            _new::parse_headers(&mut buf);
        })
    });
}

criterion_group!(benches, bench_header_parsing);
criterion_main!(benches);

const MAX_HEADERS: usize = 96;

const EMPTY_HEADER_ARRAY: [httparse::Header<'static>; MAX_HEADERS] =
    [httparse::EMPTY_HEADER; MAX_HEADERS];

#[derive(Clone, Copy)]
struct HeaderIndex {
    name: (usize, usize),
    value: (usize, usize),
}

const EMPTY_HEADER_INDEX: HeaderIndex = HeaderIndex {
    name: (0, 0),
    value: (0, 0),
};

const EMPTY_HEADER_INDEX_ARRAY: [HeaderIndex; MAX_HEADERS] = [EMPTY_HEADER_INDEX; MAX_HEADERS];

impl HeaderIndex {
    fn record(bytes: &[u8], headers: &[httparse::Header<'_>], indices: &mut [HeaderIndex]) {
        let bytes_ptr = bytes.as_ptr() as usize;
        for (header, indices) in headers.iter().zip(indices.iter_mut()) {
            let name_start = header.name.as_ptr() as usize - bytes_ptr;
            let name_end = name_start + header.name.len();
            indices.name = (name_start, name_end);
            let value_start = header.value.as_ptr() as usize - bytes_ptr;
            let value_end = value_start + header.value.len();
            indices.value = (value_start, value_end);
        }
    }
}

// test cases taken from:
// https://github.com/seanmonstar/httparse/blob/master/benches/parse.rs

const REQ_SHORT: &[u8] = b"\
GET / HTTP/1.0\r\n\
Host: example.com\r\n\
Cookie: session=60; user_id=1\r\n\r\n";

const REQ: &[u8] = b"\
GET /wp-content/uploads/2010/03/hello-kitty-darth-vader-pink.jpg HTTP/1.1\r\n\
Host: www.kittyhell.com\r\n\
User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; ja-JP-mac; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 Pathtraq/0.9\r\n\
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n\
Accept-Language: ja,en-us;q=0.7,en;q=0.3\r\n\
Accept-Encoding: gzip,deflate\r\n\
Accept-Charset: Shift_JIS,utf-8;q=0.7,*;q=0.7\r\n\
Keep-Alive: 115\r\n\
Connection: keep-alive\r\n\
Cookie: wp_ozh_wsa_visits=2; wp_ozh_wsa_visit_lasttime=xxxxxxxxxx; __utma=xxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.x; __utmz=xxxxxxxxx.xxxxxxxxxx.x.x.utmccn=(referral)|utmcsr=reader.livedoor.com|utmcct=/reader/|utmcmd=referral|padding=under256\r\n\r\n";

mod _new {
    use super::*;

    pub fn parse_headers(src: &mut BytesMut) -> usize {
        let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;
        let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;

        let mut req = httparse::Request::new(&mut parsed);
        match req.parse(src).unwrap() {
            httparse::Status::Complete(_len) => {
                HeaderIndex::record(src, req.headers, &mut headers);
                req.headers.len()
            }
            _ => unreachable!(),
        }
    }
}

mod _original {
    use super::*;

    use std::mem::MaybeUninit;

    pub fn parse_headers(src: &mut BytesMut) -> usize {
        #![allow(invalid_value, clippy::uninit_assumed_init)]

        let mut headers: [HeaderIndex; MAX_HEADERS] =
            unsafe { MaybeUninit::uninit().assume_init() };

        #[allow(invalid_value)]
        let mut parsed: [httparse::Header<'_>; MAX_HEADERS] =
            unsafe { MaybeUninit::uninit().assume_init() };

        let mut req = httparse::Request::new(&mut parsed);
        match req.parse(src).unwrap() {
            httparse::Status::Complete(_len) => {
                HeaderIndex::record(src, req.headers, &mut headers);
                req.headers.len()
            }
            _ => unreachable!(),
        }
    }
}
Deleted benchmark `write-camel-case` (93 lines; its `[[bench]]` entry is removed in the Cargo.toml hunk above):
@@ -1,93 +0,0 @@
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};

fn bench_write_camel_case(c: &mut Criterion) {
    let mut group = c.benchmark_group("write_camel_case");

    let names = ["connection", "Transfer-Encoding", "transfer-encoding"];

    for &i in &names {
        let bts = i.as_bytes();

        group.bench_with_input(BenchmarkId::new("Original", i), bts, |b, bts| {
            b.iter(|| {
                let mut buf = black_box([0; 24]);
                _original::write_camel_case(black_box(bts), &mut buf)
            });
        });

        group.bench_with_input(BenchmarkId::new("New", i), bts, |b, bts| {
            b.iter(|| {
                let mut buf = black_box([0; 24]);
                let len = black_box(bts.len());
                _new::write_camel_case(black_box(bts), buf.as_mut_ptr(), len)
            });
        });
    }

    group.finish();
}

criterion_group!(benches, bench_write_camel_case);
criterion_main!(benches);

mod _new {
    pub fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
        // first copy entire (potentially wrong) slice to output
        let buffer = unsafe {
            std::ptr::copy_nonoverlapping(value.as_ptr(), buf, len);
            std::slice::from_raw_parts_mut(buf, len)
        };

        let mut iter = value.iter();

        // first character should be uppercase
        if let Some(c @ b'a'..=b'z') = iter.next() {
            buffer[0] = c & 0b1101_1111;
        }

        // track 1 ahead of the current position since that's the location being assigned to
        let mut index = 2;

        // remaining characters after hyphens should also be uppercase
        while let Some(&c) = iter.next() {
            if c == b'-' {
                // advance iter by one and uppercase if needed
                if let Some(c @ b'a'..=b'z') = iter.next() {
                    buffer[index] = c & 0b1101_1111;
                }
            }

            index += 1;
        }
    }
}

mod _original {
    pub fn write_camel_case(value: &[u8], buffer: &mut [u8]) {
        let mut index = 0;
        let key = value;
        let mut key_iter = key.iter();

        if let Some(c) = key_iter.next() {
            if *c >= b'a' && *c <= b'z' {
                buffer[index] = *c ^ b' ';
                index += 1;
            }
        } else {
            return;
        }

        while let Some(c) = key_iter.next() {
            buffer[index] = *c;
            index += 1;
            if *c == b'-' {
                if let Some(c) = key_iter.next() {
                    if *c >= b'a' && *c <= b'z' {
                        buffer[index] = *c ^ b' ';
                        index += 1;
                    }
                }
            }
        }
    }
}
@@ -8,7 +8,7 @@
 use std::{convert::Infallible, io};
 
-use actix_http::{HttpService, Request, Response, StatusCode};
+use actix_http::{body::BodyStream, HttpService, Request, Response, StatusCode};
 use actix_server::Server;
 
 #[tokio::main(flavor = "current_thread")]
@@ -19,7 +19,12 @@ async fn main() -> io::Result<()> {
         .bind("h2c-detect", ("127.0.0.1", 8080), || {
             HttpService::build()
                 .finish(|_req: Request| async move {
-                    Ok::<_, Infallible>(Response::build(StatusCode::OK).body("Hello!"))
+                    Ok::<_, Infallible>(Response::build(StatusCode::OK).body(BodyStream::new(
+                        futures_util::stream::iter([
+                            Ok::<_, String>("123".into()),
+                            Err("wertyuikmnbvcxdfty6t".to_owned()),
+                        ]),
+                    )))
                 })
                 .tcp_auto_h2c()
         })?
@@ -23,10 +23,7 @@ async fn main() -> io::Result<()> {
                     res.insert_header(("x-head", HeaderValue::from_static("dummy value!")));
 
                     let forty_two = req.conn_data::<u32>().unwrap().to_string();
-                    res.insert_header((
-                        "x-forty-two",
-                        HeaderValue::from_str(&forty_two).unwrap(),
-                    ));
+                    res.insert_header(("x-forty-two", HeaderValue::from_str(&forty_two).unwrap()));
 
                     Ok::<_, Infallible>(res.body("Hello world!"))
                 })
actix-http/examples/tls_rustls.rs (new file, 76 lines)
@@ -0,0 +1,76 @@
//! Demonstrates TLS configuration (via Rustls) for HTTP/1.1 and HTTP/2 connections.
//!
//! Test using cURL:
//!
//! ```console
//! $ curl --insecure https://127.0.0.1:8443
//! Hello World!
//! Protocol: HTTP/2.0
//!
//! $ curl --insecure --http1.1 https://127.0.0.1:8443
//! Hello World!
//! Protocol: HTTP/1.1
//! ```

extern crate tls_rustls_023 as rustls;

use std::io;

use actix_http::{Error, HttpService, Request, Response};
use actix_utils::future::ok;

#[actix_rt::main]
async fn main() -> io::Result<()> {
    env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));

    tracing::info!("starting HTTP server at https://127.0.0.1:8443");

    actix_server::Server::build()
        .bind("echo", ("127.0.0.1", 8443), || {
            HttpService::build()
                .finish(|req: Request| {
                    let body = format!(
                        "Hello World!\n\
                         Protocol: {:?}",
                        req.head().version
                    );
                    ok::<_, Error>(Response::ok().set_body(body))
                })
                .rustls_0_23(rustls_config())
        })?
        .run()
        .await
}

fn rustls_config() -> rustls::ServerConfig {
    let rcgen::CertifiedKey { cert, key_pair } =
        rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
    let cert_file = cert.pem();
    let key_file = key_pair.serialize_pem();

    let cert_file = &mut io::BufReader::new(cert_file.as_bytes());
    let key_file = &mut io::BufReader::new(key_file.as_bytes());

    let cert_chain = rustls_pemfile::certs(cert_file)
        .collect::<Result<Vec<_>, _>>()
        .unwrap();
    let mut keys = rustls_pemfile::pkcs8_private_keys(key_file)
        .collect::<Result<Vec<_>, _>>()
        .unwrap();

    let mut config = rustls::ServerConfig::builder()
        .with_no_client_auth()
        .with_single_cert(
            cert_chain,
            rustls::pki_types::PrivateKeyDer::Pkcs8(keys.remove(0)),
        )
        .unwrap();

    const H1_ALPN: &[u8] = b"http/1.1";
    const H2_ALPN: &[u8] = b"h2";

    config.alpn_protocols.push(H2_ALPN.to_vec());
    config.alpn_protocols.push(H1_ALPN.to_vec());

    config
}
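If the same `rustls_config()` shape is wanted with certificates read from disk instead of the self-signed pair generated by `rcgen`, a minimal sketch might look like the following. It assumes the same `rustls` alias (`extern crate tls_rustls_023 as rustls;`) and the same `rustls_pemfile` calls used in the example above; `cert.pem` and `key.pem` are placeholder paths, and errors are reduced to `unwrap` just as in the example.

```rust
use std::{fs::File, io::BufReader};

// Sketch only: load a PEM cert chain and a PKCS#8 key from disk.
fn rustls_config_from_files() -> rustls::ServerConfig {
    let cert_file = &mut BufReader::new(File::open("cert.pem").unwrap());
    let key_file = &mut BufReader::new(File::open("key.pem").unwrap());

    let cert_chain = rustls_pemfile::certs(cert_file)
        .collect::<Result<Vec<_>, _>>()
        .unwrap();
    let mut keys = rustls_pemfile::pkcs8_private_keys(key_file)
        .collect::<Result<Vec<_>, _>>()
        .unwrap();

    rustls::ServerConfig::builder()
        .with_no_client_auth()
        .with_single_cert(
            cert_chain,
            rustls::pki_types::PrivateKeyDer::Pkcs8(keys.remove(0)),
        )
        .unwrap()
}
```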
@@ -1,7 +1,7 @@
 //! Sets up a WebSocket server over TCP and TLS.
 //! Sends a heartbeat message every 4 seconds but does not respond to any incoming frames.
 
-extern crate tls_rustls as rustls;
+extern crate tls_rustls_023 as rustls;
 
 use std::{
     io,
@@ -28,7 +28,9 @@ async fn main() -> io::Result<()> {
             HttpService::build().h1(handler).tcp()
         })?
         .bind("tls", ("127.0.0.1", 8443), || {
-            HttpService::build().finish(handler).rustls(tls_config())
+            HttpService::build()
+                .finish(handler)
+                .rustls_0_23(tls_config())
         })?
         .run()
         .await
@@ -83,27 +85,27 @@ impl Stream for Heartbeat {
 fn tls_config() -> rustls::ServerConfig {
     use std::io::BufReader;
 
-    use rustls::{Certificate, PrivateKey};
     use rustls_pemfile::{certs, pkcs8_private_keys};
 
-    let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap();
-    let cert_file = cert.serialize_pem().unwrap();
-    let key_file = cert.serialize_private_key_pem();
+    let rcgen::CertifiedKey { cert, key_pair } =
+        rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
+    let cert_file = cert.pem();
+    let key_file = key_pair.serialize_pem();
 
     let cert_file = &mut BufReader::new(cert_file.as_bytes());
     let key_file = &mut BufReader::new(key_file.as_bytes());
 
-    let cert_chain = certs(cert_file)
-        .unwrap()
-        .into_iter()
-        .map(Certificate)
-        .collect();
-    let mut keys = pkcs8_private_keys(key_file).unwrap();
+    let cert_chain = certs(cert_file).collect::<Result<Vec<_>, _>>().unwrap();
+    let mut keys = pkcs8_private_keys(key_file)
+        .collect::<Result<Vec<_>, _>>()
+        .unwrap();
 
     let mut config = rustls::ServerConfig::builder()
-        .with_safe_defaults()
         .with_no_client_auth()
-        .with_single_cert(cert_chain, PrivateKey(keys.remove(0)))
+        .with_single_cert(
+            cert_chain,
+            rustls::pki_types::PrivateKeyDer::Pkcs8(keys.remove(0)),
+        )
        .unwrap();
 
     config.alpn_protocols.push(b"http/1.1".to_vec());
@@ -47,9 +47,8 @@ where
 
     /// Attempts to pull out the next value of the underlying [`Stream`].
     ///
-    /// Empty values are skipped to prevent [`BodyStream`]'s transmission being
-    /// ended on a zero-length chunk, but rather proceed until the underlying
-    /// [`Stream`] ends.
+    /// Empty values are skipped to prevent [`BodyStream`]'s transmission being ended on a
+    /// zero-length chunk, but rather proceed until the underlying [`Stream`] ends.
     fn poll_next(
         mut self: Pin<&mut Self>,
         cx: &mut Context<'_>,
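The reworded doc comment above describes behavior that can be checked directly: an empty `Bytes` chunk in the stream is skipped rather than terminating the body. A small sketch of that check, using only the `BodyStream`, `to_bytes`, and `Error` items that already appear elsewhere in this patch:

```rust
use actix_http::{
    body::{to_bytes, BodyStream},
    Error,
};
use bytes::Bytes;
use futures_util::stream;

#[actix_rt::main]
async fn main() {
    // The middle chunk is zero-length; BodyStream should skip it and keep polling.
    let stream = stream::iter(vec![
        Ok::<_, Error>(Bytes::from_static(b"123")),
        Ok(Bytes::new()),
        Ok(Bytes::from_static(b"abc")),
    ]);

    let bytes = to_bytes(BodyStream::new(stream)).await.unwrap();
    assert_eq!(bytes, Bytes::from_static(b"123abc"));
}
```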
@@ -77,12 +77,8 @@ impl MessageBody for BoxBody {
         cx: &mut Context<'_>,
     ) -> Poll<Option<Result<Bytes, Self::Error>>> {
         match &mut self.0 {
-            BoxBodyInner::None(body) => {
-                Pin::new(body).poll_next(cx).map_err(|err| match err {})
-            }
-            BoxBodyInner::Bytes(body) => {
-                Pin::new(body).poll_next(cx).map_err(|err| match err {})
-            }
+            BoxBodyInner::None(body) => Pin::new(body).poll_next(cx).map_err(|err| match err {}),
+            BoxBodyInner::Bytes(body) => Pin::new(body).poll_next(cx).map_err(|err| match err {}),
             BoxBodyInner::Stream(body) => Pin::new(body).poll_next(cx),
         }
     }
@@ -104,7 +100,6 @@ impl MessageBody for BoxBody {
 
 #[cfg(test)]
 mod tests {
-
     use static_assertions::{assert_impl_all, assert_not_impl_any};
 
     use super::*;
@@ -531,7 +531,6 @@ where
 mod tests {
     use actix_rt::pin;
     use actix_utils::future::poll_fn;
-    use bytes::{Bytes, BytesMut};
     use futures_util::stream;
 
     use super::*;
@@ -555,6 +554,7 @@ mod tests {
         };
     }
 
+    #[allow(unused_allocation)] // triggered by `Box::new(()).size()`
     #[actix_rt::test]
     async fn boxing_equivalence() {
         assert_eq!(().size(), BodySize::Sized(0));
@@ -14,12 +14,14 @@ mod size;
 mod sized_stream;
 mod utils;
 
-pub use self::body_stream::BodyStream;
-pub use self::boxed::BoxBody;
-pub use self::either::EitherBody;
-pub use self::message_body::MessageBody;
 pub(crate) use self::message_body::MessageBodyMapErr;
-pub use self::none::None;
-pub use self::size::BodySize;
-pub use self::sized_stream::SizedStream;
-pub use self::utils::to_bytes;
+pub use self::{
+    body_stream::BodyStream,
+    boxed::BoxBody,
+    either::EitherBody,
+    message_body::MessageBody,
+    none::None,
+    size::BodySize,
+    sized_stream::SizedStream,
+    utils::{to_bytes, to_bytes_limited, BodyLimitExceeded},
+};
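With the flattened re-exports above, downstream code can pull everything it needs from `actix_http::body` in a single `use`; a small illustrative import, taking names only from the re-export list:

```rust
use actix_http::body::{
    to_bytes, to_bytes_limited, BodyLimitExceeded, BodySize, BodyStream, BoxBody, MessageBody,
};
```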
@@ -3,75 +3,196 @@ use std::task::Poll;
 use actix_rt::pin;
 use actix_utils::future::poll_fn;
 use bytes::{Bytes, BytesMut};
+use derive_more::{Display, Error};
 use futures_core::ready;
 
 use super::{BodySize, MessageBody};
 
-/// Collects the body produced by a `MessageBody` implementation into `Bytes`.
+/// Collects all the bytes produced by `body`.
 ///
 /// Any errors produced by the body stream are returned immediately.
 ///
+/// Consider using [`to_bytes_limited`] instead to protect against memory exhaustion.
+///
 /// # Examples
+///
 /// ```
 /// use actix_http::body::{self, to_bytes};
 /// use bytes::Bytes;
 ///
-/// # async fn test_to_bytes() {
+/// # actix_rt::System::new().block_on(async {
 /// let body = body::None::new();
 /// let bytes = to_bytes(body).await.unwrap();
 /// assert!(bytes.is_empty());
 ///
 /// let body = Bytes::from_static(b"123");
 /// let bytes = to_bytes(body).await.unwrap();
-/// assert_eq!(bytes, b"123"[..]);
-/// # }
+/// assert_eq!(bytes, "123");
+/// # });
 /// ```
 pub async fn to_bytes<B: MessageBody>(body: B) -> Result<Bytes, B::Error> {
+    to_bytes_limited(body, usize::MAX)
+        .await
+        .expect("body should never yield more than usize::MAX bytes")
+}
+
+/// Error type returned from [`to_bytes_limited`] when body produced exceeds limit.
+#[derive(Debug, Display, Error)]
+#[display(fmt = "limit exceeded while collecting body bytes")]
+#[non_exhaustive]
+pub struct BodyLimitExceeded;
+
+/// Collects the bytes produced by `body`, up to `limit` bytes.
+///
+/// If a chunk read from `poll_next` causes the total number of bytes read to exceed `limit`, an
+/// `Err(BodyLimitExceeded)` is returned.
+///
+/// Any errors produced by the body stream are returned immediately as `Ok(Err(B::Error))`.
+///
+/// # Examples
+///
+/// ```
+/// use actix_http::body::{self, to_bytes_limited};
+/// use bytes::Bytes;
+///
+/// # actix_rt::System::new().block_on(async {
+/// let body = body::None::new();
+/// let bytes = to_bytes_limited(body, 10).await.unwrap().unwrap();
+/// assert!(bytes.is_empty());
+///
+/// let body = Bytes::from_static(b"123");
+/// let bytes = to_bytes_limited(body, 10).await.unwrap().unwrap();
+/// assert_eq!(bytes, "123");
+///
+/// let body = Bytes::from_static(b"123");
+/// assert!(to_bytes_limited(body, 2).await.is_err());
+/// # });
+/// ```
+pub async fn to_bytes_limited<B: MessageBody>(
+    body: B,
+    limit: usize,
+) -> Result<Result<Bytes, B::Error>, BodyLimitExceeded> {
+    /// Sensible default (32kB) for initial, bounded allocation when collecting body bytes.
+    const INITIAL_ALLOC_BYTES: usize = 32 * 1024;
+
     let cap = match body.size() {
-        BodySize::None | BodySize::Sized(0) => return Ok(Bytes::new()),
-        BodySize::Sized(size) => size as usize,
-        // good enough first guess for chunk size
-        BodySize::Stream => 32_768,
+        BodySize::None | BodySize::Sized(0) => return Ok(Ok(Bytes::new())),
+        BodySize::Sized(size) if size as usize > limit => return Err(BodyLimitExceeded),
+        BodySize::Sized(size) => (size as usize).min(INITIAL_ALLOC_BYTES),
+        BodySize::Stream => INITIAL_ALLOC_BYTES,
     };
 
+    let mut exceeded_limit = false;
     let mut buf = BytesMut::with_capacity(cap);
 
     pin!(body);
 
-    poll_fn(|cx| loop {
+    match poll_fn(|cx| loop {
         let body = body.as_mut();
 
         match ready!(body.poll_next(cx)) {
-            Some(Ok(bytes)) => buf.extend_from_slice(&bytes),
+            Some(Ok(bytes)) => {
+                // if limit is exceeded...
+                if buf.len() + bytes.len() > limit {
+                    // ...set flag to true and break out of poll_fn
+                    exceeded_limit = true;
+                    return Poll::Ready(Ok(()));
+                }
+
+                buf.extend_from_slice(&bytes)
+            }
             None => return Poll::Ready(Ok(())),
             Some(Err(err)) => return Poll::Ready(Err(err)),
         }
     })
-    .await?;
-
-    Ok(buf.freeze())
+    .await
+    {
+        // propagate error returned from body poll
+        Err(err) => Ok(Err(err)),
+
+        // limit was exceeded while reading body
+        Ok(()) if exceeded_limit => Err(BodyLimitExceeded),
+
+        // otherwise return body buffer
+        Ok(()) => Ok(Ok(buf.freeze())),
+    }
 }
 
 #[cfg(test)]
-mod test {
+mod tests {
+    use std::io;
+
     use futures_util::{stream, StreamExt as _};
 
     use super::*;
-    use crate::{body::BodyStream, Error};
+    use crate::{
+        body::{BodyStream, SizedStream},
+        Error,
+    };
 
     #[actix_rt::test]
-    async fn test_to_bytes() {
+    async fn to_bytes_complete() {
         let bytes = to_bytes(()).await.unwrap();
         assert!(bytes.is_empty());
 
         let body = Bytes::from_static(b"123");
         let bytes = to_bytes(body).await.unwrap();
         assert_eq!(bytes, b"123"[..]);
+    }
+
+    #[actix_rt::test]
+    async fn to_bytes_streams() {
         let stream = stream::iter(vec![Bytes::from_static(b"123"), Bytes::from_static(b"abc")])
             .map(Ok::<_, Error>);
         let body = BodyStream::new(stream);
         let bytes = to_bytes(body).await.unwrap();
         assert_eq!(bytes, b"123abc"[..]);
     }
+
+    #[actix_rt::test]
+    async fn to_bytes_limited_complete() {
+        let bytes = to_bytes_limited((), 0).await.unwrap().unwrap();
+        assert!(bytes.is_empty());
+
+        let bytes = to_bytes_limited((), 1).await.unwrap().unwrap();
+        assert!(bytes.is_empty());
+
+        assert!(to_bytes_limited(Bytes::from_static(b"12"), 0)
+            .await
+            .is_err());
+        assert!(to_bytes_limited(Bytes::from_static(b"12"), 1)
+            .await
+            .is_err());
+        assert!(to_bytes_limited(Bytes::from_static(b"12"), 2).await.is_ok());
+        assert!(to_bytes_limited(Bytes::from_static(b"12"), 3).await.is_ok());
+    }
+
+    #[actix_rt::test]
+    async fn to_bytes_limited_streams() {
+        // hinting a larger body fails
+        let body = SizedStream::new(8, stream::empty().map(Ok::<_, Error>));
+        assert!(to_bytes_limited(body, 3).await.is_err());
+
+        // hinting a smaller body is okay
+        let body = SizedStream::new(3, stream::empty().map(Ok::<_, Error>));
+        assert!(to_bytes_limited(body, 3).await.unwrap().unwrap().is_empty());
+
+        // hinting a smaller body then returning a larger one fails
+        let stream = stream::iter(vec![Bytes::from_static(b"1234")]).map(Ok::<_, Error>);
+        let body = SizedStream::new(3, stream);
+        assert!(to_bytes_limited(body, 3).await.is_err());
+
+        let stream = stream::iter(vec![Bytes::from_static(b"123"), Bytes::from_static(b"abc")])
+            .map(Ok::<_, Error>);
+        let body = BodyStream::new(stream);
+        assert!(to_bytes_limited(body, 3).await.is_err());
+    }
+
+    #[actix_rt::test]
+    async fn to_body_limit_error() {
+        let err_stream = stream::once(async { Err(io::Error::new(io::ErrorKind::Other, "")) });
+        let body = SizedStream::new(8, err_stream);
+        // not too big, but propagates error from body stream
+        assert!(to_bytes_limited(body, 10).await.unwrap().is_err());
+    }
 }
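From a caller's point of view, a rough usage sketch of the new `to_bytes_limited` API follows; the 64 KiB cap is an arbitrary choice for illustration, and the nested `Result` distinguishes "body too large" from errors produced by the body itself.

```rust
use actix_http::{
    body::{to_bytes_limited, BodyLimitExceeded, BodyStream, MessageBody},
    Error,
};
use bytes::Bytes;
use futures_util::{stream, StreamExt as _};

// Collect at most 64 KiB of a body, mapping the two failure modes separately.
async fn collect_small_body(body: impl MessageBody<Error = Error>) -> Result<Bytes, String> {
    match to_bytes_limited(body, 64 * 1024).await {
        Ok(Ok(bytes)) => Ok(bytes),
        Ok(Err(err)) => Err(format!("body stream failed: {err}")),
        Err(BodyLimitExceeded) => Err("body larger than 64 KiB".to_owned()),
    }
}

#[actix_rt::main]
async fn main() {
    let body = BodyStream::new(stream::iter([Bytes::from_static(b"hello")]).map(Ok::<_, Error>));
    assert_eq!(collect_small_body(body).await.unwrap(), "hello");
}
```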
@@ -211,7 +211,6 @@ where
 
     /// Finish service configuration and create a service for the HTTP/2 protocol.
     #[cfg(feature = "http2")]
-    #[cfg_attr(docsrs, doc(cfg(feature = "http2")))]
     pub fn h2<F, B>(self, service: F) -> crate::h2::H2Service<T, S, B>
     where
         F: IntoServiceFactory<S, Request>,
@@ -132,15 +132,15 @@ impl ServiceConfig {
 
 #[cfg(test)]
 mod tests {
-    use super::*;
-    use crate::{date::DATE_VALUE_LENGTH, notify_on_drop};
-
     use actix_rt::{
         task::yield_now,
         time::{sleep, sleep_until},
     };
     use memchr::memmem;
 
+    use super::*;
+    use crate::{date::DATE_VALUE_LENGTH, notify_on_drop};
+
     #[actix_rt::test]
     async fn test_date_service_update() {
         let settings =
@@ -28,7 +28,7 @@ impl Date {
 
     fn update(&mut self) {
         self.pos = 0;
-        write!(self, "{}", httpdate::fmt_http_date(SystemTime::now())).unwrap();
+        write!(self, "{}", httpdate::HttpDate::from(SystemTime::now())).unwrap();
     }
 }
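Both calls in the hunk above produce the same IMF-fixdate string; the new form writes `HttpDate`'s `Display` output straight into the buffer instead of going through the `String` returned by `fmt_http_date`, which appears to be the allocation the new date-formatting benchmark is tracking. A small stand-alone check of the equivalence, using only the `httpdate` calls that appear in this hunk:

```rust
use std::time::SystemTime;

fn main() {
    let now = SystemTime::now();

    // `fmt_http_date` allocates and returns a String; `HttpDate`'s Display impl
    // can instead write directly into an existing formatter/buffer.
    let via_fn = httpdate::fmt_http_date(now);
    let via_display = httpdate::HttpDate::from(now).to_string();

    assert_eq!(via_fn, via_display);
    println!("{via_display}");
}
```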
@@ -9,11 +9,9 @@ use std::{
 
 use actix_rt::task::{spawn_blocking, JoinHandle};
 use bytes::Bytes;
-use futures_core::{ready, Stream};
-
 #[cfg(feature = "compress-gzip")]
 use flate2::write::{GzDecoder, ZlibDecoder};
+use futures_core::{ready, Stream};
 #[cfg(feature = "compress-zstd")]
 use zstd::stream::write::Decoder as ZstdDecoder;
 
@@ -49,9 +47,9 @@ where
             ))),
 
             #[cfg(feature = "compress-gzip")]
-            ContentEncoding::Deflate => Some(ContentDecoder::Deflate(Box::new(
-                ZlibDecoder::new(Writer::new()),
-            ))),
+            ContentEncoding::Deflate => Some(ContentDecoder::Deflate(Box::new(ZlibDecoder::new(
+                Writer::new(),
+            )))),
 
             #[cfg(feature = "compress-gzip")]
             ContentEncoding::Gzip => Some(ContentDecoder::Gzip(Box::new(GzDecoder::new(
@@ -193,7 +191,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },
 
         #[cfg(feature = "compress-gzip")]
@@ -207,7 +205,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },
 
         #[cfg(feature = "compress-gzip")]
@@ -220,7 +218,7 @@ impl ContentDecoder {
                    Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },
 
         #[cfg(feature = "compress-zstd")]
@@ -233,7 +231,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },
     }
 }
@@ -252,7 +250,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },
 
         #[cfg(feature = "compress-gzip")]
@@ -267,7 +265,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },
 
         #[cfg(feature = "compress-gzip")]
@@ -282,7 +280,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },
 
         #[cfg(feature = "compress-zstd")]
@@ -297,7 +295,7 @@ impl ContentDecoder {
                     Ok(None)
                 }
             }
-            Err(e) => Err(e),
+            Err(err) => Err(err),
         },
     }
 }
@@ -11,12 +11,10 @@ use std::{
 use actix_rt::task::{spawn_blocking, JoinHandle};
 use bytes::Bytes;
 use derive_more::Display;
-use futures_core::ready;
-use pin_project_lite::pin_project;
-
 #[cfg(feature = "compress-gzip")]
 use flate2::write::{GzEncoder, ZlibEncoder};
+use futures_core::ready;
+use pin_project_lite::pin_project;
 use tracing::trace;
 #[cfg(feature = "compress-zstd")]
 use zstd::stream::write::Encoder as ZstdEncoder;
@@ -76,6 +74,15 @@ impl<B: MessageBody> Encoder<B> {
         }
     }
 
+    fn empty() -> Self {
+        Encoder {
+            body: EncoderBody::Full { body: Bytes::new() },
+            encoder: None,
+            fut: None,
+            eof: true,
+        }
+    }
+
     pub fn response(encoding: ContentEncoding, head: &mut ResponseHead, body: B) -> Self {
         Encoder::response_with_level(encoding, head, body, None)
     }
@@ -86,9 +93,11 @@ impl<B: MessageBody> Encoder<B> {
         body: B,
         level: Option<u32>,
     ) -> Self {
-        // no need to compress an empty body
-        if matches!(body.size(), BodySize::None) {
-            return Self::none();
+        // no need to compress empty bodies
+        match body.size() {
+            BodySize::None => return Self::none(),
+            BodySize::Sized(0) => return Self::empty(),
+            _ => {}
         }
 
         let should_encode = !(head.headers().contains_key(&CONTENT_ENCODING)
@@ -362,9 +371,10 @@ impl ContentEncoder {
             )),
 
             #[cfg(feature = "compress-gzip")]
-            ContentEncodingWithLevel::Gzip(level) => Some(ContentEncoder::Gzip(
-                GzEncoder::new(Writer::new(), flate2::Compression::new(level)),
-            )),
+            ContentEncodingWithLevel::Gzip(level) => Some(ContentEncoder::Gzip(GzEncoder::new(
+                Writer::new(),
+                flate2::Compression::new(level),
+            ))),
 
             #[cfg(feature = "compress-brotli")]
             ContentEncodingWithLevel::Brotli(level) => Some(ContentEncoder::Brotli(Box::new(
@@ -7,13 +7,12 @@ use bytes::{Bytes, BytesMut};
 mod decoder;
 mod encoder;
 
-pub use self::decoder::Decoder;
-pub use self::encoder::Encoder;
+pub use self::{decoder::Decoder, encoder::Encoder};
 
 /// Special-purpose writer for streaming (de-)compression.
 ///
 /// Pre-allocates 8KiB of capacity.
-pub(self) struct Writer {
+struct Writer {
     buf: BytesMut,
 }
@ -3,12 +3,11 @@
|
|||||||
use std::{error::Error as StdError, fmt, io, str::Utf8Error, string::FromUtf8Error};
|
use std::{error::Error as StdError, fmt, io, str::Utf8Error, string::FromUtf8Error};
|
||||||
|
|
||||||
use derive_more::{Display, Error, From};
|
use derive_more::{Display, Error, From};
|
||||||
|
pub use http::{status::InvalidStatusCode, Error as HttpError};
|
||||||
use http::{uri::InvalidUri, StatusCode};
|
use http::{uri::InvalidUri, StatusCode};
|
||||||
|
|
||||||
use crate::{body::BoxBody, Response};
|
use crate::{body::BoxBody, Response};
|
||||||
|
|
||||||
pub use http::Error as HttpError;
|
|
||||||
|
|
||||||
pub struct Error {
|
pub struct Error {
|
||||||
inner: Box<ErrorInner>,
|
inner: Box<ErrorInner>,
|
||||||
}
|
}
|
||||||
@ -161,44 +160,44 @@ impl From<crate::ws::ProtocolError> for Error {
|
|||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum ParseError {
|
pub enum ParseError {
|
||||||
/// An invalid `Method`, such as `GE.T`.
|
/// An invalid `Method`, such as `GE.T`.
|
||||||
#[display(fmt = "Invalid Method specified")]
|
#[display(fmt = "invalid method specified")]
|
||||||
Method,
|
Method,
|
||||||
|
|
||||||
/// An invalid `Uri`, such as `exam ple.domain`.
|
/// An invalid `Uri`, such as `exam ple.domain`.
|
||||||
#[display(fmt = "Uri error: {}", _0)]
|
#[display(fmt = "URI error: {}", _0)]
|
||||||
Uri(InvalidUri),
|
Uri(InvalidUri),
|
||||||
|
|
||||||
/// An invalid `HttpVersion`, such as `HTP/1.1`
|
/// An invalid `HttpVersion`, such as `HTP/1.1`
|
||||||
#[display(fmt = "Invalid HTTP version specified")]
|
#[display(fmt = "invalid HTTP version specified")]
|
||||||
Version,
|
Version,
|
||||||
|
|
||||||
/// An invalid `Header`.
|
/// An invalid `Header`.
|
||||||
#[display(fmt = "Invalid Header provided")]
|
#[display(fmt = "invalid Header provided")]
|
||||||
Header,
|
Header,
|
||||||
|
|
||||||
/// A message head is too large to be reasonable.
|
/// A message head is too large to be reasonable.
|
||||||
#[display(fmt = "Message head is too large")]
|
#[display(fmt = "message head is too large")]
|
||||||
TooLarge,
|
TooLarge,
|
||||||
|
|
||||||
/// A message reached EOF, but is not complete.
|
/// A message reached EOF, but is not complete.
|
||||||
#[display(fmt = "Message is incomplete")]
|
#[display(fmt = "message is incomplete")]
|
||||||
Incomplete,
|
Incomplete,
|
||||||
|
|
||||||
/// An invalid `Status`, such as `1337 ELITE`.
|
/// An invalid `Status`, such as `1337 ELITE`.
|
||||||
#[display(fmt = "Invalid Status provided")]
|
#[display(fmt = "invalid status provided")]
|
||||||
Status,
|
Status,
|
||||||
|
|
||||||
/// A timeout occurred waiting for an IO event.
|
/// A timeout occurred waiting for an IO event.
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
#[display(fmt = "Timeout")]
|
#[display(fmt = "timeout")]
|
||||||
Timeout,
|
Timeout,
|
||||||
|
|
||||||
/// An `io::Error` that occurred while trying to read or write to a network stream.
|
/// An I/O error that occurred while trying to read or write to a network stream.
|
||||||
#[display(fmt = "IO error: {}", _0)]
|
#[display(fmt = "I/O error: {}", _0)]
|
||||||
Io(io::Error),
|
Io(io::Error),
|
||||||
|
|
||||||
/// Parsing a field as string failed.
|
/// Parsing a field as string failed.
|
||||||
#[display(fmt = "UTF8 error: {}", _0)]
|
#[display(fmt = "UTF-8 error: {}", _0)]
|
||||||
Utf8(Utf8Error),
|
Utf8(Utf8Error),
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -257,22 +256,19 @@ impl From<ParseError> for Response<BoxBody> {
|
|||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum PayloadError {
|
pub enum PayloadError {
|
||||||
/// A payload reached EOF, but is not complete.
|
/// A payload reached EOF, but is not complete.
|
||||||
#[display(
|
#[display(fmt = "payload reached EOF before completing: {:?}", _0)]
|
||||||
fmt = "A payload reached EOF, but is not complete. Inner error: {:?}",
|
|
||||||
_0
|
|
||||||
)]
|
|
||||||
Incomplete(Option<io::Error>),
|
Incomplete(Option<io::Error>),
|
||||||
|
|
||||||
/// Content encoding stream corruption.
|
/// Content encoding stream corruption.
|
||||||
#[display(fmt = "Can not decode content-encoding.")]
|
#[display(fmt = "can not decode content-encoding")]
|
||||||
EncodingCorrupted,
|
EncodingCorrupted,
|
||||||
|
|
||||||
/// Payload reached size limit.
|
/// Payload reached size limit.
|
||||||
#[display(fmt = "Payload reached size limit.")]
|
#[display(fmt = "payload reached size limit")]
|
||||||
Overflow,
|
Overflow,
|
||||||
|
|
||||||
/// Payload length is unknown.
|
/// Payload length is unknown.
|
||||||
#[display(fmt = "Payload length is unknown.")]
|
#[display(fmt = "payload length is unknown")]
|
||||||
UnknownLength,
|
UnknownLength,
|
||||||
|
|
||||||
/// HTTP/2 payload error.
|
/// HTTP/2 payload error.
|
||||||
@ -294,7 +290,6 @@ impl std::error::Error for PayloadError {
|
|||||||
PayloadError::Overflow => None,
|
PayloadError::Overflow => None,
|
||||||
PayloadError::UnknownLength => None,
|
PayloadError::UnknownLength => None,
|
||||||
#[cfg(feature = "http2")]
|
#[cfg(feature = "http2")]
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "http2")))]
|
|
||||||
PayloadError::Http2Payload(err) => Some(err),
|
PayloadError::Http2Payload(err) => Some(err),
|
||||||
PayloadError::Io(err) => Some(err),
|
PayloadError::Io(err) => Some(err),
|
||||||
}
|
}
|
||||||
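A hypothetical helper (not part of the commit) showing how the `source()` arms above are typically consumed when logging a full error chain:

    use std::error::Error as _;

    fn log_chain(mut err: &(dyn std::error::Error + 'static)) {
        eprintln!("error: {err}");
        // walk the causes exposed by Error::source, e.g. the h2 or io errors matched above
        while let Some(cause) = err.source() {
            eprintln!("caused by: {cause}");
            err = cause;
        }
    }

    fn main() {
        log_chain(&std::io::Error::new(std::io::ErrorKind::Other, "connection reset by peer"));
    }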
@ -331,44 +326,44 @@ impl From<PayloadError> for Error {
|
|||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum DispatchError {
|
pub enum DispatchError {
|
||||||
/// Service error.
|
/// Service error.
|
||||||
#[display(fmt = "Service Error")]
|
#[display(fmt = "service error")]
|
||||||
Service(Response<BoxBody>),
|
Service(Response<BoxBody>),
|
||||||
|
|
||||||
/// Body streaming error.
|
/// Body streaming error.
|
||||||
#[display(fmt = "Body error: {}", _0)]
|
#[display(fmt = "body error: {}", _0)]
|
||||||
Body(Box<dyn StdError>),
|
Body(Box<dyn StdError>),
|
||||||
|
|
||||||
/// Upgrade service error.
|
/// Upgrade service error.
|
||||||
|
#[display(fmt = "upgrade error")]
|
||||||
Upgrade,
|
Upgrade,
|
||||||
|
|
||||||
/// An `io::Error` that occurred while trying to read or write to a network stream.
|
/// An `io::Error` that occurred while trying to read or write to a network stream.
|
||||||
#[display(fmt = "IO error: {}", _0)]
|
#[display(fmt = "I/O error: {}", _0)]
|
||||||
Io(io::Error),
|
Io(io::Error),
|
||||||
|
|
||||||
/// Request parse error.
|
/// Request parse error.
|
||||||
#[display(fmt = "Request parse error: {}", _0)]
|
#[display(fmt = "request parse error: {}", _0)]
|
||||||
Parse(ParseError),
|
Parse(ParseError),
|
||||||
|
|
||||||
/// HTTP/2 error.
|
/// HTTP/2 error.
|
||||||
#[display(fmt = "{}", _0)]
|
#[display(fmt = "{}", _0)]
|
||||||
#[cfg(feature = "http2")]
|
#[cfg(feature = "http2")]
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "http2")))]
|
|
||||||
H2(h2::Error),
|
H2(h2::Error),
|
||||||
|
|
||||||
/// The first request did not complete within the specified timeout.
|
/// The first request did not complete within the specified timeout.
|
||||||
#[display(fmt = "The first request did not complete within the specified timeout")]
|
#[display(fmt = "request did not complete within the specified timeout")]
|
||||||
SlowRequestTimeout,
|
SlowRequestTimeout,
|
||||||
|
|
||||||
/// Disconnect timeout. Makes sense for ssl streams.
|
/// Disconnect timeout. Makes sense for TLS streams.
|
||||||
#[display(fmt = "Connection shutdown timeout")]
|
#[display(fmt = "connection shutdown timeout")]
|
||||||
DisconnectTimeout,
|
DisconnectTimeout,
|
||||||
|
|
||||||
/// Handler dropped payload before reading EOF.
|
/// Handler dropped payload before reading EOF.
|
||||||
#[display(fmt = "Handler dropped payload before reading EOF")]
|
#[display(fmt = "handler dropped payload before reading EOF")]
|
||||||
HandlerDroppedPayload,
|
HandlerDroppedPayload,
|
||||||
|
|
||||||
/// Internal error.
|
/// Internal error.
|
||||||
#[display(fmt = "Internal error")]
|
#[display(fmt = "internal error")]
|
||||||
InternalError,
|
InternalError,
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -393,20 +388,18 @@ impl StdError for DispatchError {
|
|||||||
#[cfg_attr(test, derive(PartialEq, Eq))]
|
#[cfg_attr(test, derive(PartialEq, Eq))]
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
pub enum ContentTypeError {
|
pub enum ContentTypeError {
|
||||||
/// Can not parse content type
|
/// Can not parse content type.
|
||||||
#[display(fmt = "Can not parse content type")]
|
#[display(fmt = "could not parse content type")]
|
||||||
ParseError,
|
ParseError,
|
||||||
|
|
||||||
/// Unknown content encoding
|
/// Unknown content encoding.
|
||||||
#[display(fmt = "Unknown content encoding")]
|
#[display(fmt = "unknown content encoding")]
|
||||||
UnknownEncoding,
|
UnknownEncoding,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use std::io;
|
use http::Error as HttpError;
|
||||||
|
|
||||||
use http::{Error as HttpError, StatusCode};
|
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
@ -426,7 +419,7 @@ mod tests {
|
|||||||
let err: Error = ParseError::Io(orig).into();
|
let err: Error = ParseError::Io(orig).into();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
format!("{}", err),
|
format!("{}", err),
|
||||||
"error parsing HTTP message: IO error: other"
|
"error parsing HTTP message: I/O error: other"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -453,7 +446,7 @@ mod tests {
|
|||||||
let err = PayloadError::Incomplete(None);
|
let err = PayloadError::Incomplete(None);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
err.to_string(),
|
err.to_string(),
|
||||||
"A payload reached EOF, but is not complete. Inner error: None"
|
"payload reached EOF before completing: None"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -473,7 +466,7 @@ mod tests {
|
|||||||
match ParseError::from($from) {
|
match ParseError::from($from) {
|
||||||
e @ $error => {
|
e @ $error => {
|
||||||
let desc = format!("{}", e);
|
let desc = format!("{}", e);
|
||||||
assert_eq!(desc, format!("IO error: {}", $from));
|
assert_eq!(desc, format!("I/O error: {}", $from));
|
||||||
}
|
}
|
||||||
_ => unreachable!("{:?}", $from),
|
_ => unreachable!("{:?}", $from),
|
||||||
}
|
}
|
||||||
|
@ -16,6 +16,7 @@ use crate::{
|
|||||||
};
|
};
|
||||||
|
|
||||||
bitflags! {
|
bitflags! {
|
||||||
|
#[derive(Debug, Clone, Copy)]
|
||||||
struct Flags: u8 {
|
struct Flags: u8 {
|
||||||
const HEAD = 0b0000_0001;
|
const HEAD = 0b0000_0001;
|
||||||
const KEEP_ALIVE_ENABLED = 0b0000_1000;
|
const KEEP_ALIVE_ENABLED = 0b0000_1000;
|
||||||
|
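The `#[derive(Debug, Clone, Copy)]` added above follows the bitflags 2.x convention, where these impls are no longer generated automatically; a standalone sketch assuming the bitflags 2.x crate:

    bitflags::bitflags! {
        // bitflags 2.x leaves trait impls opt-in, so they are derived explicitly
        #[derive(Debug, Clone, Copy)]
        struct Example: u8 {
            const HEAD = 0b0000_0001;
            const KEEP_ALIVE_ENABLED = 0b0000_0010;
        }
    }

    fn main() {
        let flags = Example::HEAD | Example::KEEP_ALIVE_ENABLED;
        // Debug output is available only because of the explicit derive above
        println!("{flags:?}");
    }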
@ -9,11 +9,10 @@ use super::{
|
|||||||
decoder::{self, PayloadDecoder, PayloadItem, PayloadType},
|
decoder::{self, PayloadDecoder, PayloadItem, PayloadType},
|
||||||
encoder, Message, MessageType,
|
encoder, Message, MessageType,
|
||||||
};
|
};
|
||||||
use crate::{
|
use crate::{body::BodySize, error::ParseError, ConnectionType, Request, Response, ServiceConfig};
|
||||||
body::BodySize, error::ParseError, ConnectionType, Request, Response, ServiceConfig,
|
|
||||||
};
|
|
||||||
|
|
||||||
bitflags! {
|
bitflags! {
|
||||||
|
#[derive(Debug, Clone, Copy)]
|
||||||
struct Flags: u8 {
|
struct Flags: u8 {
|
||||||
const HEAD = 0b0000_0001;
|
const HEAD = 0b0000_0001;
|
||||||
const KEEP_ALIVE_ENABLED = 0b0000_0010;
|
const KEEP_ALIVE_ENABLED = 0b0000_0010;
|
||||||
@ -199,9 +198,6 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use bytes::BytesMut;
|
|
||||||
use http::Method;
|
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::HttpMessage as _;
|
use crate::HttpMessage as _;
|
||||||
|
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
use std::{convert::TryFrom, io, marker::PhantomData, mem::MaybeUninit, task::Poll};
|
use std::{io, marker::PhantomData, mem::MaybeUninit, task::Poll};
|
||||||
|
|
||||||
use actix_codec::Decoder;
|
use actix_codec::Decoder;
|
||||||
use bytes::{Bytes, BytesMut};
|
use bytes::{Bytes, BytesMut};
|
||||||
@ -94,9 +94,7 @@ pub(crate) trait MessageType: Sized {
|
|||||||
// SAFETY: httparse already checks header value is only visible ASCII bytes
|
// SAFETY: httparse already checks header value is only visible ASCII bytes
|
||||||
// from_maybe_shared_unchecked contains debug assertions so they are omitted here
|
// from_maybe_shared_unchecked contains debug assertions so they are omitted here
|
||||||
let value = unsafe {
|
let value = unsafe {
|
||||||
HeaderValue::from_maybe_shared_unchecked(
|
HeaderValue::from_maybe_shared_unchecked(slice.slice(idx.value.0..idx.value.1))
|
||||||
slice.slice(idx.value.0..idx.value.1),
|
|
||||||
)
|
|
||||||
};
|
};
|
||||||
|
|
||||||
match name {
|
match name {
|
||||||
@ -275,8 +273,7 @@ impl MessageType for Request {
|
|||||||
let mut msg = Request::new();
|
let mut msg = Request::new();
|
||||||
|
|
||||||
// convert headers
|
// convert headers
|
||||||
let mut length =
|
let mut length = msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len], ver)?;
|
||||||
msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len], ver)?;
|
|
||||||
|
|
||||||
// disallow HTTP/1.0 POST requests that do not contain a Content-Length headers
|
// disallow HTTP/1.0 POST requests that do not contain a Content-Length headers
|
||||||
// see https://datatracker.ietf.org/doc/html/rfc1945#section-7.2.2
|
// see https://datatracker.ietf.org/doc/html/rfc1945#section-7.2.2
|
||||||
@ -356,8 +353,8 @@ impl MessageType for ResponseHead {
|
|||||||
Version::HTTP_10
|
Version::HTTP_10
|
||||||
};
|
};
|
||||||
|
|
||||||
let status = StatusCode::from_u16(res.code.unwrap())
|
let status =
|
||||||
.map_err(|_| ParseError::Status)?;
|
StatusCode::from_u16(res.code.unwrap()).map_err(|_| ParseError::Status)?;
|
||||||
HeaderIndex::record(src, res.headers, &mut headers);
|
HeaderIndex::record(src, res.headers, &mut headers);
|
||||||
|
|
||||||
(len, version, status, res.headers.len())
|
(len, version, status, res.headers.len())
|
||||||
@ -378,8 +375,7 @@ impl MessageType for ResponseHead {
|
|||||||
msg.version = ver;
|
msg.version = ver;
|
||||||
|
|
||||||
// convert headers
|
// convert headers
|
||||||
let mut length =
|
let mut length = msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len], ver)?;
|
||||||
msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len], ver)?;
|
|
||||||
|
|
||||||
// Remove CL value if 0 now that all headers and HTTP/1.0 special cases are processed.
|
// Remove CL value if 0 now that all headers and HTTP/1.0 special cases are processed.
|
||||||
// Protects against some request smuggling attacks.
|
// Protects against some request smuggling attacks.
|
||||||
@ -536,7 +532,7 @@ impl Decoder for PayloadDecoder {
|
|||||||
*state = match state.step(src, size, &mut buf) {
|
*state = match state.step(src, size, &mut buf) {
|
||||||
Poll::Pending => return Ok(None),
|
Poll::Pending => return Ok(None),
|
||||||
Poll::Ready(Ok(state)) => state,
|
Poll::Ready(Ok(state)) => state,
|
||||||
Poll::Ready(Err(e)) => return Err(e),
|
Poll::Ready(Err(err)) => return Err(err),
|
||||||
};
|
};
|
||||||
|
|
||||||
if *state == ChunkedState::End {
|
if *state == ChunkedState::End {
|
||||||
@ -567,15 +563,8 @@ impl Decoder for PayloadDecoder {
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use bytes::{Bytes, BytesMut};
|
|
||||||
use http::{Method, Version};
|
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::{
|
use crate::{header::SET_COOKIE, HttpMessage as _};
|
||||||
error::ParseError,
|
|
||||||
header::{HeaderName, SET_COOKIE},
|
|
||||||
HttpMessage as _,
|
|
||||||
};
|
|
||||||
|
|
||||||
impl PayloadType {
|
impl PayloadType {
|
||||||
pub(crate) fn unwrap(self) -> PayloadDecoder {
|
pub(crate) fn unwrap(self) -> PayloadDecoder {
|
||||||
|
@ -19,6 +19,13 @@ use tokio::io::{AsyncRead, AsyncWrite};
|
|||||||
use tokio_util::codec::{Decoder as _, Encoder as _};
|
use tokio_util::codec::{Decoder as _, Encoder as _};
|
||||||
use tracing::{error, trace};
|
use tracing::{error, trace};
|
||||||
|
|
||||||
|
use super::{
|
||||||
|
codec::Codec,
|
||||||
|
decoder::MAX_BUFFER_SIZE,
|
||||||
|
payload::{Payload, PayloadSender, PayloadStatus},
|
||||||
|
timer::TimerState,
|
||||||
|
Message, MessageType,
|
||||||
|
};
|
||||||
use crate::{
|
use crate::{
|
||||||
body::{BodySize, BoxBody, MessageBody},
|
body::{BodySize, BoxBody, MessageBody},
|
||||||
config::ServiceConfig,
|
config::ServiceConfig,
|
||||||
@ -27,19 +34,12 @@ use crate::{
|
|||||||
Error, Extensions, OnConnectData, Request, Response, StatusCode,
|
Error, Extensions, OnConnectData, Request, Response, StatusCode,
|
||||||
};
|
};
|
||||||
|
|
||||||
use super::{
|
|
||||||
codec::Codec,
|
|
||||||
decoder::MAX_BUFFER_SIZE,
|
|
||||||
payload::{Payload, PayloadSender, PayloadStatus},
|
|
||||||
timer::TimerState,
|
|
||||||
Message, MessageType,
|
|
||||||
};
|
|
||||||
|
|
||||||
const LW_BUFFER_SIZE: usize = 1024;
|
const LW_BUFFER_SIZE: usize = 1024;
|
||||||
const HW_BUFFER_SIZE: usize = 1024 * 8;
|
const HW_BUFFER_SIZE: usize = 1024 * 8;
|
||||||
const MAX_PIPELINED_MESSAGES: usize = 16;
|
const MAX_PIPELINED_MESSAGES: usize = 16;
|
||||||
|
|
||||||
bitflags! {
|
bitflags! {
|
||||||
|
#[derive(Debug, Clone, Copy)]
|
||||||
pub struct Flags: u8 {
|
pub struct Flags: u8 {
|
||||||
/// Set when stream is read for first time.
|
/// Set when stream is read for first time.
|
||||||
const STARTED = 0b0000_0001;
|
const STARTED = 0b0000_0001;
|
||||||
@ -212,9 +212,7 @@ where
|
|||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
Self::None => write!(f, "State::None"),
|
Self::None => write!(f, "State::None"),
|
||||||
Self::ExpectCall { .. } => {
|
Self::ExpectCall { .. } => f.debug_struct("State::ExpectCall").finish_non_exhaustive(),
|
||||||
f.debug_struct("State::ExpectCall").finish_non_exhaustive()
|
|
||||||
}
|
|
||||||
Self::ServiceCall { .. } => {
|
Self::ServiceCall { .. } => {
|
||||||
f.debug_struct("State::ServiceCall").finish_non_exhaustive()
|
f.debug_struct("State::ServiceCall").finish_non_exhaustive()
|
||||||
}
|
}
|
||||||
@ -275,9 +273,7 @@ where
|
|||||||
|
|
||||||
head_timer: TimerState::new(config.client_request_deadline().is_some()),
|
head_timer: TimerState::new(config.client_request_deadline().is_some()),
|
||||||
ka_timer: TimerState::new(config.keep_alive().enabled()),
|
ka_timer: TimerState::new(config.keep_alive().enabled()),
|
||||||
shutdown_timer: TimerState::new(
|
shutdown_timer: TimerState::new(config.client_disconnect_deadline().is_some()),
|
||||||
config.client_disconnect_deadline().is_some(),
|
|
||||||
),
|
|
||||||
|
|
||||||
io: Some(io),
|
io: Some(io),
|
||||||
read_buf: BytesMut::with_capacity(HW_BUFFER_SIZE),
|
read_buf: BytesMut::with_capacity(HW_BUFFER_SIZE),
|
||||||
@ -455,9 +451,7 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
// return with upgrade request and poll it exclusively
|
// return with upgrade request and poll it exclusively
|
||||||
Some(DispatcherMessage::Upgrade(req)) => {
|
Some(DispatcherMessage::Upgrade(req)) => return Ok(PollResponse::Upgrade(req)),
|
||||||
return Ok(PollResponse::Upgrade(req))
|
|
||||||
}
|
|
||||||
|
|
||||||
// all messages are dealt with
|
// all messages are dealt with
|
||||||
None => {
|
None => {
|
||||||
@ -518,8 +512,10 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
Poll::Ready(Some(Err(err))) => {
|
Poll::Ready(Some(Err(err))) => {
|
||||||
|
let err = err.into();
|
||||||
|
tracing::error!("Response payload stream error: {err:?}");
|
||||||
this.flags.insert(Flags::FINISHED);
|
this.flags.insert(Flags::FINISHED);
|
||||||
return Err(DispatchError::Body(err.into()));
|
return Err(DispatchError::Body(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
Poll::Pending => return Ok(PollResponse::DoNothing),
|
Poll::Pending => return Ok(PollResponse::DoNothing),
|
||||||
@ -555,6 +551,7 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
Poll::Ready(Some(Err(err))) => {
|
Poll::Ready(Some(Err(err))) => {
|
||||||
|
tracing::error!("Response payload stream error: {err:?}");
|
||||||
this.flags.insert(Flags::FINISHED);
|
this.flags.insert(Flags::FINISHED);
|
||||||
return Err(DispatchError::Body(
|
return Err(DispatchError::Body(
|
||||||
Error::new_body().with_cause(err).into(),
|
Error::new_body().with_cause(err).into(),
|
||||||
@ -674,9 +671,7 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
_ => {
|
_ => {
|
||||||
unreachable!(
|
unreachable!("State must be set to ServiceCall or ExceptCall in handle_request")
|
||||||
"State must be set to ServiceCall or ExceptCall in handle_request"
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -685,10 +680,7 @@ where
|
|||||||
/// Process one incoming request.
|
/// Process one incoming request.
|
||||||
///
|
///
|
||||||
/// Returns true if any meaningful work was done.
|
/// Returns true if any meaningful work was done.
|
||||||
fn poll_request(
|
fn poll_request(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Result<bool, DispatchError> {
|
||||||
mut self: Pin<&mut Self>,
|
|
||||||
cx: &mut Context<'_>,
|
|
||||||
) -> Result<bool, DispatchError> {
|
|
||||||
let pipeline_queue_full = self.messages.len() >= MAX_PIPELINED_MESSAGES;
|
let pipeline_queue_full = self.messages.len() >= MAX_PIPELINED_MESSAGES;
|
||||||
let can_not_read = !self.can_read(cx);
|
let can_not_read = !self.can_read(cx);
|
||||||
|
|
||||||
@ -714,7 +706,7 @@ where
|
|||||||
|
|
||||||
req.head_mut().peer_addr = *this.peer_addr;
|
req.head_mut().peer_addr = *this.peer_addr;
|
||||||
|
|
||||||
req.conn_data = this.conn_data.as_ref().map(Rc::clone);
|
req.conn_data.clone_from(this.conn_data);
|
||||||
|
|
||||||
match this.codec.message_type() {
|
match this.codec.message_type() {
|
||||||
// request has no payload
|
// request has no payload
|
||||||
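The `clone_from` spelling used for `conn_data` above is the form suggested by clippy's `assigning_clones` lint: it lets the target reuse an existing allocation when the `Clone` impl supports that. A tiny illustration with stand-in values:

    fn main() {
        let mut target = String::from("old value");
        let source = String::from("new value");
        // instead of: target = source.clone();
        target.clone_from(&source);
        assert_eq!(target, "new value");
    }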
@ -858,10 +850,7 @@ where
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn poll_ka_timer(
|
fn poll_ka_timer(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Result<(), DispatchError> {
|
||||||
mut self: Pin<&mut Self>,
|
|
||||||
cx: &mut Context<'_>,
|
|
||||||
) -> Result<(), DispatchError> {
|
|
||||||
let this = self.as_mut().project();
|
let this = self.as_mut().project();
|
||||||
if let TimerState::Active { timer } = this.ka_timer {
|
if let TimerState::Active { timer } = this.ka_timer {
|
||||||
debug_assert!(
|
debug_assert!(
|
||||||
@ -926,10 +915,7 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Poll head, keep-alive, and disconnect timer.
|
/// Poll head, keep-alive, and disconnect timer.
|
||||||
fn poll_timers(
|
fn poll_timers(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Result<(), DispatchError> {
|
||||||
mut self: Pin<&mut Self>,
|
|
||||||
cx: &mut Context<'_>,
|
|
||||||
) -> Result<(), DispatchError> {
|
|
||||||
self.as_mut().poll_head_timer(cx)?;
|
self.as_mut().poll_head_timer(cx)?;
|
||||||
self.as_mut().poll_ka_timer(cx)?;
|
self.as_mut().poll_ka_timer(cx)?;
|
||||||
self.as_mut().poll_shutdown_timer(cx)?;
|
self.as_mut().poll_shutdown_timer(cx)?;
|
||||||
@ -943,10 +929,7 @@ where
|
|||||||
/// - `std::io::ErrorKind::ConnectionReset` after partial read;
|
/// - `std::io::ErrorKind::ConnectionReset` after partial read;
|
||||||
/// - all data read done.
|
/// - all data read done.
|
||||||
#[inline(always)] // TODO: bench this inline
|
#[inline(always)] // TODO: bench this inline
|
||||||
fn read_available(
|
fn read_available(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Result<bool, DispatchError> {
|
||||||
self: Pin<&mut Self>,
|
|
||||||
cx: &mut Context<'_>,
|
|
||||||
) -> Result<bool, DispatchError> {
|
|
||||||
let this = self.project();
|
let this = self.project();
|
||||||
|
|
||||||
if this.flags.contains(Flags::READ_DISCONNECT) {
|
if this.flags.contains(Flags::READ_DISCONNECT) {
|
||||||
|
@ -1,14 +1,11 @@
|
|||||||
use std::{future::Future, str, task::Poll, time::Duration};
|
use std::{future::Future, str, task::Poll, time::Duration};
|
||||||
|
|
||||||
use actix_rt::{pin, time::sleep};
|
|
||||||
use actix_service::fn_service;
|
|
||||||
use actix_utils::future::{ready, Ready};
|
|
||||||
use bytes::Bytes;
|
|
||||||
use futures_util::future::lazy;
|
|
||||||
|
|
||||||
use actix_codec::Framed;
|
use actix_codec::Framed;
|
||||||
use actix_service::Service;
|
use actix_rt::{pin, time::sleep};
|
||||||
use bytes::{Buf, BytesMut};
|
use actix_service::{fn_service, Service};
|
||||||
|
use actix_utils::future::{ready, Ready};
|
||||||
|
use bytes::{Buf, Bytes, BytesMut};
|
||||||
|
use futures_util::future::lazy;
|
||||||
|
|
||||||
use super::dispatcher::{Dispatcher, DispatcherState, DispatcherStateProj, Flags};
|
use super::dispatcher::{Dispatcher, DispatcherState, DispatcherStateProj, Flags};
|
||||||
use crate::{
|
use crate::{
|
||||||
@ -43,8 +40,8 @@ fn status_service(
|
|||||||
fn_service(move |_req: Request| ready(Ok::<_, Error>(Response::new(status))))
|
fn_service(move |_req: Request| ready(Ok::<_, Error>(Response::new(status))))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn echo_path_service(
|
fn echo_path_service() -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error>
|
||||||
) -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error> {
|
{
|
||||||
fn_service(|req: Request| {
|
fn_service(|req: Request| {
|
||||||
let path = req.path().as_bytes();
|
let path = req.path().as_bytes();
|
||||||
ready(Ok::<_, Error>(
|
ready(Ok::<_, Error>(
|
||||||
@ -53,8 +50,8 @@ fn echo_path_service(
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn drop_payload_service(
|
fn drop_payload_service() -> impl Service<Request, Response = Response<&'static str>, Error = Error>
|
||||||
) -> impl Service<Request, Response = Response<&'static str>, Error = Error> {
|
{
|
||||||
fn_service(|mut req: Request| async move {
|
fn_service(|mut req: Request| async move {
|
||||||
let _ = req.take_payload();
|
let _ = req.take_payload();
|
||||||
Ok::<_, Error>(Response::with_body(StatusCode::OK, "payload dropped"))
|
Ok::<_, Error>(Response::with_body(StatusCode::OK, "payload dropped"))
|
||||||
@ -932,7 +929,6 @@ fn http_msg(msg: impl AsRef<str>) -> BytesMut {
|
|||||||
.as_ref()
|
.as_ref()
|
||||||
.trim()
|
.trim()
|
||||||
.split('\n')
|
.split('\n')
|
||||||
.into_iter()
|
|
||||||
.map(|line| [line.trim_start(), "\r"].concat())
|
.map(|line| [line.trim_start(), "\r"].concat())
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join("\n");
|
.join("\n");
|
||||||
|
@ -17,14 +17,16 @@ mod timer;
|
|||||||
mod upgrade;
|
mod upgrade;
|
||||||
mod utils;
|
mod utils;
|
||||||
|
|
||||||
pub use self::client::{ClientCodec, ClientPayloadCodec};
|
pub use self::{
|
||||||
pub use self::codec::Codec;
|
client::{ClientCodec, ClientPayloadCodec},
|
||||||
pub use self::dispatcher::Dispatcher;
|
codec::Codec,
|
||||||
pub use self::expect::ExpectHandler;
|
dispatcher::Dispatcher,
|
||||||
pub use self::payload::Payload;
|
expect::ExpectHandler,
|
||||||
pub use self::service::{H1Service, H1ServiceHandler};
|
payload::Payload,
|
||||||
pub use self::upgrade::UpgradeHandler;
|
service::{H1Service, H1ServiceHandler},
|
||||||
pub use self::utils::SendResponse;
|
upgrade::UpgradeHandler,
|
||||||
|
utils::SendResponse,
|
||||||
|
};
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
/// Codec message
|
/// Codec message
|
||||||
|
@ -117,6 +117,7 @@ impl PayloadSender {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::needless_pass_by_ref_mut)]
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn need_read(&self, cx: &mut Context<'_>) -> PayloadStatus {
|
pub fn need_read(&self, cx: &mut Context<'_>) -> PayloadStatus {
|
||||||
// we check need_read only if Payload (other side) is alive,
|
// we check need_read only if Payload (other side) is alive,
|
||||||
@ -174,7 +175,7 @@ impl Inner {
|
|||||||
|
|
||||||
/// Register future waiting data from payload.
|
/// Register future waiting data from payload.
|
||||||
/// Waker would be used in `Inner::wake`
|
/// Waker would be used in `Inner::wake`
|
||||||
fn register(&mut self, cx: &mut Context<'_>) {
|
fn register(&mut self, cx: &Context<'_>) {
|
||||||
if self
|
if self
|
||||||
.task
|
.task
|
||||||
.as_ref()
|
.as_ref()
|
||||||
@ -186,7 +187,7 @@ impl Inner {
|
|||||||
|
|
||||||
// Register future feeding data to payload.
|
// Register future feeding data to payload.
|
||||||
/// Waker would be used in `Inner::wake_io`
|
/// Waker would be used in `Inner::wake_io`
|
||||||
fn register_io(&mut self, cx: &mut Context<'_>) {
|
fn register_io(&mut self, cx: &Context<'_>) {
|
||||||
if self
|
if self
|
||||||
.io_task
|
.io_task
|
||||||
.as_ref()
|
.as_ref()
|
||||||
@ -221,7 +222,7 @@ impl Inner {
|
|||||||
|
|
||||||
fn poll_next(
|
fn poll_next(
|
||||||
mut self: Pin<&mut Self>,
|
mut self: Pin<&mut Self>,
|
||||||
cx: &mut Context<'_>,
|
cx: &Context<'_>,
|
||||||
) -> Poll<Option<Result<Bytes, PayloadError>>> {
|
) -> Poll<Option<Result<Bytes, PayloadError>>> {
|
||||||
if let Some(data) = self.items.pop_front() {
|
if let Some(data) = self.items.pop_front() {
|
||||||
self.len -= data.len();
|
self.len -= data.len();
|
||||||
|
@ -15,6 +15,7 @@ use actix_utils::future::ready;
|
|||||||
use futures_core::future::LocalBoxFuture;
|
use futures_core::future::LocalBoxFuture;
|
||||||
use tracing::error;
|
use tracing::error;
|
||||||
|
|
||||||
|
use super::{codec::Codec, dispatcher::Dispatcher, ExpectHandler, UpgradeHandler};
|
||||||
use crate::{
|
use crate::{
|
||||||
body::{BoxBody, MessageBody},
|
body::{BoxBody, MessageBody},
|
||||||
config::ServiceConfig,
|
config::ServiceConfig,
|
||||||
@ -23,8 +24,6 @@ use crate::{
|
|||||||
ConnectCallback, OnConnectData, Request, Response,
|
ConnectCallback, OnConnectData, Request, Response,
|
||||||
};
|
};
|
||||||
|
|
||||||
use super::{codec::Codec, dispatcher::Dispatcher, ExpectHandler, UpgradeHandler};
|
|
||||||
|
|
||||||
/// `ServiceFactory` implementation for HTTP1 transport
|
/// `ServiceFactory` implementation for HTTP1 transport
|
||||||
pub struct H1Service<T, S, B, X = ExpectHandler, U = UpgradeHandler> {
|
pub struct H1Service<T, S, B, X = ExpectHandler, U = UpgradeHandler> {
|
||||||
srv: S,
|
srv: S,
|
||||||
@ -82,13 +81,8 @@ where
|
|||||||
/// Create simple tcp stream service
|
/// Create simple tcp stream service
|
||||||
pub fn tcp(
|
pub fn tcp(
|
||||||
self,
|
self,
|
||||||
) -> impl ServiceFactory<
|
) -> impl ServiceFactory<TcpStream, Config = (), Response = (), Error = DispatchError, InitError = ()>
|
||||||
TcpStream,
|
{
|
||||||
Config = (),
|
|
||||||
Response = (),
|
|
||||||
Error = DispatchError,
|
|
||||||
InitError = (),
|
|
||||||
> {
|
|
||||||
fn_service(|io: TcpStream| {
|
fn_service(|io: TcpStream| {
|
||||||
let peer_addr = io.peer_addr().ok();
|
let peer_addr = io.peer_addr().ok();
|
||||||
ready(Ok((io, peer_addr)))
|
ready(Ok((io, peer_addr)))
|
||||||
@ -99,8 +93,6 @@ where
|
|||||||
|
|
||||||
#[cfg(feature = "openssl")]
|
#[cfg(feature = "openssl")]
|
||||||
mod openssl {
|
mod openssl {
|
||||||
use super::*;
|
|
||||||
|
|
||||||
use actix_tls::accept::{
|
use actix_tls::accept::{
|
||||||
openssl::{
|
openssl::{
|
||||||
reexports::{Error as SslError, SslAcceptor},
|
reexports::{Error as SslError, SslAcceptor},
|
||||||
@ -109,6 +101,8 @@ mod openssl {
|
|||||||
TlsError,
|
TlsError,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
|
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
|
||||||
where
|
where
|
||||||
S: ServiceFactory<Request, Config = ()>,
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
@ -134,7 +128,6 @@ mod openssl {
|
|||||||
U::InitError: fmt::Debug,
|
U::InitError: fmt::Debug,
|
||||||
{
|
{
|
||||||
/// Create OpenSSL based service.
|
/// Create OpenSSL based service.
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "openssl")))]
|
|
||||||
pub fn openssl(
|
pub fn openssl(
|
||||||
self,
|
self,
|
||||||
acceptor: SslAcceptor,
|
acceptor: SslAcceptor,
|
||||||
@ -159,14 +152,13 @@ mod openssl {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "rustls")]
|
#[cfg(feature = "rustls-0_20")]
|
||||||
mod rustls {
|
mod rustls_0_20 {
|
||||||
|
|
||||||
use std::io;
|
use std::io;
|
||||||
|
|
||||||
use actix_service::ServiceFactoryExt as _;
|
use actix_service::ServiceFactoryExt as _;
|
||||||
use actix_tls::accept::{
|
use actix_tls::accept::{
|
||||||
rustls::{reexports::ServerConfig, Acceptor, TlsStream},
|
rustls_0_20::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
TlsError,
|
TlsError,
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -196,8 +188,7 @@ mod rustls {
|
|||||||
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
||||||
U::InitError: fmt::Debug,
|
U::InitError: fmt::Debug,
|
||||||
{
|
{
|
||||||
/// Create Rustls based service.
|
/// Create Rustls v0.20 based service.
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "rustls")))]
|
|
||||||
pub fn rustls(
|
pub fn rustls(
|
||||||
self,
|
self,
|
||||||
config: ServerConfig,
|
config: ServerConfig,
|
||||||
@ -222,6 +213,189 @@ mod rustls {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rustls-0_21")]
|
||||||
|
mod rustls_0_21 {
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_service::ServiceFactoryExt as _;
|
||||||
|
use actix_tls::accept::{
|
||||||
|
rustls_0_21::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
|
TlsError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
|
||||||
|
where
|
||||||
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
S::Future: 'static,
|
||||||
|
S::Error: Into<Response<BoxBody>>,
|
||||||
|
S::InitError: fmt::Debug,
|
||||||
|
S::Response: Into<Response<B>>,
|
||||||
|
|
||||||
|
B: MessageBody,
|
||||||
|
|
||||||
|
X: ServiceFactory<Request, Config = (), Response = Request>,
|
||||||
|
X::Future: 'static,
|
||||||
|
X::Error: Into<Response<BoxBody>>,
|
||||||
|
X::InitError: fmt::Debug,
|
||||||
|
|
||||||
|
U: ServiceFactory<
|
||||||
|
(Request, Framed<TlsStream<TcpStream>, Codec>),
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
>,
|
||||||
|
U::Future: 'static,
|
||||||
|
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
||||||
|
U::InitError: fmt::Debug,
|
||||||
|
{
|
||||||
|
/// Create Rustls v0.21 based service.
|
||||||
|
pub fn rustls_021(
|
||||||
|
self,
|
||||||
|
config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
Acceptor::new(config)
|
||||||
|
.map_init_err(|_| {
|
||||||
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
|
})
|
||||||
|
.map_err(TlsError::into_service_error)
|
||||||
|
.map(|io: TlsStream<TcpStream>| {
|
||||||
|
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||||
|
(io, peer_addr)
|
||||||
|
})
|
||||||
|
.and_then(self.map_err(TlsError::Service))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rustls-0_22")]
|
||||||
|
mod rustls_0_22 {
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_service::ServiceFactoryExt as _;
|
||||||
|
use actix_tls::accept::{
|
||||||
|
rustls_0_22::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
|
TlsError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
|
||||||
|
where
|
||||||
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
S::Future: 'static,
|
||||||
|
S::Error: Into<Response<BoxBody>>,
|
||||||
|
S::InitError: fmt::Debug,
|
||||||
|
S::Response: Into<Response<B>>,
|
||||||
|
|
||||||
|
B: MessageBody,
|
||||||
|
|
||||||
|
X: ServiceFactory<Request, Config = (), Response = Request>,
|
||||||
|
X::Future: 'static,
|
||||||
|
X::Error: Into<Response<BoxBody>>,
|
||||||
|
X::InitError: fmt::Debug,
|
||||||
|
|
||||||
|
U: ServiceFactory<
|
||||||
|
(Request, Framed<TlsStream<TcpStream>, Codec>),
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
>,
|
||||||
|
U::Future: 'static,
|
||||||
|
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
||||||
|
U::InitError: fmt::Debug,
|
||||||
|
{
|
||||||
|
/// Create Rustls v0.22 based service.
|
||||||
|
pub fn rustls_0_22(
|
||||||
|
self,
|
||||||
|
config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
Acceptor::new(config)
|
||||||
|
.map_init_err(|_| {
|
||||||
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
|
})
|
||||||
|
.map_err(TlsError::into_service_error)
|
||||||
|
.map(|io: TlsStream<TcpStream>| {
|
||||||
|
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||||
|
(io, peer_addr)
|
||||||
|
})
|
||||||
|
.and_then(self.map_err(TlsError::Service))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rustls-0_23")]
|
||||||
|
mod rustls_0_23 {
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_service::ServiceFactoryExt as _;
|
||||||
|
use actix_tls::accept::{
|
||||||
|
rustls_0_23::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
|
TlsError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
|
||||||
|
where
|
||||||
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
S::Future: 'static,
|
||||||
|
S::Error: Into<Response<BoxBody>>,
|
||||||
|
S::InitError: fmt::Debug,
|
||||||
|
S::Response: Into<Response<B>>,
|
||||||
|
|
||||||
|
B: MessageBody,
|
||||||
|
|
||||||
|
X: ServiceFactory<Request, Config = (), Response = Request>,
|
||||||
|
X::Future: 'static,
|
||||||
|
X::Error: Into<Response<BoxBody>>,
|
||||||
|
X::InitError: fmt::Debug,
|
||||||
|
|
||||||
|
U: ServiceFactory<
|
||||||
|
(Request, Framed<TlsStream<TcpStream>, Codec>),
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
>,
|
||||||
|
U::Future: 'static,
|
||||||
|
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
||||||
|
U::InitError: fmt::Debug,
|
||||||
|
{
|
||||||
|
/// Create Rustls v0.23 based service.
|
||||||
|
pub fn rustls_0_23(
|
||||||
|
self,
|
||||||
|
config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
Acceptor::new(config)
|
||||||
|
.map_init_err(|_| {
|
||||||
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
|
})
|
||||||
|
.map_err(TlsError::into_service_error)
|
||||||
|
.map(|io: TlsStream<TcpStream>| {
|
||||||
|
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||||
|
(io, peer_addr)
|
||||||
|
})
|
||||||
|
.and_then(self.map_err(TlsError::Service))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<T, S, B, X, U> H1Service<T, S, B, X, U>
|
impl<T, S, B, X, U> H1Service<T, S, B, X, U>
|
||||||
where
|
where
|
||||||
S: ServiceFactory<Request, Config = ()>,
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
@ -4,7 +4,7 @@ use std::{
|
|||||||
future::Future,
|
future::Future,
|
||||||
marker::PhantomData,
|
marker::PhantomData,
|
||||||
net,
|
net,
|
||||||
pin::Pin,
|
pin::{pin, Pin},
|
||||||
rc::Rc,
|
rc::Rc,
|
||||||
task::{Context, Poll},
|
task::{Context, Poll},
|
||||||
};
|
};
|
||||||
@ -20,7 +20,6 @@ use h2::{
|
|||||||
Ping, PingPong,
|
Ping, PingPong,
|
||||||
};
|
};
|
||||||
use pin_project_lite::pin_project;
|
use pin_project_lite::pin_project;
|
||||||
use tracing::{error, trace, warn};
|
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
body::{BodySize, BoxBody, MessageBody},
|
body::{BodySize, BoxBody, MessageBody},
|
||||||
@ -29,7 +28,7 @@ use crate::{
|
|||||||
HeaderName, HeaderValue, CONNECTION, CONTENT_LENGTH, DATE, TRANSFER_ENCODING, UPGRADE,
|
HeaderName, HeaderValue, CONNECTION, CONTENT_LENGTH, DATE, TRANSFER_ENCODING, UPGRADE,
|
||||||
},
|
},
|
||||||
service::HttpFlow,
|
service::HttpFlow,
|
||||||
Extensions, OnConnectData, Payload, Request, Response, ResponseHead,
|
Extensions, Method, OnConnectData, Payload, Request, Response, ResponseHead,
|
||||||
};
|
};
|
||||||
|
|
||||||
const CHUNK_SIZE: usize = 16_384;
|
const CHUNK_SIZE: usize = 16_384;
|
||||||
@ -118,6 +117,7 @@ where
|
|||||||
let payload = crate::h2::Payload::new(body);
|
let payload = crate::h2::Payload::new(body);
|
||||||
let pl = Payload::H2 { payload };
|
let pl = Payload::H2 { payload };
|
||||||
let mut req = Request::with_payload(pl);
|
let mut req = Request::with_payload(pl);
|
||||||
|
let head_req = parts.method == Method::HEAD;
|
||||||
|
|
||||||
let head = req.head_mut();
|
let head = req.head_mut();
|
||||||
head.uri = parts.uri;
|
head.uri = parts.uri;
|
||||||
@ -126,7 +126,7 @@ where
|
|||||||
head.headers = parts.headers.into();
|
head.headers = parts.headers.into();
|
||||||
head.peer_addr = this.peer_addr;
|
head.peer_addr = this.peer_addr;
|
||||||
|
|
||||||
req.conn_data = this.conn_data.as_ref().map(Rc::clone);
|
req.conn_data.clone_from(&this.conn_data);
|
||||||
|
|
||||||
let fut = this.flow.service.call(req);
|
let fut = this.flow.service.call(req);
|
||||||
let config = this.config.clone();
|
let config = this.config.clone();
|
||||||
@ -135,10 +135,10 @@ where
|
|||||||
actix_rt::spawn(async move {
|
actix_rt::spawn(async move {
|
||||||
// resolve service call and send response.
|
// resolve service call and send response.
|
||||||
let res = match fut.await {
|
let res = match fut.await {
|
||||||
Ok(res) => handle_response(res.into(), tx, config).await,
|
Ok(res) => handle_response(res.into(), tx, config, head_req).await,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
let res: Response<BoxBody> = err.into();
|
let res: Response<BoxBody> = err.into();
|
||||||
handle_response(res, tx, config).await
|
handle_response(res, tx, config, head_req).await
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -146,11 +146,13 @@ where
|
|||||||
if let Err(err) = res {
|
if let Err(err) = res {
|
||||||
match err {
|
match err {
|
||||||
DispatchError::SendResponse(err) => {
|
DispatchError::SendResponse(err) => {
|
||||||
trace!("Error sending HTTP/2 response: {:?}", err)
|
tracing::trace!("Error sending response: {err:?}");
|
||||||
|
}
|
||||||
|
DispatchError::SendData(err) => {
|
||||||
|
tracing::warn!("Send data error: {err:?}");
|
||||||
}
|
}
|
||||||
DispatchError::SendData(err) => warn!("{:?}", err),
|
|
||||||
DispatchError::ResponseBody(err) => {
|
DispatchError::ResponseBody(err) => {
|
||||||
error!("Response payload stream error: {:?}", err)
|
tracing::error!("Response payload stream error: {err:?}");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -206,6 +208,7 @@ async fn handle_response<B>(
|
|||||||
res: Response<B>,
|
res: Response<B>,
|
||||||
mut tx: SendResponse<Bytes>,
|
mut tx: SendResponse<Bytes>,
|
||||||
config: ServiceConfig,
|
config: ServiceConfig,
|
||||||
|
head_req: bool,
|
||||||
) -> Result<(), DispatchError>
|
) -> Result<(), DispatchError>
|
||||||
where
|
where
|
||||||
B: MessageBody,
|
B: MessageBody,
|
||||||
@ -215,20 +218,20 @@ where
|
|||||||
// prepare response.
|
// prepare response.
|
||||||
let mut size = body.size();
|
let mut size = body.size();
|
||||||
let res = prepare_response(config, res.head(), &mut size);
|
let res = prepare_response(config, res.head(), &mut size);
|
||||||
let eof = size.is_eof();
|
let eof_or_head = size.is_eof() || head_req;
|
||||||
|
|
||||||
// send response head and return on eof.
|
// send response head and return on eof.
|
||||||
let mut stream = tx
|
let mut stream = tx
|
||||||
.send_response(res, eof)
|
.send_response(res, eof_or_head)
|
||||||
.map_err(DispatchError::SendResponse)?;
|
.map_err(DispatchError::SendResponse)?;
|
||||||
|
|
||||||
if eof {
|
if eof_or_head {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
// poll response body and send chunks to client
|
let mut body = pin!(body);
|
||||||
actix_rt::pin!(body);
|
|
||||||
|
|
||||||
|
// poll response body and send chunks to client
|
||||||
while let Some(res) = poll_fn(|cx| body.as_mut().poll_next(cx)).await {
|
while let Some(res) = poll_fn(|cx| body.as_mut().poll_next(cx)).await {
|
||||||
let mut chunk = res.map_err(|err| DispatchError::ResponseBody(err.into()))?;
|
let mut chunk = res.map_err(|err| DispatchError::ResponseBody(err.into()))?;
|
||||||
|
|
||||||
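The new `head_req`/`eof_or_head` handling encodes the rule that a HEAD response may advertise a body size but must not send any DATA frames, so the HEADERS frame has to carry end-of-stream. An illustrative stand-alone restatement (not the commit's code):

    // should the HTTP/2 HEADERS frame be flagged END_STREAM?
    fn headers_end_stream(body_is_eof: bool, is_head_request: bool) -> bool {
        // HEAD responses keep Content-Length but are never followed by DATA frames
        body_is_eof || is_head_request
    }

    fn main() {
        // HEAD request for a sized body: end the stream on the headers frame anyway
        assert!(headers_end_stream(false, true));
    }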
|
@ -23,8 +23,7 @@ use crate::{
|
|||||||
mod dispatcher;
|
mod dispatcher;
|
||||||
mod service;
|
mod service;
|
||||||
|
|
||||||
pub use self::dispatcher::Dispatcher;
|
pub use self::{dispatcher::Dispatcher, service::H2Service};
|
||||||
pub use self::service::H2Service;
|
|
||||||
|
|
||||||
/// HTTP/2 peer stream.
|
/// HTTP/2 peer stream.
|
||||||
pub struct Payload {
|
pub struct Payload {
|
||||||
@ -58,10 +57,7 @@ impl Stream for Payload {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn handshake_with_timeout<T>(
|
pub(crate) fn handshake_with_timeout<T>(io: T, config: &ServiceConfig) -> HandshakeWithTimeout<T>
|
||||||
io: T,
|
|
||||||
config: &ServiceConfig,
|
|
||||||
) -> HandshakeWithTimeout<T>
|
|
||||||
where
|
where
|
||||||
T: AsyncRead + AsyncWrite + Unpin,
|
T: AsyncRead + AsyncWrite + Unpin,
|
||||||
{
|
{
|
||||||
|
@ -16,6 +16,7 @@ use actix_utils::future::ready;
|
|||||||
use futures_core::{future::LocalBoxFuture, ready};
|
use futures_core::{future::LocalBoxFuture, ready};
|
||||||
use tracing::{error, trace};
|
use tracing::{error, trace};
|
||||||
|
|
||||||
|
use super::{dispatcher::Dispatcher, handshake_with_timeout, HandshakeWithTimeout};
|
||||||
use crate::{
|
use crate::{
|
||||||
body::{BoxBody, MessageBody},
|
body::{BoxBody, MessageBody},
|
||||||
config::ServiceConfig,
|
config::ServiceConfig,
|
||||||
@ -24,8 +25,6 @@ use crate::{
|
|||||||
ConnectCallback, OnConnectData, Request, Response,
|
ConnectCallback, OnConnectData, Request, Response,
|
||||||
};
|
};
|
||||||
|
|
||||||
use super::{dispatcher::Dispatcher, handshake_with_timeout, HandshakeWithTimeout};
|
|
||||||
|
|
||||||
/// `ServiceFactory` implementation for HTTP/2 transport
|
/// `ServiceFactory` implementation for HTTP/2 transport
|
||||||
pub struct H2Service<T, S, B> {
|
pub struct H2Service<T, S, B> {
|
||||||
srv: S,
|
srv: S,
|
||||||
@ -117,7 +116,6 @@ mod openssl {
|
|||||||
B: MessageBody + 'static,
|
B: MessageBody + 'static,
|
||||||
{
|
{
|
||||||
/// Create OpenSSL based service.
|
/// Create OpenSSL based service.
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "openssl")))]
|
|
||||||
pub fn openssl(
|
pub fn openssl(
|
||||||
self,
|
self,
|
||||||
acceptor: SslAcceptor,
|
acceptor: SslAcceptor,
|
||||||
@ -142,8 +140,8 @@ mod openssl {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "rustls")]
|
#[cfg(feature = "rustls-0_20")]
|
||||||
mod rustls {
|
mod rustls_0_20 {
|
||||||
use std::io;
|
use std::io;
|
||||||
|
|
||||||
use actix_service::ServiceFactoryExt as _;
|
use actix_service::ServiceFactoryExt as _;
|
||||||
@ -164,8 +162,7 @@ mod rustls {
|
|||||||
|
|
||||||
B: MessageBody + 'static,
|
B: MessageBody + 'static,
|
||||||
{
|
{
|
||||||
/// Create Rustls based service.
|
/// Create Rustls v0.20 based service.
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "rustls")))]
|
|
||||||
pub fn rustls(
|
pub fn rustls(
|
||||||
self,
|
self,
|
||||||
mut config: ServerConfig,
|
mut config: ServerConfig,
|
||||||
@ -194,6 +191,159 @@ mod rustls {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rustls-0_21")]
|
||||||
|
mod rustls_0_21 {
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_service::ServiceFactoryExt as _;
|
||||||
|
use actix_tls::accept::{
|
||||||
|
rustls_0_21::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
|
TlsError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
|
||||||
|
where
|
||||||
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
S::Future: 'static,
|
||||||
|
S::Error: Into<Response<BoxBody>> + 'static,
|
||||||
|
S::Response: Into<Response<B>> + 'static,
|
||||||
|
<S::Service as Service<Request>>::Future: 'static,
|
||||||
|
|
||||||
|
B: MessageBody + 'static,
|
||||||
|
{
|
||||||
|
/// Create Rustls v0.21 based service.
|
||||||
|
pub fn rustls_021(
|
||||||
|
self,
|
||||||
|
mut config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = S::InitError,
|
||||||
|
> {
|
||||||
|
let mut protos = vec![b"h2".to_vec()];
|
||||||
|
protos.extend_from_slice(&config.alpn_protocols);
|
||||||
|
config.alpn_protocols = protos;
|
||||||
|
|
||||||
|
Acceptor::new(config)
|
||||||
|
.map_init_err(|_| {
|
||||||
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
|
})
|
||||||
|
.map_err(TlsError::into_service_error)
|
||||||
|
.map(|io: TlsStream<TcpStream>| {
|
||||||
|
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||||
|
(io, peer_addr)
|
||||||
|
})
|
||||||
|
.and_then(self.map_err(TlsError::Service))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rustls-0_22")]
|
||||||
|
mod rustls_0_22 {
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_service::ServiceFactoryExt as _;
|
||||||
|
use actix_tls::accept::{
|
||||||
|
rustls_0_22::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
|
TlsError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
|
||||||
|
where
|
||||||
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
S::Future: 'static,
|
||||||
|
S::Error: Into<Response<BoxBody>> + 'static,
|
||||||
|
S::Response: Into<Response<B>> + 'static,
|
||||||
|
<S::Service as Service<Request>>::Future: 'static,
|
||||||
|
|
||||||
|
B: MessageBody + 'static,
|
||||||
|
{
|
||||||
|
/// Create Rustls v0.22 based service.
|
||||||
|
pub fn rustls_0_22(
|
||||||
|
self,
|
||||||
|
mut config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = S::InitError,
|
||||||
|
> {
|
||||||
|
let mut protos = vec![b"h2".to_vec()];
|
||||||
|
protos.extend_from_slice(&config.alpn_protocols);
|
||||||
|
config.alpn_protocols = protos;
|
||||||
|
|
||||||
|
Acceptor::new(config)
|
||||||
|
.map_init_err(|_| {
|
||||||
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
|
})
|
||||||
|
.map_err(TlsError::into_service_error)
|
||||||
|
.map(|io: TlsStream<TcpStream>| {
|
||||||
|
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||||
|
(io, peer_addr)
|
||||||
|
})
|
||||||
|
.and_then(self.map_err(TlsError::Service))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rustls-0_23")]
|
||||||
|
mod rustls_0_23 {
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_service::ServiceFactoryExt as _;
|
||||||
|
use actix_tls::accept::{
|
||||||
|
rustls_0_23::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
|
TlsError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
|
||||||
|
where
|
||||||
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
S::Future: 'static,
|
||||||
|
S::Error: Into<Response<BoxBody>> + 'static,
|
||||||
|
S::Response: Into<Response<B>> + 'static,
|
||||||
|
<S::Service as Service<Request>>::Future: 'static,
|
||||||
|
|
||||||
|
B: MessageBody + 'static,
|
||||||
|
{
|
||||||
|
/// Create Rustls v0.23 based service.
|
||||||
|
pub fn rustls_0_23(
|
||||||
|
self,
|
||||||
|
mut config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = S::InitError,
|
||||||
|
> {
|
||||||
|
let mut protos = vec![b"h2".to_vec()];
|
||||||
|
protos.extend_from_slice(&config.alpn_protocols);
|
||||||
|
config.alpn_protocols = protos;
|
||||||
|
|
||||||
|
Acceptor::new(config)
|
||||||
|
.map_init_err(|_| {
|
||||||
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
|
})
|
||||||
|
.map_err(TlsError::into_service_error)
|
||||||
|
.map(|io: TlsStream<TcpStream>| {
|
||||||
|
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||||
|
(io, peer_addr)
|
||||||
|
})
|
||||||
|
.and_then(self.map_err(TlsError::Service))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<T, S, B> ServiceFactory<(T, Option<net::SocketAddr>)> for H2Service<T, S, B>
|
impl<T, S, B> ServiceFactory<(T, Option<net::SocketAddr>)> for H2Service<T, S, B>
|
||||||
where
|
where
|
||||||
T: AsyncRead + AsyncWrite + Unpin + 'static,
|
T: AsyncRead + AsyncWrite + Unpin + 'static,
|
||||||
|
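The three new H2 rustls modules above repeat the same ALPN preparation before handing the config to the acceptor. In isolation, with plain `Vec`s standing in for `ServerConfig::alpn_protocols`, the list manipulation looks like this:

    fn main() {
        // stand-in for config.alpn_protocols on a rustls ServerConfig
        let mut alpn_protocols: Vec<Vec<u8>> = vec![b"http/1.1".to_vec()];

        // prepend "h2" so HTTP/2 is offered first while keeping existing entries
        let mut protos = vec![b"h2".to_vec()];
        protos.extend_from_slice(&alpn_protocols);
        alpn_protocols = protos;

        assert_eq!(alpn_protocols, vec![b"h2".to_vec(), b"http/1.1".to_vec()]);
    }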
@ -4,6 +4,20 @@
|
|||||||
|
|
||||||
use http::header::HeaderName;
|
use http::header::HeaderName;
|
||||||
|
|
||||||
|
/// Response header field that indicates how caches have handled that response and its corresponding
|
||||||
|
/// request.
|
||||||
|
///
|
||||||
|
/// See [RFC 9211](https://www.rfc-editor.org/rfc/rfc9211) for full semantics.
|
||||||
|
// TODO(breaking): replace with http's version
|
||||||
|
pub const CACHE_STATUS: HeaderName = HeaderName::from_static("cache-status");
|
||||||
|
|
||||||
|
/// Response header field that allows origin servers to control the behavior of CDN caches
|
||||||
|
/// interposed between them and clients separately from other caches that might handle the response.
|
||||||
|
///
|
||||||
|
/// See [RFC 9213](https://www.rfc-editor.org/rfc/rfc9213) for full semantics.
|
||||||
|
// TODO(breaking): replace with http's version
|
||||||
|
pub const CDN_CACHE_CONTROL: HeaderName = HeaderName::from_static("cdn-cache-control");
|
||||||
|
|
||||||
/// Response header that prevents a document from loading any cross-origin resources that don't
|
/// Response header that prevents a document from loading any cross-origin resources that don't
|
||||||
/// explicitly grant the document permission (using [CORP] or [CORS]).
|
/// explicitly grant the document permission (using [CORP] or [CORS]).
|
||||||
///
|
///
|
||||||
|
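A minimal, hypothetical sketch of setting the two new constants on a header map; the field values are examples only (RFC 9211 and RFC 9213 define the real syntax), and `actix_http::header` is assumed as the import path:

    use actix_http::header::{HeaderMap, HeaderValue, CACHE_STATUS, CDN_CACHE_CONTROL};

    fn main() {
        let mut headers = HeaderMap::new();
        headers.insert(CACHE_STATUS, HeaderValue::from_static("ExampleCache; hit"));
        headers.insert(CDN_CACHE_CONTROL, HeaderValue::from_static("max-age=600"));
        assert_eq!(headers.len(), 2);
    }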
@ -1,7 +1,5 @@
|
|||||||
//! [`TryIntoHeaderPair`] trait and implementations.
|
//! [`TryIntoHeaderPair`] trait and implementations.
|
||||||
|
|
||||||
use std::convert::TryFrom as _;
|
|
||||||
|
|
||||||
use super::{
|
use super::{
|
||||||
Header, HeaderName, HeaderValue, InvalidHeaderName, InvalidHeaderValue, TryIntoHeaderValue,
|
Header, HeaderName, HeaderValue, InvalidHeaderName, InvalidHeaderValue, TryIntoHeaderValue,
|
||||||
};
|
};
|
||||||
|
@ -1,7 +1,5 @@
|
|||||||
//! [`TryIntoHeaderValue`] trait and implementations.
|
//! [`TryIntoHeaderValue`] trait and implementations.
|
||||||
|
|
||||||
use std::convert::TryFrom as _;
|
|
||||||
|
|
||||||
use bytes::Bytes;
|
use bytes::Bytes;
|
||||||
use http::{header::InvalidHeaderValue, Error as HttpError, HeaderValue};
|
use http::{header::InvalidHeaderValue, Error as HttpError, HeaderValue};
|
||||||
use mime::Mime;
|
use mime::Mime;
|
||||||
|
@ -636,10 +636,24 @@ impl<'a> IntoIterator for &'a HeaderMap {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convert `http::HeaderMap` to our `HeaderMap`.
|
/// Convert a `http::HeaderMap` to our `HeaderMap`.
|
||||||
impl From<http::HeaderMap> for HeaderMap {
|
impl From<http::HeaderMap> for HeaderMap {
|
||||||
fn from(mut map: http::HeaderMap) -> HeaderMap {
|
fn from(mut map: http::HeaderMap) -> Self {
|
||||||
HeaderMap::from_drain(map.drain())
|
Self::from_drain(map.drain())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convert our `HeaderMap` to a `http::HeaderMap`.
|
||||||
|
impl From<HeaderMap> for http::HeaderMap {
|
||||||
|
fn from(map: HeaderMap) -> Self {
|
||||||
|
Self::from_iter(map)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convert our `&HeaderMap` to a `http::HeaderMap`.
|
||||||
|
impl From<&HeaderMap> for http::HeaderMap {
|
||||||
|
fn from(map: &HeaderMap) -> Self {
|
||||||
|
map.to_owned().into()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
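A hedged round-trip through the new conversions (assumes both the `actix-http` and `http` crates as dependencies; the maps are left empty for brevity):

    fn main() {
        let ours = actix_http::header::HeaderMap::new();
        let theirs: http::HeaderMap = ours.into();
        let back: actix_http::header::HeaderMap = theirs.into();
        assert!(back.is_empty());
    }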
@ -1120,9 +1134,7 @@ mod tests {
|
|||||||
assert!(vals.next().is_none());
|
assert!(vals.next().is_none());
|
||||||
}
|
}
|
||||||
|
|
||||||
fn owned_pair<'a>(
|
fn owned_pair<'a>((name, val): (&'a HeaderName, &'a HeaderValue)) -> (HeaderName, HeaderValue) {
|
||||||
(name, val): (&'a HeaderName, &'a HeaderValue),
|
|
||||||
) -> (HeaderName, HeaderValue) {
|
|
||||||
(name.clone(), val.clone())
|
(name.clone(), val.clone())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -3,33 +3,30 @@
|
|||||||
// declaring new header consts will yield this error
|
// declaring new header consts will yield this error
|
||||||
#![allow(clippy::declare_interior_mutable_const)]
|
#![allow(clippy::declare_interior_mutable_const)]
|
||||||
|
|
||||||
use percent_encoding::{AsciiSet, CONTROLS};
|
|
||||||
|
|
||||||
// re-export from http except header map related items
|
// re-export from http except header map related items
|
||||||
pub use ::http::header::{
|
pub use ::http::header::{
|
||||||
HeaderName, HeaderValue, InvalidHeaderName, InvalidHeaderValue, ToStrError,
|
HeaderName, HeaderValue, InvalidHeaderName, InvalidHeaderValue, ToStrError,
|
||||||
};
|
};
|
||||||
|
|
||||||
// re-export const header names, list is explicit so that any updates to `common` module do not
|
// re-export const header names, list is explicit so that any updates to `common` module do not
|
||||||
// conflict with this set
|
// conflict with this set
|
||||||
pub use ::http::header::{
|
pub use ::http::header::{
|
||||||
ACCEPT, ACCEPT_CHARSET, ACCEPT_ENCODING, ACCEPT_LANGUAGE, ACCEPT_RANGES,
|
ACCEPT, ACCEPT_CHARSET, ACCEPT_ENCODING, ACCEPT_LANGUAGE, ACCEPT_RANGES,
|
||||||
ACCESS_CONTROL_ALLOW_CREDENTIALS, ACCESS_CONTROL_ALLOW_HEADERS,
|
ACCESS_CONTROL_ALLOW_CREDENTIALS, ACCESS_CONTROL_ALLOW_HEADERS, ACCESS_CONTROL_ALLOW_METHODS,
|
||||||
ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN, ACCESS_CONTROL_EXPOSE_HEADERS,
|
ACCESS_CONTROL_ALLOW_ORIGIN, ACCESS_CONTROL_EXPOSE_HEADERS, ACCESS_CONTROL_MAX_AGE,
|
||||||
ACCESS_CONTROL_MAX_AGE, ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD, AGE,
|
ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD, AGE, ALLOW, ALT_SVC,
|
||||||
ALLOW, ALT_SVC, AUTHORIZATION, CACHE_CONTROL, CONNECTION, CONTENT_DISPOSITION,
|
AUTHORIZATION, CACHE_CONTROL, CONNECTION, CONTENT_DISPOSITION, CONTENT_ENCODING,
|
||||||
CONTENT_ENCODING, CONTENT_LANGUAGE, CONTENT_LENGTH, CONTENT_LOCATION, CONTENT_RANGE,
|
CONTENT_LANGUAGE, CONTENT_LENGTH, CONTENT_LOCATION, CONTENT_RANGE, CONTENT_SECURITY_POLICY,
|
||||||
CONTENT_SECURITY_POLICY, CONTENT_SECURITY_POLICY_REPORT_ONLY, CONTENT_TYPE, COOKIE, DATE,
|
CONTENT_SECURITY_POLICY_REPORT_ONLY, CONTENT_TYPE, COOKIE, DATE, DNT, ETAG, EXPECT, EXPIRES,
|
||||||
DNT, ETAG, EXPECT, EXPIRES, FORWARDED, FROM, HOST, IF_MATCH, IF_MODIFIED_SINCE,
|
FORWARDED, FROM, HOST, IF_MATCH, IF_MODIFIED_SINCE, IF_NONE_MATCH, IF_RANGE,
|
||||||
IF_NONE_MATCH, IF_RANGE, IF_UNMODIFIED_SINCE, LAST_MODIFIED, LINK, LOCATION, MAX_FORWARDS,
|
IF_UNMODIFIED_SINCE, LAST_MODIFIED, LINK, LOCATION, MAX_FORWARDS, ORIGIN, PRAGMA,
|
||||||
ORIGIN, PRAGMA, PROXY_AUTHENTICATE, PROXY_AUTHORIZATION, PUBLIC_KEY_PINS,
|
PROXY_AUTHENTICATE, PROXY_AUTHORIZATION, PUBLIC_KEY_PINS, PUBLIC_KEY_PINS_REPORT_ONLY, RANGE,
|
||||||
PUBLIC_KEY_PINS_REPORT_ONLY, RANGE, REFERER, REFERRER_POLICY, REFRESH, RETRY_AFTER,
|
REFERER, REFERRER_POLICY, REFRESH, RETRY_AFTER, SEC_WEBSOCKET_ACCEPT, SEC_WEBSOCKET_EXTENSIONS,
|
||||||
SEC_WEBSOCKET_ACCEPT, SEC_WEBSOCKET_EXTENSIONS, SEC_WEBSOCKET_KEY, SEC_WEBSOCKET_PROTOCOL,
|
SEC_WEBSOCKET_KEY, SEC_WEBSOCKET_PROTOCOL, SEC_WEBSOCKET_VERSION, SERVER, SET_COOKIE,
|
||||||
SEC_WEBSOCKET_VERSION, SERVER, SET_COOKIE, STRICT_TRANSPORT_SECURITY, TE, TRAILER,
|
STRICT_TRANSPORT_SECURITY, TE, TRAILER, TRANSFER_ENCODING, UPGRADE, UPGRADE_INSECURE_REQUESTS,
|
||||||
TRANSFER_ENCODING, UPGRADE, UPGRADE_INSECURE_REQUESTS, USER_AGENT, VARY, VIA, WARNING,
|
USER_AGENT, VARY, VIA, WARNING, WWW_AUTHENTICATE, X_CONTENT_TYPE_OPTIONS,
|
||||||
WWW_AUTHENTICATE, X_CONTENT_TYPE_OPTIONS, X_DNS_PREFETCH_CONTROL, X_FRAME_OPTIONS,
|
X_DNS_PREFETCH_CONTROL, X_FRAME_OPTIONS, X_XSS_PROTECTION,
|
||||||
X_XSS_PROTECTION,
|
|
||||||
};
|
};
|
||||||
|
use percent_encoding::{AsciiSet, CONTROLS};
|
||||||
|
|
||||||
use crate::{error::ParseError, HttpMessage};
|
use crate::{error::ParseError, HttpMessage};
|
||||||
|
|
||||||
@ -43,22 +40,22 @@ mod utils;
|
|||||||
|
|
||||||
pub use self::{
|
pub use self::{
|
||||||
as_name::AsHeaderName,
|
as_name::AsHeaderName,
|
||||||
|
// re-export list is explicit so that any updates to `http` do not conflict with this set
|
||||||
|
common::{
|
||||||
|
CACHE_STATUS, CDN_CACHE_CONTROL, CROSS_ORIGIN_EMBEDDER_POLICY, CROSS_ORIGIN_OPENER_POLICY,
|
||||||
|
CROSS_ORIGIN_RESOURCE_POLICY, PERMISSIONS_POLICY, X_FORWARDED_FOR, X_FORWARDED_HOST,
|
||||||
|
X_FORWARDED_PROTO,
|
||||||
|
},
|
||||||
into_pair::TryIntoHeaderPair,
|
into_pair::TryIntoHeaderPair,
|
||||||
into_value::TryIntoHeaderValue,
|
into_value::TryIntoHeaderValue,
|
||||||
map::HeaderMap,
|
map::HeaderMap,
|
||||||
shared::{
|
shared::{
|
||||||
parse_extended_value, q, Charset, ContentEncoding, ExtendedValue, HttpDate,
|
parse_extended_value, q, Charset, ContentEncoding, ExtendedValue, HttpDate, LanguageTag,
|
||||||
LanguageTag, Quality, QualityItem,
|
Quality, QualityItem,
|
||||||
},
|
},
|
||||||
utils::{fmt_comma_delimited, from_comma_delimited, from_one_raw_str, http_percent_encode},
|
utils::{fmt_comma_delimited, from_comma_delimited, from_one_raw_str, http_percent_encode},
|
||||||
};
|
};
|
||||||
|
|
||||||
// re-export list is explicit so that any updates to `http` do not conflict with this set
|
|
||||||
pub use self::common::{
|
|
||||||
CROSS_ORIGIN_EMBEDDER_POLICY, CROSS_ORIGIN_OPENER_POLICY, CROSS_ORIGIN_RESOURCE_POLICY,
|
|
||||||
PERMISSIONS_POLICY, X_FORWARDED_FOR, X_FORWARDED_HOST, X_FORWARDED_PROTO,
|
|
||||||
};
|
|
||||||
|
|
||||||
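For orientation, a minimal sketch (module paths and header values are my own assumptions, not part of this change) of using the re-exported header name constants together with `HeaderMap`:

    use actix_http::header::{HeaderMap, HeaderValue, CONTENT_TYPE, X_FORWARDED_FOR};

    // Named constants avoid typos in header names and keep lookups allocation-free.
    fn build_headers() -> HeaderMap {
        let mut headers = HeaderMap::new();
        headers.insert(CONTENT_TYPE, HeaderValue::from_static("text/plain"));
        headers.insert(X_FORWARDED_FOR, HeaderValue::from_static("203.0.113.7"));
        assert!(headers.contains_key(X_FORWARDED_FOR));
        headers
    }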
/// An interface for types that already represent a valid header.
|
/// An interface for types that already represent a valid header.
|
||||||
pub trait Header: TryIntoHeaderValue {
|
pub trait Header: TryIntoHeaderValue {
|
||||||
/// Returns the name of the header field.
|
/// Returns the name of the header field.
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
use std::{convert::TryFrom, str::FromStr};
|
use std::str::FromStr;
|
||||||
|
|
||||||
use derive_more::{Display, Error};
|
use derive_more::{Display, Error};
|
||||||
use http::header::InvalidHeaderValue;
|
use http::header::InvalidHeaderValue;
|
||||||
|
@ -24,8 +24,7 @@ impl FromStr for HttpDate {
|
|||||||
|
|
||||||
impl fmt::Display for HttpDate {
|
impl fmt::Display for HttpDate {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
let date_str = httpdate::fmt_http_date(self.0);
|
httpdate::HttpDate::from(self.0).fmt(f)
|
||||||
f.write_str(&date_str)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -37,7 +36,7 @@ impl TryIntoHeaderValue for HttpDate {
|
|||||||
let mut wrt = MutWriter(&mut buf);
|
let mut wrt = MutWriter(&mut buf);
|
||||||
|
|
||||||
// unwrap: date output is known to be well formed and of known length
|
// unwrap: date output is known to be well formed and of known length
|
||||||
write!(wrt, "{}", httpdate::fmt_http_date(self.0)).unwrap();
|
write!(wrt, "{}", self).unwrap();
|
||||||
|
|
||||||
HeaderValue::from_maybe_shared(buf.split().freeze())
|
HeaderValue::from_maybe_shared(buf.split().freeze())
|
||||||
}
|
}
|
||||||
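A small illustrative sketch (not taken from the diff; the epoch timestamp is just a convenient fixed point) of how `HttpDate` round-trips between `SystemTime`, its IMF-fixdate text form, and a header value:

    use std::time::SystemTime;
    use actix_http::header::{HttpDate, TryIntoHeaderValue as _};

    fn date_header() {
        // Display renders the IMF-fixdate form produced by `httpdate`, as above.
        let date = HttpDate::from(SystemTime::UNIX_EPOCH);
        assert_eq!(date.to_string(), "Thu, 01 Jan 1970 00:00:00 GMT");

        // The same text ends up in the header value.
        let value = date.try_into_value().unwrap();
        assert_eq!(value.to_str().unwrap(), "Thu, 01 Jan 1970 00:00:00 GMT");
    }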
|
@ -1,5 +1,7 @@
|
|||||||
//! Originally taken from `hyper::header::shared`.
|
//! Originally taken from `hyper::header::shared`.
|
||||||
|
|
||||||
|
pub use language_tags::LanguageTag;
|
||||||
|
|
||||||
mod charset;
|
mod charset;
|
||||||
mod content_encoding;
|
mod content_encoding;
|
||||||
mod extended;
|
mod extended;
|
||||||
@ -7,10 +9,11 @@ mod http_date;
|
|||||||
mod quality;
|
mod quality;
|
||||||
mod quality_item;
|
mod quality_item;
|
||||||
|
|
||||||
pub use self::charset::Charset;
|
pub use self::{
|
||||||
pub use self::content_encoding::ContentEncoding;
|
charset::Charset,
|
||||||
pub use self::extended::{parse_extended_value, ExtendedValue};
|
content_encoding::ContentEncoding,
|
||||||
pub use self::http_date::HttpDate;
|
extended::{parse_extended_value, ExtendedValue},
|
||||||
pub use self::quality::{q, Quality};
|
http_date::HttpDate,
|
||||||
pub use self::quality_item::QualityItem;
|
quality::{q, Quality},
|
||||||
pub use language_tags::LanguageTag;
|
quality_item::QualityItem,
|
||||||
|
};
|
||||||
|
@ -1,7 +1,4 @@
|
|||||||
use std::{
|
use std::fmt;
|
||||||
convert::{TryFrom, TryInto},
|
|
||||||
fmt,
|
|
||||||
};
|
|
||||||
|
|
||||||
use derive_more::{Display, Error};
|
use derive_more::{Display, Error};
|
||||||
|
|
||||||
|
@ -1,8 +1,7 @@
|
|||||||
use std::{cmp, convert::TryFrom as _, fmt, str};
|
use std::{cmp, fmt, str};
|
||||||
|
|
||||||
use crate::error::ParseError;
|
|
||||||
|
|
||||||
use super::Quality;
|
use super::Quality;
|
||||||
|
use crate::error::ParseError;
|
||||||
|
|
||||||
/// Represents an item with a quality value as defined
|
/// Represents an item with a quality value as defined
|
||||||
/// in [RFC 7231 §5.3.1](https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.1).
|
/// in [RFC 7231 §5.3.1](https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.1).
|
||||||
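As a quick illustration (the example values are mine, not from this change), quality-weighted items parse from the familiar `;q=` syntax and compare against the `q` helper:

    use actix_http::header::{q, QualityItem};

    fn quality_values() {
        // "gzip; q=0.8" → item "gzip" with weight 0.8 (stored in thousandths).
        let item: QualityItem<String> = "gzip; q=0.8".parse().unwrap();
        assert_eq!(item.item, "gzip");
        assert_eq!(item.quality, q(0.8));
    }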
|
@ -80,18 +80,18 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn comma_delimited_parsing() {
|
fn comma_delimited_parsing() {
|
||||||
let headers = vec![];
|
let headers = [];
|
||||||
let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
|
let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
|
||||||
assert_eq!(res, vec![0; 0]);
|
assert_eq!(res, vec![0; 0]);
|
||||||
|
|
||||||
let headers = vec![
|
let headers = [
|
||||||
HeaderValue::from_static("1, 2"),
|
HeaderValue::from_static("1, 2"),
|
||||||
HeaderValue::from_static("3,4"),
|
HeaderValue::from_static("3,4"),
|
||||||
];
|
];
|
||||||
let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
|
let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
|
||||||
assert_eq!(res, vec![1, 2, 3, 4]);
|
assert_eq!(res, vec![1, 2, 3, 4]);
|
||||||
|
|
||||||
let headers = vec![
|
let headers = [
|
||||||
HeaderValue::from_static(""),
|
HeaderValue::from_static(""),
|
||||||
HeaderValue::from_static(","),
|
HeaderValue::from_static(","),
|
||||||
HeaderValue::from_static(" "),
|
HeaderValue::from_static(" "),
|
||||||
|
@ -61,9 +61,7 @@ pub trait HttpMessage: Sized {
|
|||||||
fn encoding(&self) -> Result<&'static Encoding, ContentTypeError> {
|
fn encoding(&self) -> Result<&'static Encoding, ContentTypeError> {
|
||||||
if let Some(mime_type) = self.mime_type()? {
|
if let Some(mime_type) = self.mime_type()? {
|
||||||
if let Some(charset) = mime_type.get_param("charset") {
|
if let Some(charset) = mime_type.get_param("charset") {
|
||||||
if let Some(enc) =
|
if let Some(enc) = Encoding::for_label_no_replacement(charset.as_str().as_bytes()) {
|
||||||
Encoding::for_label_no_replacement(charset.as_str().as_bytes())
|
|
||||||
{
|
|
||||||
Ok(enc)
|
Ok(enc)
|
||||||
} else {
|
} else {
|
||||||
Err(ContentTypeError::UnknownEncoding)
|
Err(ContentTypeError::UnknownEncoding)
|
||||||
@ -146,7 +144,7 @@ mod tests {
|
|||||||
.finish();
|
.finish();
|
||||||
assert_eq!(req.content_type(), "text/plain");
|
assert_eq!(req.content_type(), "text/plain");
|
||||||
let req = TestRequest::default()
|
let req = TestRequest::default()
|
||||||
.insert_header(("content-type", "application/json; charset=utf=8"))
|
.insert_header(("content-type", "application/json; charset=utf-8"))
|
||||||
.finish();
|
.finish();
|
||||||
assert_eq!(req.content_type(), "application/json");
|
assert_eq!(req.content_type(), "application/json");
|
||||||
let req = TestRequest::default().finish();
|
let req = TestRequest::default().finish();
|
||||||
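To make the charset handling above concrete, a small sketch (header values assumed) using the crate's `TestRequest` helper:

    use actix_http::{test::TestRequest, HttpMessage as _};

    fn charset_detection() {
        let req = TestRequest::default()
            .insert_header(("content-type", "text/plain; charset=utf-8"))
            .finish();

        // `content_type()` strips parameters; `encoding()` resolves the charset
        // label via encoding_rs and falls back to UTF-8 when none is present.
        assert_eq!(req.content_type(), "text/plain");
        assert_eq!(req.encoding().unwrap().name(), "UTF-8");
    }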
|
@ -1,11 +1,15 @@
|
|||||||
//! HTTP primitives for the Actix ecosystem.
|
//! HTTP types and services for the Actix ecosystem.
|
||||||
//!
|
//!
|
||||||
//! ## Crate Features
|
//! ## Crate Features
|
||||||
|
//!
|
||||||
//! | Feature | Functionality |
|
//! | Feature | Functionality |
|
||||||
//! | ------------------- | ------------------------------------------- |
|
//! | ------------------- | ------------------------------------------- |
|
||||||
//! | `http2` | HTTP/2 support via [h2]. |
|
//! | `http2` | HTTP/2 support via [h2]. |
|
||||||
//! | `openssl` | TLS support via [OpenSSL]. |
|
//! | `openssl` | TLS support via [OpenSSL]. |
|
||||||
//! | `rustls` | TLS support via [rustls]. |
|
//! | `rustls` | TLS support via [rustls] 0.20. |
|
||||||
|
//! | `rustls-0_21` | TLS support via [rustls] 0.21. |
|
||||||
|
//! | `rustls-0_22` | TLS support via [rustls] 0.22. |
|
||||||
|
//! | `rustls-0_23` | TLS support via [rustls] 0.23. |
|
||||||
//! | `compress-brotli` | Payload compression support: Brotli. |
|
//! | `compress-brotli` | Payload compression support: Brotli. |
|
||||||
//! | `compress-gzip` | Payload compression support: Deflate, Gzip. |
|
//! | `compress-gzip` | Payload compression support: Deflate, Gzip. |
|
||||||
//! | `compress-zstd` | Payload compression support: Zstd. |
|
//! | `compress-zstd` | Payload compression support: Zstd. |
|
||||||
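For reference, a rough sketch of enabling some of these flags from a downstream crate; the version requirement and the particular feature selection are illustrative assumptions, not part of this change:

    [dependencies]
    # HTTP/2 plus compression; TLS would be added via e.g. the `rustls-0_23` feature.
    actix-http = { version = "3", features = ["http2", "compress-brotli", "compress-gzip"] }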
@ -21,15 +25,13 @@
|
|||||||
#![allow(
|
#![allow(
|
||||||
clippy::type_complexity,
|
clippy::type_complexity,
|
||||||
clippy::too_many_arguments,
|
clippy::too_many_arguments,
|
||||||
clippy::borrow_interior_mutable_const,
|
clippy::borrow_interior_mutable_const
|
||||||
clippy::uninlined_format_args
|
|
||||||
)]
|
)]
|
||||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||||
#![cfg_attr(docsrs, feature(doc_cfg))]
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
|
||||||
pub use ::http::{uri, uri::Uri};
|
pub use http::{uri, uri::Uri, Method, StatusCode, Version};
|
||||||
pub use ::http::{Method, StatusCode, Version};
|
|
||||||
|
|
||||||
pub mod body;
|
pub mod body;
|
||||||
mod builder;
|
mod builder;
|
||||||
@ -41,7 +43,6 @@ pub mod error;
|
|||||||
mod extensions;
|
mod extensions;
|
||||||
pub mod h1;
|
pub mod h1;
|
||||||
#[cfg(feature = "http2")]
|
#[cfg(feature = "http2")]
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "http2")))]
|
|
||||||
pub mod h2;
|
pub mod h2;
|
||||||
pub mod header;
|
pub mod header;
|
||||||
mod helpers;
|
mod helpers;
|
||||||
@ -56,26 +57,32 @@ mod responses;
|
|||||||
mod service;
|
mod service;
|
||||||
pub mod test;
|
pub mod test;
|
||||||
#[cfg(feature = "ws")]
|
#[cfg(feature = "ws")]
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "ws")))]
|
|
||||||
pub mod ws;
|
pub mod ws;
|
||||||
|
|
||||||
pub use self::builder::HttpServiceBuilder;
|
|
||||||
pub use self::config::ServiceConfig;
|
|
||||||
pub use self::error::Error;
|
|
||||||
pub use self::extensions::Extensions;
|
|
||||||
pub use self::header::ContentEncoding;
|
|
||||||
pub use self::http_message::HttpMessage;
|
|
||||||
pub use self::keep_alive::KeepAlive;
|
|
||||||
pub use self::message::ConnectionType;
|
|
||||||
pub use self::message::Message;
|
|
||||||
#[allow(deprecated)]
|
#[allow(deprecated)]
|
||||||
pub use self::payload::{BoxedPayloadStream, Payload, PayloadStream};
|
pub use self::payload::PayloadStream;
|
||||||
pub use self::requests::{Request, RequestHead, RequestHeadType};
|
#[cfg(any(
|
||||||
pub use self::responses::{Response, ResponseBuilder, ResponseHead};
|
feature = "openssl",
|
||||||
pub use self::service::HttpService;
|
feature = "rustls-0_20",
|
||||||
#[cfg(any(feature = "openssl", feature = "rustls"))]
|
feature = "rustls-0_21",
|
||||||
#[cfg_attr(docsrs, doc(cfg(any(feature = "openssl", feature = "rustls"))))]
|
feature = "rustls-0_22",
|
||||||
|
feature = "rustls-0_23",
|
||||||
|
))]
|
||||||
pub use self::service::TlsAcceptorConfig;
|
pub use self::service::TlsAcceptorConfig;
|
||||||
|
pub use self::{
|
||||||
|
builder::HttpServiceBuilder,
|
||||||
|
config::ServiceConfig,
|
||||||
|
error::Error,
|
||||||
|
extensions::Extensions,
|
||||||
|
header::ContentEncoding,
|
||||||
|
http_message::HttpMessage,
|
||||||
|
keep_alive::KeepAlive,
|
||||||
|
message::{ConnectionType, Message},
|
||||||
|
payload::{BoxedPayloadStream, Payload},
|
||||||
|
requests::{Request, RequestHead, RequestHeadType},
|
||||||
|
responses::{Response, ResponseBuilder, ResponseHead},
|
||||||
|
service::HttpService,
|
||||||
|
};
|
||||||
|
|
||||||
/// A major HTTP protocol version.
|
/// A major HTTP protocol version.
|
||||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||||
|
@ -16,6 +16,7 @@ pub enum ConnectionType {
|
|||||||
}
|
}
|
||||||
|
|
||||||
bitflags! {
|
bitflags! {
|
||||||
|
#[derive(Debug, Clone, Copy)]
|
||||||
pub(crate) struct Flags: u8 {
|
pub(crate) struct Flags: u8 {
|
||||||
const CLOSE = 0b0000_0001;
|
const CLOSE = 0b0000_0001;
|
||||||
const KEEP_ALIVE = 0b0000_0010;
|
const KEEP_ALIVE = 0b0000_0010;
|
||||||
|
@ -5,7 +5,7 @@
|
|||||||
use std::cell::RefCell;
|
use std::cell::RefCell;
|
||||||
|
|
||||||
thread_local! {
|
thread_local! {
|
||||||
static NOTIFY_DROPPED: RefCell<Option<bool>> = RefCell::new(None);
|
static NOTIFY_DROPPED: RefCell<Option<bool>> = const { RefCell::new(None) };
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Check if the spawned task is dropped.
|
/// Check if the spawned task is dropped.
|
||||||
|
@ -16,7 +16,10 @@ pub struct RequestHead {
|
|||||||
pub uri: Uri,
|
pub uri: Uri,
|
||||||
pub version: Version,
|
pub version: Version,
|
||||||
pub headers: HeaderMap,
|
pub headers: HeaderMap,
|
||||||
|
|
||||||
|
/// Will only be None when called in unit tests unless set manually.
|
||||||
pub peer_addr: Option<net::SocketAddr>,
|
pub peer_addr: Option<net::SocketAddr>,
|
||||||
|
|
||||||
flags: Flags,
|
flags: Flags,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3,5 +3,7 @@
|
|||||||
mod head;
|
mod head;
|
||||||
mod request;
|
mod request;
|
||||||
|
|
||||||
pub use self::head::{RequestHead, RequestHeadType};
|
pub use self::{
|
||||||
pub use self::request::Request;
|
head::{RequestHead, RequestHeadType},
|
||||||
|
request::Request,
|
||||||
|
};
|
||||||
|
@ -10,8 +10,7 @@ use std::{
|
|||||||
use http::{header, Method, Uri, Version};
|
use http::{header, Method, Uri, Version};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
header::HeaderMap, BoxedPayloadStream, Extensions, HttpMessage, Message, Payload,
|
header::HeaderMap, BoxedPayloadStream, Extensions, HttpMessage, Message, Payload, RequestHead,
|
||||||
RequestHead,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/// An HTTP request.
|
/// An HTTP request.
|
||||||
@ -174,7 +173,7 @@ impl<P> Request<P> {
|
|||||||
/// Peer address is the directly connected peer's socket address. If a proxy is used in front of
|
/// Peer address is the directly connected peer's socket address. If a proxy is used in front of
|
||||||
/// the Actix Web server, then it would be the address of this proxy.
|
/// the Actix Web server, then it would be the address of this proxy.
|
||||||
///
|
///
|
||||||
/// Will only return None when called in unit tests.
|
/// Will only return None when called in unit tests unless set manually.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn peer_addr(&self) -> Option<net::SocketAddr> {
|
pub fn peer_addr(&self) -> Option<net::SocketAddr> {
|
||||||
self.head().peer_addr
|
self.head().peer_addr
|
||||||
@ -234,7 +233,6 @@ impl<P> fmt::Debug for Request<P> {
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use std::convert::TryFrom;
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_basics() {
|
fn test_basics() {
|
||||||
|
@ -93,7 +93,7 @@ impl ResponseBuilder {
|
|||||||
Ok((key, value)) => {
|
Ok((key, value)) => {
|
||||||
parts.headers.insert(key, value);
|
parts.headers.insert(key, value);
|
||||||
}
|
}
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -119,7 +119,7 @@ impl ResponseBuilder {
|
|||||||
if let Some(parts) = self.inner() {
|
if let Some(parts) = self.inner() {
|
||||||
match header.try_into_pair() {
|
match header.try_into_pair() {
|
||||||
Ok((key, value)) => parts.headers.append(key, value),
|
Ok((key, value)) => parts.headers.append(key, value),
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -193,7 +193,7 @@ impl ResponseBuilder {
|
|||||||
Ok(value) => {
|
Ok(value) => {
|
||||||
parts.headers.insert(header::CONTENT_TYPE, value);
|
parts.headers.insert(header::CONTENT_TYPE, value);
|
||||||
}
|
}
|
||||||
Err(e) => self.err = Some(e.into()),
|
Err(err) => self.err = Some(err.into()),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
self
|
self
|
||||||
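A brief sketch (header names and body are made up) of the builder pattern these hunks touch; as the match arms above show, a failed header conversion is stashed in the builder's error slot instead of panicking:

    use actix_http::{Response, StatusCode};

    fn response_example() {
        let res = Response::build(StatusCode::OK)
            .insert_header(("x-request-id", "abc123"))
            .append_header(("vary", "accept-encoding"))
            .content_type("text/plain")
            .body("ok");
        assert_eq!(res.status(), StatusCode::OK);
    }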
|
@ -5,7 +5,5 @@ mod head;
|
|||||||
#[allow(clippy::module_inception)]
|
#[allow(clippy::module_inception)]
|
||||||
mod response;
|
mod response;
|
||||||
|
|
||||||
pub use self::builder::ResponseBuilder;
|
|
||||||
pub(crate) use self::head::BoxedResponseHead;
|
pub(crate) use self::head::BoxedResponseHead;
|
||||||
pub use self::head::ResponseHead;
|
pub use self::{builder::ResponseBuilder, head::ResponseHead, response::Response};
|
||||||
pub use self::response::Response;
|
|
||||||
|
@ -30,9 +30,9 @@ use crate::{
|
|||||||
///
|
///
|
||||||
/// # Automatic HTTP Version Selection
|
/// # Automatic HTTP Version Selection
|
||||||
/// There are two ways to select the HTTP version of an incoming connection:
|
/// There are two ways to select the HTTP version of an incoming connection:
|
||||||
/// - One is to rely on the ALPN information that is provided when using a TLS (HTTPS); both
|
/// - One is to rely on the ALPN information that is provided when using TLS (HTTPS); both versions
|
||||||
/// versions are supported automatically when using either of the `.rustls()` or `.openssl()`
|
/// are supported automatically when using either of the `.rustls()` or `.openssl()` finalizing
|
||||||
/// finalizing methods.
|
/// methods.
|
||||||
/// - The other is to read the first few bytes of the TCP stream. This is the only viable approach
|
/// - The other is to read the first few bytes of the TCP stream. This is the only viable approach
|
||||||
/// for supporting H2C, which allows the HTTP/2 protocol to work over plaintext connections. Use
|
/// for supporting H2C, which allows the HTTP/2 protocol to work over plaintext connections. Use
|
||||||
/// the `.tcp_auto_h2c()` finalizing method to enable this behavior.
|
/// the `.tcp_auto_h2c()` finalizing method to enable this behavior.
|
||||||
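To ground the two selection modes, a rough sketch (handler and body are placeholders; assumes the `http2` feature is enabled) of finalizing a service with the plaintext auto-detection path:

    use actix_http::{Error, HttpService, Request, Response, StatusCode};

    fn plaintext_h2c_factory() {
        // `.tcp_auto_h2c()` peeks the first bytes of each connection and dispatches
        // to HTTP/2 when the client preface is seen, otherwise to HTTP/1.x.
        let _factory = HttpService::build()
            .finish(|_req: Request| async move {
                Ok::<_, Error>(Response::build(StatusCode::OK).body("hello"))
            })
            .tcp_auto_h2c();
    }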
@ -200,13 +200,8 @@ where
|
|||||||
/// The resulting service only supports HTTP/1.x.
|
/// The resulting service only supports HTTP/1.x.
|
||||||
pub fn tcp(
|
pub fn tcp(
|
||||||
self,
|
self,
|
||||||
) -> impl ServiceFactory<
|
) -> impl ServiceFactory<TcpStream, Config = (), Response = (), Error = DispatchError, InitError = ()>
|
||||||
TcpStream,
|
{
|
||||||
Config = (),
|
|
||||||
Response = (),
|
|
||||||
Error = DispatchError,
|
|
||||||
InitError = (),
|
|
||||||
> {
|
|
||||||
fn_service(|io: TcpStream| async {
|
fn_service(|io: TcpStream| async {
|
||||||
let peer_addr = io.peer_addr().ok();
|
let peer_addr = io.peer_addr().ok();
|
||||||
Ok((io, Protocol::Http1, peer_addr))
|
Ok((io, Protocol::Http1, peer_addr))
|
||||||
@ -217,16 +212,10 @@ where
|
|||||||
/// Creates TCP stream service from HTTP service that automatically selects HTTP/1.x or HTTP/2
|
/// Creates TCP stream service from HTTP service that automatically selects HTTP/1.x or HTTP/2
|
||||||
/// on plaintext connections.
|
/// on plaintext connections.
|
||||||
#[cfg(feature = "http2")]
|
#[cfg(feature = "http2")]
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "http2")))]
|
|
||||||
pub fn tcp_auto_h2c(
|
pub fn tcp_auto_h2c(
|
||||||
self,
|
self,
|
||||||
) -> impl ServiceFactory<
|
) -> impl ServiceFactory<TcpStream, Config = (), Response = (), Error = DispatchError, InitError = ()>
|
||||||
TcpStream,
|
{
|
||||||
Config = (),
|
|
||||||
Response = (),
|
|
||||||
Error = DispatchError,
|
|
||||||
InitError = (),
|
|
||||||
> {
|
|
||||||
fn_service(move |io: TcpStream| async move {
|
fn_service(move |io: TcpStream| async move {
|
||||||
// subset of HTTP/2 preface defined by RFC 9113 §3.4
|
// subset of HTTP/2 preface defined by RFC 9113 §3.4
|
||||||
// this subset was chosen to maximize likelihood that peeking only once will allow us to
|
// this subset was chosen to maximize likelihood that peeking only once will allow us to
|
||||||
@ -252,14 +241,25 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Configuration options used when accepting TLS connection.
|
/// Configuration options used when accepting TLS connection.
|
||||||
#[cfg(any(feature = "openssl", feature = "rustls"))]
|
#[cfg(any(
|
||||||
#[cfg_attr(docsrs, doc(cfg(any(feature = "openssl", feature = "rustls"))))]
|
feature = "openssl",
|
||||||
|
feature = "rustls-0_20",
|
||||||
|
feature = "rustls-0_21",
|
||||||
|
feature = "rustls-0_22",
|
||||||
|
feature = "rustls-0_23",
|
||||||
|
))]
|
||||||
#[derive(Debug, Default)]
|
#[derive(Debug, Default)]
|
||||||
pub struct TlsAcceptorConfig {
|
pub struct TlsAcceptorConfig {
|
||||||
pub(crate) handshake_timeout: Option<std::time::Duration>,
|
pub(crate) handshake_timeout: Option<std::time::Duration>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(any(feature = "openssl", feature = "rustls"))]
|
#[cfg(any(
|
||||||
|
feature = "openssl",
|
||||||
|
feature = "rustls-0_20",
|
||||||
|
feature = "rustls-0_21",
|
||||||
|
feature = "rustls-0_22",
|
||||||
|
feature = "rustls-0_23",
|
||||||
|
))]
|
||||||
impl TlsAcceptorConfig {
|
impl TlsAcceptorConfig {
|
||||||
/// Set TLS handshake timeout duration.
|
/// Set TLS handshake timeout duration.
|
||||||
pub fn handshake_timeout(self, dur: std::time::Duration) -> Self {
|
pub fn handshake_timeout(self, dur: std::time::Duration) -> Self {
|
||||||
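A tiny illustrative sketch (the three-second figure is arbitrary; available only when one of the TLS features above is enabled) of building this config, which is then handed to one of the `*_with_config` constructors such as `openssl_with_config` or `rustls_0_23_with_config`:

    use std::time::Duration;

    use actix_http::TlsAcceptorConfig;

    // Abort TLS handshakes that take longer than three seconds.
    fn acceptor_config() -> TlsAcceptorConfig {
        TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(3))
    }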
@ -309,7 +309,6 @@ mod openssl {
|
|||||||
U::InitError: fmt::Debug,
|
U::InitError: fmt::Debug,
|
||||||
{
|
{
|
||||||
/// Create OpenSSL based service.
|
/// Create OpenSSL based service.
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "openssl")))]
|
|
||||||
pub fn openssl(
|
pub fn openssl(
|
||||||
self,
|
self,
|
||||||
acceptor: SslAcceptor,
|
acceptor: SslAcceptor,
|
||||||
@ -324,7 +323,6 @@ mod openssl {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Create OpenSSL based service with custom TLS acceptor configuration.
|
/// Create OpenSSL based service with custom TLS acceptor configuration.
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "openssl")))]
|
|
||||||
pub fn openssl_with_config(
|
pub fn openssl_with_config(
|
||||||
self,
|
self,
|
||||||
acceptor: SslAcceptor,
|
acceptor: SslAcceptor,
|
||||||
@ -366,13 +364,13 @@ mod openssl {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "rustls")]
|
#[cfg(feature = "rustls-0_20")]
|
||||||
mod rustls {
|
mod rustls_0_20 {
|
||||||
use std::io;
|
use std::io;
|
||||||
|
|
||||||
use actix_service::ServiceFactoryExt as _;
|
use actix_service::ServiceFactoryExt as _;
|
||||||
use actix_tls::accept::{
|
use actix_tls::accept::{
|
||||||
rustls::{reexports::ServerConfig, Acceptor, TlsStream},
|
rustls_0_20::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
TlsError,
|
TlsError,
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -403,8 +401,7 @@ mod rustls {
|
|||||||
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
||||||
U::InitError: fmt::Debug,
|
U::InitError: fmt::Debug,
|
||||||
{
|
{
|
||||||
/// Create Rustls based service.
|
/// Create Rustls v0.20 based service.
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "rustls")))]
|
|
||||||
pub fn rustls(
|
pub fn rustls(
|
||||||
self,
|
self,
|
||||||
config: ServerConfig,
|
config: ServerConfig,
|
||||||
@ -418,8 +415,7 @@ mod rustls {
|
|||||||
self.rustls_with_config(config, TlsAcceptorConfig::default())
|
self.rustls_with_config(config, TlsAcceptorConfig::default())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create Rustls based service with custom TLS acceptor configuration.
|
/// Create Rustls v0.20 based service with custom TLS acceptor configuration.
|
||||||
#[cfg_attr(docsrs, doc(cfg(feature = "rustls")))]
|
|
||||||
pub fn rustls_with_config(
|
pub fn rustls_with_config(
|
||||||
self,
|
self,
|
||||||
mut config: ServerConfig,
|
mut config: ServerConfig,
|
||||||
@ -464,6 +460,294 @@ mod rustls {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rustls-0_21")]
|
||||||
|
mod rustls_0_21 {
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_service::ServiceFactoryExt as _;
|
||||||
|
use actix_tls::accept::{
|
||||||
|
rustls_0_21::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
|
TlsError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl<S, B, X, U> HttpService<TlsStream<TcpStream>, S, B, X, U>
|
||||||
|
where
|
||||||
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
S::Future: 'static,
|
||||||
|
S::Error: Into<Response<BoxBody>> + 'static,
|
||||||
|
S::InitError: fmt::Debug,
|
||||||
|
S::Response: Into<Response<B>> + 'static,
|
||||||
|
<S::Service as Service<Request>>::Future: 'static,
|
||||||
|
|
||||||
|
B: MessageBody + 'static,
|
||||||
|
|
||||||
|
X: ServiceFactory<Request, Config = (), Response = Request>,
|
||||||
|
X::Future: 'static,
|
||||||
|
X::Error: Into<Response<BoxBody>>,
|
||||||
|
X::InitError: fmt::Debug,
|
||||||
|
|
||||||
|
U: ServiceFactory<
|
||||||
|
(Request, Framed<TlsStream<TcpStream>, h1::Codec>),
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
>,
|
||||||
|
U::Future: 'static,
|
||||||
|
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
||||||
|
U::InitError: fmt::Debug,
|
||||||
|
{
|
||||||
|
/// Create Rustls v0.21 based service.
|
||||||
|
pub fn rustls_021(
|
||||||
|
self,
|
||||||
|
config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
self.rustls_021_with_config(config, TlsAcceptorConfig::default())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create Rustls v0.21 based service with custom TLS acceptor configuration.
|
||||||
|
pub fn rustls_021_with_config(
|
||||||
|
self,
|
||||||
|
mut config: ServerConfig,
|
||||||
|
tls_acceptor_config: TlsAcceptorConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
let mut protos = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
|
||||||
|
protos.extend_from_slice(&config.alpn_protocols);
|
||||||
|
config.alpn_protocols = protos;
|
||||||
|
|
||||||
|
let mut acceptor = Acceptor::new(config);
|
||||||
|
|
||||||
|
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
|
||||||
|
acceptor.set_handshake_timeout(handshake_timeout);
|
||||||
|
}
|
||||||
|
|
||||||
|
acceptor
|
||||||
|
.map_init_err(|_| {
|
||||||
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
|
})
|
||||||
|
.map_err(TlsError::into_service_error)
|
||||||
|
.and_then(|io: TlsStream<TcpStream>| async {
|
||||||
|
let proto = if let Some(protos) = io.get_ref().1.alpn_protocol() {
|
||||||
|
if protos.windows(2).any(|window| window == b"h2") {
|
||||||
|
Protocol::Http2
|
||||||
|
} else {
|
||||||
|
Protocol::Http1
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Protocol::Http1
|
||||||
|
};
|
||||||
|
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||||
|
Ok((io, proto, peer_addr))
|
||||||
|
})
|
||||||
|
.and_then(self.map_err(TlsError::Service))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rustls-0_22")]
|
||||||
|
mod rustls_0_22 {
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_service::ServiceFactoryExt as _;
|
||||||
|
use actix_tls::accept::{
|
||||||
|
rustls_0_22::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
|
TlsError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl<S, B, X, U> HttpService<TlsStream<TcpStream>, S, B, X, U>
|
||||||
|
where
|
||||||
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
S::Future: 'static,
|
||||||
|
S::Error: Into<Response<BoxBody>> + 'static,
|
||||||
|
S::InitError: fmt::Debug,
|
||||||
|
S::Response: Into<Response<B>> + 'static,
|
||||||
|
<S::Service as Service<Request>>::Future: 'static,
|
||||||
|
|
||||||
|
B: MessageBody + 'static,
|
||||||
|
|
||||||
|
X: ServiceFactory<Request, Config = (), Response = Request>,
|
||||||
|
X::Future: 'static,
|
||||||
|
X::Error: Into<Response<BoxBody>>,
|
||||||
|
X::InitError: fmt::Debug,
|
||||||
|
|
||||||
|
U: ServiceFactory<
|
||||||
|
(Request, Framed<TlsStream<TcpStream>, h1::Codec>),
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
>,
|
||||||
|
U::Future: 'static,
|
||||||
|
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
||||||
|
U::InitError: fmt::Debug,
|
||||||
|
{
|
||||||
|
/// Create Rustls v0.22 based service.
|
||||||
|
pub fn rustls_0_22(
|
||||||
|
self,
|
||||||
|
config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
self.rustls_0_22_with_config(config, TlsAcceptorConfig::default())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create Rustls v0.22 based service with custom TLS acceptor configuration.
|
||||||
|
pub fn rustls_0_22_with_config(
|
||||||
|
self,
|
||||||
|
mut config: ServerConfig,
|
||||||
|
tls_acceptor_config: TlsAcceptorConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
let mut protos = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
|
||||||
|
protos.extend_from_slice(&config.alpn_protocols);
|
||||||
|
config.alpn_protocols = protos;
|
||||||
|
|
||||||
|
let mut acceptor = Acceptor::new(config);
|
||||||
|
|
||||||
|
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
|
||||||
|
acceptor.set_handshake_timeout(handshake_timeout);
|
||||||
|
}
|
||||||
|
|
||||||
|
acceptor
|
||||||
|
.map_init_err(|_| {
|
||||||
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
|
})
|
||||||
|
.map_err(TlsError::into_service_error)
|
||||||
|
.and_then(|io: TlsStream<TcpStream>| async {
|
||||||
|
let proto = if let Some(protos) = io.get_ref().1.alpn_protocol() {
|
||||||
|
if protos.windows(2).any(|window| window == b"h2") {
|
||||||
|
Protocol::Http2
|
||||||
|
} else {
|
||||||
|
Protocol::Http1
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Protocol::Http1
|
||||||
|
};
|
||||||
|
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||||
|
Ok((io, proto, peer_addr))
|
||||||
|
})
|
||||||
|
.and_then(self.map_err(TlsError::Service))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rustls-0_23")]
|
||||||
|
mod rustls_0_23 {
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_service::ServiceFactoryExt as _;
|
||||||
|
use actix_tls::accept::{
|
||||||
|
rustls_0_23::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
|
TlsError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl<S, B, X, U> HttpService<TlsStream<TcpStream>, S, B, X, U>
|
||||||
|
where
|
||||||
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
S::Future: 'static,
|
||||||
|
S::Error: Into<Response<BoxBody>> + 'static,
|
||||||
|
S::InitError: fmt::Debug,
|
||||||
|
S::Response: Into<Response<B>> + 'static,
|
||||||
|
<S::Service as Service<Request>>::Future: 'static,
|
||||||
|
|
||||||
|
B: MessageBody + 'static,
|
||||||
|
|
||||||
|
X: ServiceFactory<Request, Config = (), Response = Request>,
|
||||||
|
X::Future: 'static,
|
||||||
|
X::Error: Into<Response<BoxBody>>,
|
||||||
|
X::InitError: fmt::Debug,
|
||||||
|
|
||||||
|
U: ServiceFactory<
|
||||||
|
(Request, Framed<TlsStream<TcpStream>, h1::Codec>),
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
>,
|
||||||
|
U::Future: 'static,
|
||||||
|
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
||||||
|
U::InitError: fmt::Debug,
|
||||||
|
{
|
||||||
|
/// Create Rustls v0.23 based service.
|
||||||
|
pub fn rustls_0_23(
|
||||||
|
self,
|
||||||
|
config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
self.rustls_0_23_with_config(config, TlsAcceptorConfig::default())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create Rustls v0.23 based service with custom TLS acceptor configuration.
|
||||||
|
pub fn rustls_0_23_with_config(
|
||||||
|
self,
|
||||||
|
mut config: ServerConfig,
|
||||||
|
tls_acceptor_config: TlsAcceptorConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
let mut protos = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
|
||||||
|
protos.extend_from_slice(&config.alpn_protocols);
|
||||||
|
config.alpn_protocols = protos;
|
||||||
|
|
||||||
|
let mut acceptor = Acceptor::new(config);
|
||||||
|
|
||||||
|
if let Some(handshake_timeout) = tls_acceptor_config.handshake_timeout {
|
||||||
|
acceptor.set_handshake_timeout(handshake_timeout);
|
||||||
|
}
|
||||||
|
|
||||||
|
acceptor
|
||||||
|
.map_init_err(|_| {
|
||||||
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
|
})
|
||||||
|
.map_err(TlsError::into_service_error)
|
||||||
|
.and_then(|io: TlsStream<TcpStream>| async {
|
||||||
|
let proto = if let Some(protos) = io.get_ref().1.alpn_protocol() {
|
||||||
|
if protos.windows(2).any(|window| window == b"h2") {
|
||||||
|
Protocol::Http2
|
||||||
|
} else {
|
||||||
|
Protocol::Http1
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Protocol::Http1
|
||||||
|
};
|
||||||
|
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||||
|
Ok((io, proto, peer_addr))
|
||||||
|
})
|
||||||
|
.and_then(self.map_err(TlsError::Service))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<T, S, B, X, U> ServiceFactory<(T, Protocol, Option<net::SocketAddr>)>
|
impl<T, S, B, X, U> ServiceFactory<(T, Protocol, Option<net::SocketAddr>)>
|
||||||
for HttpService<T, S, B, X, U>
|
for HttpService<T, S, B, X, U>
|
||||||
where
|
where
|
||||||
@ -569,10 +853,7 @@ where
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn _poll_ready(
|
pub(super) fn _poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Response<BoxBody>>> {
|
||||||
&self,
|
|
||||||
cx: &mut Context<'_>,
|
|
||||||
) -> Poll<Result<(), Response<BoxBody>>> {
|
|
||||||
ready!(self.flow.expect.poll_ready(cx).map_err(Into::into))?;
|
ready!(self.flow.expect.poll_ready(cx).map_err(Into::into))?;
|
||||||
|
|
||||||
ready!(self.flow.service.poll_ready(cx).map_err(Into::into))?;
|
ready!(self.flow.service.poll_ready(cx).map_err(Into::into))?;
|
||||||
@ -631,10 +912,7 @@ where
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn call(
|
fn call(&self, (io, proto, peer_addr): (T, Protocol, Option<net::SocketAddr>)) -> Self::Future {
|
||||||
&self,
|
|
||||||
(io, proto, peer_addr): (T, Protocol, Option<net::SocketAddr>),
|
|
||||||
) -> Self::Future {
|
|
||||||
let conn_data = OnConnectData::from_io(&io, self.on_connect_ext.as_deref());
|
let conn_data = OnConnectData::from_io(&io, self.on_connect_ext.as_deref());
|
||||||
|
|
||||||
match proto {
|
match proto {
|
||||||
|
@ -74,6 +74,7 @@ pub struct Codec {
|
|||||||
}
|
}
|
||||||
|
|
||||||
bitflags! {
|
bitflags! {
|
||||||
|
#[derive(Debug, Clone, Copy)]
|
||||||
struct Flags: u8 {
|
struct Flags: u8 {
|
||||||
const SERVER = 0b0000_0001;
|
const SERVER = 0b0000_0001;
|
||||||
const CONTINUATION = 0b0000_0010;
|
const CONTINUATION = 0b0000_0010;
|
||||||
@ -295,7 +296,7 @@ impl Decoder for Codec {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok(None) => Ok(None),
|
Ok(None) => Ok(None),
|
||||||
Err(e) => Err(e),
|
Err(err) => Err(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -70,15 +70,14 @@ mod inner {
|
|||||||
task::{Context, Poll},
|
task::{Context, Poll},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use actix_codec::Framed;
|
||||||
use actix_service::{IntoService, Service};
|
use actix_service::{IntoService, Service};
|
||||||
use futures_core::stream::Stream;
|
use futures_core::stream::Stream;
|
||||||
use local_channel::mpsc;
|
use local_channel::mpsc;
|
||||||
use pin_project_lite::pin_project;
|
use pin_project_lite::pin_project;
|
||||||
use tracing::debug;
|
|
||||||
|
|
||||||
use actix_codec::Framed;
|
|
||||||
use tokio::io::{AsyncRead, AsyncWrite};
|
use tokio::io::{AsyncRead, AsyncWrite};
|
||||||
use tokio_util::codec::{Decoder, Encoder};
|
use tokio_util::codec::{Decoder, Encoder};
|
||||||
|
use tracing::debug;
|
||||||
|
|
||||||
use crate::{body::BoxBody, Response};
|
use crate::{body::BoxBody, Response};
|
||||||
|
|
||||||
@ -413,9 +412,7 @@ mod inner {
|
|||||||
}
|
}
|
||||||
State::Error(_) => {
|
State::Error(_) => {
|
||||||
// flush write buffer
|
// flush write buffer
|
||||||
if !this.framed.is_write_buf_empty()
|
if !this.framed.is_write_buf_empty() && this.framed.flush(cx).is_pending() {
|
||||||
&& this.framed.flush(cx).is_pending()
|
|
||||||
{
|
|
||||||
return Poll::Pending;
|
return Poll::Pending;
|
||||||
}
|
}
|
||||||
Poll::Ready(Err(this.state.take_error()))
|
Poll::Ready(Err(this.state.take_error()))
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
use std::convert::TryFrom;
|
use std::cmp::min;
|
||||||
|
|
||||||
use bytes::{Buf, BufMut, BytesMut};
|
use bytes::{Buf, BufMut, BytesMut};
|
||||||
use tracing::debug;
|
use tracing::debug;
|
||||||
@ -96,6 +96,10 @@ impl Parser {
|
|||||||
|
|
||||||
// not enough data
|
// not enough data
|
||||||
if src.len() < idx + length {
|
if src.len() < idx + length {
|
||||||
|
let min_length = min(length, max_size);
|
||||||
|
if src.capacity() < idx + min_length {
|
||||||
|
src.reserve(idx + min_length - src.capacity());
|
||||||
|
}
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -174,14 +178,14 @@ impl Parser {
|
|||||||
};
|
};
|
||||||
|
|
||||||
if payload_len < 126 {
|
if payload_len < 126 {
|
||||||
dst.reserve(p_len + 2 + if mask { 4 } else { 0 });
|
dst.reserve(p_len + 2);
|
||||||
dst.put_slice(&[one, two | payload_len as u8]);
|
dst.put_slice(&[one, two | payload_len as u8]);
|
||||||
} else if payload_len <= 65_535 {
|
} else if payload_len <= 65_535 {
|
||||||
dst.reserve(p_len + 4 + if mask { 4 } else { 0 });
|
dst.reserve(p_len + 4);
|
||||||
dst.put_slice(&[one, two | 126]);
|
dst.put_slice(&[one, two | 126]);
|
||||||
dst.put_u16(payload_len as u16);
|
dst.put_u16(payload_len as u16);
|
||||||
} else {
|
} else {
|
||||||
dst.reserve(p_len + 10 + if mask { 4 } else { 0 });
|
dst.reserve(p_len + 10);
|
||||||
dst.put_slice(&[one, two | 127]);
|
dst.put_slice(&[one, two | 127]);
|
||||||
dst.put_u64(payload_len as u64);
|
dst.put_u64(payload_len as u64);
|
||||||
};
|
};
|
||||||
@ -217,9 +221,10 @@ impl Parser {
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
|
||||||
use bytes::Bytes;
|
use bytes::Bytes;
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
struct F {
|
struct F {
|
||||||
finished: bool,
|
finished: bool,
|
||||||
opcode: OpCode,
|
opcode: OpCode,
|
||||||
|
@ -50,7 +50,7 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_apply_mask() {
|
fn test_apply_mask() {
|
||||||
let mask = [0x6d, 0xb6, 0xb2, 0x80];
|
let mask = [0x6d, 0xb6, 0xb2, 0x80];
|
||||||
let unmasked = vec![
|
let unmasked = [
|
||||||
0xf3, 0x00, 0x01, 0x02, 0x03, 0x80, 0x81, 0x82, 0xff, 0xfe, 0x00, 0x17, 0x74, 0xf9,
|
0xf3, 0x00, 0x01, 0x02, 0x03, 0x80, 0x81, 0x82, 0xff, 0xfe, 0x00, 0x17, 0x74, 0xf9,
|
||||||
0x12, 0x03,
|
0x12, 0x03,
|
||||||
];
|
];
|
||||||
|
@ -8,8 +8,7 @@ use std::io;
|
|||||||
use derive_more::{Display, Error, From};
|
use derive_more::{Display, Error, From};
|
||||||
use http::{header, Method, StatusCode};
|
use http::{header, Method, StatusCode};
|
||||||
|
|
||||||
use crate::body::BoxBody;
|
use crate::{body::BoxBody, header::HeaderValue, RequestHead, Response, ResponseBuilder};
|
||||||
use crate::{header::HeaderValue, RequestHead, Response, ResponseBuilder};
|
|
||||||
|
|
||||||
mod codec;
|
mod codec;
|
||||||
mod dispatcher;
|
mod dispatcher;
|
||||||
@ -17,48 +16,50 @@ mod frame;
|
|||||||
mod mask;
|
mod mask;
|
||||||
mod proto;
|
mod proto;
|
||||||
|
|
||||||
pub use self::codec::{Codec, Frame, Item, Message};
|
pub use self::{
|
||||||
pub use self::dispatcher::Dispatcher;
|
codec::{Codec, Frame, Item, Message},
|
||||||
pub use self::frame::Parser;
|
dispatcher::Dispatcher,
|
||||||
pub use self::proto::{hash_key, CloseCode, CloseReason, OpCode};
|
frame::Parser,
|
||||||
|
proto::{hash_key, CloseCode, CloseReason, OpCode},
|
||||||
|
};
|
||||||
|
|
||||||
/// WebSocket protocol errors.
|
/// WebSocket protocol errors.
|
||||||
#[derive(Debug, Display, Error, From)]
|
#[derive(Debug, Display, Error, From)]
|
||||||
pub enum ProtocolError {
|
pub enum ProtocolError {
|
||||||
/// Received an unmasked frame from client.
|
/// Received an unmasked frame from client.
|
||||||
#[display(fmt = "Received an unmasked frame from client.")]
|
#[display(fmt = "received an unmasked frame from client")]
|
||||||
UnmaskedFrame,
|
UnmaskedFrame,
|
||||||
|
|
||||||
/// Received a masked frame from server.
|
/// Received a masked frame from server.
|
||||||
#[display(fmt = "Received a masked frame from server.")]
|
#[display(fmt = "received a masked frame from server")]
|
||||||
MaskedFrame,
|
MaskedFrame,
|
||||||
|
|
||||||
/// Encountered invalid opcode.
|
/// Encountered invalid opcode.
|
||||||
#[display(fmt = "Invalid opcode: {}.", _0)]
|
#[display(fmt = "invalid opcode ({})", _0)]
|
||||||
InvalidOpcode(#[error(not(source))] u8),
|
InvalidOpcode(#[error(not(source))] u8),
|
||||||
|
|
||||||
/// Invalid control frame length
|
/// Invalid control frame length
|
||||||
#[display(fmt = "Invalid control frame length: {}.", _0)]
|
#[display(fmt = "invalid control frame length ({})", _0)]
|
||||||
InvalidLength(#[error(not(source))] usize),
|
InvalidLength(#[error(not(source))] usize),
|
||||||
|
|
||||||
/// Bad opcode.
|
/// Bad opcode.
|
||||||
#[display(fmt = "Bad opcode.")]
|
#[display(fmt = "bad opcode")]
|
||||||
BadOpCode,
|
BadOpCode,
|
||||||
|
|
||||||
/// A payload reached size limit.
|
/// A payload reached size limit.
|
||||||
#[display(fmt = "A payload reached size limit.")]
|
#[display(fmt = "payload reached size limit")]
|
||||||
Overflow,
|
Overflow,
|
||||||
|
|
||||||
/// Continuation is not started.
|
/// Continuation has not started.
|
||||||
#[display(fmt = "Continuation is not started.")]
|
#[display(fmt = "continuation has not started")]
|
||||||
ContinuationNotStarted,
|
ContinuationNotStarted,
|
||||||
|
|
||||||
/// Received new continuation but it is already started.
|
/// Received new continuation but it is already started.
|
||||||
#[display(fmt = "Received new continuation but it is already started.")]
|
#[display(fmt = "received new continuation but it has already started")]
|
||||||
ContinuationStarted,
|
ContinuationStarted,
|
||||||
|
|
||||||
/// Unknown continuation fragment.
|
/// Unknown continuation fragment.
|
||||||
#[display(fmt = "Unknown continuation fragment: {}.", _0)]
|
#[display(fmt = "unknown continuation fragment: {}", _0)]
|
||||||
ContinuationFragment(#[error(not(source))] OpCode),
|
ContinuationFragment(#[error(not(source))] OpCode),
|
||||||
|
|
||||||
/// I/O error.
|
/// I/O error.
|
||||||
@ -70,27 +71,27 @@ pub enum ProtocolError {
|
|||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Display, Error)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Display, Error)]
|
||||||
pub enum HandshakeError {
|
pub enum HandshakeError {
|
||||||
/// Only the GET method is allowed.
|
/// Only the GET method is allowed.
|
||||||
#[display(fmt = "Method not allowed.")]
|
#[display(fmt = "method not allowed")]
|
||||||
GetMethodRequired,
|
GetMethodRequired,
|
||||||
|
|
||||||
/// Upgrade header is not set to WebSocket.
|
/// Upgrade header is not set to WebSocket.
|
||||||
#[display(fmt = "WebSocket upgrade is expected.")]
|
#[display(fmt = "WebSocket upgrade is expected")]
|
||||||
NoWebsocketUpgrade,
|
NoWebsocketUpgrade,
|
||||||
|
|
||||||
/// Connection header is not set to upgrade.
|
/// Connection header is not set to upgrade.
|
||||||
#[display(fmt = "Connection upgrade is expected.")]
|
#[display(fmt = "connection upgrade is expected")]
|
||||||
NoConnectionUpgrade,
|
NoConnectionUpgrade,
|
||||||
|
|
||||||
/// WebSocket version header is not set.
|
/// WebSocket version header is not set.
|
||||||
#[display(fmt = "WebSocket version header is required.")]
|
#[display(fmt = "WebSocket version header is required")]
|
||||||
NoVersionHeader,
|
NoVersionHeader,
|
||||||
|
|
||||||
/// Unsupported WebSocket version.
|
/// Unsupported WebSocket version.
|
||||||
#[display(fmt = "Unsupported WebSocket version.")]
|
#[display(fmt = "unsupported WebSocket version")]
|
||||||
UnsupportedVersion,
|
UnsupportedVersion,
|
||||||
|
|
||||||
/// WebSocket key is not set or wrong.
|
/// WebSocket key is not set or wrong.
|
||||||
#[display(fmt = "Unknown websocket key.")]
|
#[display(fmt = "unknown WebSocket key")]
|
||||||
BadWebsocketKey,
|
BadWebsocketKey,
|
||||||
}
|
}
|
||||||
|
|
||||||
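To show how these checks fit together on the happy path, a minimal sketch (header values are example data; assumes the `ws` feature) that validates an upgrade request and builds the switching-protocols response:

    use actix_http::{test::TestRequest, ws, StatusCode};

    fn websocket_upgrade() {
        let req = TestRequest::default()
            .insert_header(("upgrade", "websocket"))
            .insert_header(("connection", "upgrade"))
            .insert_header(("sec-websocket-version", "13"))
            .insert_header(("sec-websocket-key", "dGhlIHNhbXBsZSBub25jZQ=="))
            .finish();

        // `handshake` verifies the request (returning the HandshakeError variants
        // above on failure) and prepares a 101 Switching Protocols response.
        let res = ws::handshake(req.head()).unwrap().finish();
        assert_eq!(res.status(), StatusCode::SWITCHING_PROTOCOLS);
    }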
@ -219,10 +220,8 @@ pub fn handshake_response(req: &RequestHead) -> ResponseBuilder {
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use crate::{header, Method};
|
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::test::TestRequest;
|
use crate::{header, test::TestRequest};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_handshake() {
|
fn test_handshake() {
|
||||||
|
@ -1,8 +1,6 @@
|
|||||||
use std::{
|
use std::fmt;
|
||||||
convert::{From, Into},
|
|
||||||
fmt,
|
|
||||||
};
|
|
||||||
|
|
||||||
|
use base64::prelude::*;
|
||||||
use tracing::error;
|
use tracing::error;
|
||||||
|
|
||||||
/// Operation codes defined in [RFC 6455 §11.8].
|
/// Operation codes defined in [RFC 6455 §11.8].
|
||||||
@ -244,7 +242,7 @@ pub fn hash_key(key: &[u8]) -> [u8; 28] {
|
|||||||
};
|
};
|
||||||
|
|
||||||
let mut hash_b64 = [0; 28];
|
let mut hash_b64 = [0; 28];
|
||||||
let n = base64::encode_config_slice(hash, base64::STANDARD, &mut hash_b64);
|
let n = BASE64_STANDARD.encode_slice(hash, &mut hash_b64).unwrap();
|
||||||
assert_eq!(n, 28);
|
assert_eq!(n, 28);
|
||||||
|
|
||||||
hash_b64
|
hash_b64
|
||||||
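Finally, a short sketch (key taken from the RFC 6455 example; assumes the `ws` feature) of the `hash_key` helper whose base64 step is updated above — it derives the `Sec-WebSocket-Accept` value from a client's `Sec-WebSocket-Key`:

    use actix_http::ws::hash_key;

    fn accept_key() {
        // RFC 6455 example: SHA-1 of (key ++ GUID), base64-encoded, is 28 bytes.
        let accept = hash_key(b"dGhlIHNhbXBsZSBub25jZQ==");
        assert_eq!(&accept, b"s3pPLMBiTxaQ9kYGzzhZRbK+xOo=");
    }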
Some files were not shown because too many files have changed in this diff.