Mirror of https://github.com/fafhrd91/actix-web (synced 2025-07-04 01:51:30 +02:00)

Compare commits: files-v0.3 ... web-v3.1.0 (93 commits)
@ -1,41 +0,0 @@
|
||||
environment:
|
||||
global:
|
||||
PROJECT_NAME: actix-web
|
||||
matrix:
|
||||
# Stable channel
|
||||
- TARGET: i686-pc-windows-msvc
|
||||
CHANNEL: stable
|
||||
- TARGET: x86_64-pc-windows-gnu
|
||||
CHANNEL: stable
|
||||
- TARGET: x86_64-pc-windows-msvc
|
||||
CHANNEL: stable
|
||||
# Nightly channel
|
||||
- TARGET: i686-pc-windows-msvc
|
||||
CHANNEL: nightly
|
||||
- TARGET: x86_64-pc-windows-gnu
|
||||
CHANNEL: nightly
|
||||
- TARGET: x86_64-pc-windows-msvc
|
||||
CHANNEL: nightly
|
||||
|
||||
# Install Rust and Cargo
|
||||
# (Based on from https://github.com/rust-lang/libc/blob/master/appveyor.yml)
|
||||
install:
|
||||
- ps: >-
|
||||
If ($Env:TARGET -eq 'x86_64-pc-windows-gnu') {
|
||||
$Env:PATH += ';C:\msys64\mingw64\bin'
|
||||
} ElseIf ($Env:TARGET -eq 'i686-pc-windows-gnu') {
|
||||
$Env:PATH += ';C:\MinGW\bin'
|
||||
}
|
||||
- curl -sSf -o rustup-init.exe https://win.rustup.rs
|
||||
- rustup-init.exe --default-host %TARGET% --default-toolchain %CHANNEL% -y
|
||||
- set PATH=%PATH%;C:\Users\appveyor\.cargo\bin
|
||||
- rustc -Vv
|
||||
- cargo -V
|
||||
|
||||
# 'cargo test' takes care of building for us, so disable Appveyor's build stage.
|
||||
build: false
|
||||
|
||||
# Equivalent to Travis' `script` phase
|
||||
test_script:
|
||||
- cargo clean
|
||||
- cargo test --no-default-features --features="flate2-rust"
|
4  .github/ISSUE_TEMPLATE/bug_report.md  (vendored)
@ -1,6 +1,6 @@
|
||||
---
|
||||
name: bug report
|
||||
about: create a bug report
|
||||
name: Bug Report
|
||||
about: Create a bug report.
|
||||
---
|
||||
|
||||
Your issue may already be reported!
|
||||
|
27  .github/PULL_REQUEST_TEMPLATE.md  (vendored, Normal file)
@ -0,0 +1,27 @@
|
||||
<!-- Thanks for considering contributing actix! -->
|
||||
<!-- Please fill out the following to make our reviews easy. -->
|
||||
|
||||
## PR Type
|
||||
<!-- What kind of change does this PR make? -->
|
||||
<!-- Bug Fix / Feature / Refactor / Code Style / Other -->
|
||||
INSERT_PR_TYPE
|
||||
|
||||
|
||||
## PR Checklist
|
||||
Check your PR fulfills the following:
|
||||
|
||||
<!-- For draft PRs check the boxes as you complete them. -->
|
||||
|
||||
- [ ] Tests for the changes have been added / updated.
|
||||
- [ ] Documentation comments have been added / updated.
|
||||
- [ ] A changelog entry has been made for the appropriate packages.
|
||||
- [ ] Format code with the latest stable rustfmt
|
||||
|
||||
|
||||
## Overview
|
||||
<!-- Describe the current and new behavior. -->
|
||||
<!-- Emphasize any breaking changes. -->
|
||||
|
||||
|
||||
<!-- If this PR fixes or closes an issue, reference it here. -->
|
||||
<!-- Closes #000 -->
|
9  .github/workflows/bench.yml  (vendored)
@ -1,13 +1,18 @@
|
||||
name: Benchmark (Linux)
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
check_benchmark:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@master
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rs/toolchain@v1
|
||||
|
32  .github/workflows/clippy-fmt.yml  (vendored, Normal file)
@ -0,0 +1,32 @@
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
name: Clippy and rustfmt Check
|
||||
jobs:
|
||||
clippy_check:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
components: rustfmt
|
||||
override: true
|
||||
- name: Check with rustfmt
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: fmt
|
||||
args: --all -- --check
|
||||
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: nightly
|
||||
components: clippy
|
||||
override: true
|
||||
- name: Check with Clippy
|
||||
uses: actions-rs/clippy-check@v1
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
args: --all-features --all --tests
|
13  .github/workflows/linux.yml  (vendored)
@ -1,6 +1,11 @@
|
||||
name: CI (Linux)
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
build_and_test:
|
||||
@ -8,7 +13,7 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
version:
|
||||
- 1.40.0 # MSRV
|
||||
- 1.42.0 # MSRV
|
||||
- stable
|
||||
- nightly
|
||||
|
||||
@ -16,7 +21,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@master
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install ${{ matrix.version }}
|
||||
uses: actions-rs/toolchain@v1
|
||||
@ -55,7 +60,7 @@ jobs:
|
||||
- name: Generate coverage file
|
||||
if: matrix.version == 'stable' && (github.ref == 'refs/heads/master' || github.event_name == 'pull_request')
|
||||
run: |
|
||||
cargo install cargo-tarpaulin
|
||||
cargo install cargo-tarpaulin --vers "^0.13"
|
||||
cargo tarpaulin --out Xml
|
||||
- name: Upload to Codecov
|
||||
if: matrix.version == 'stable' && (github.ref == 'refs/heads/master' || github.event_name == 'pull_request')
|
||||
|
9  .github/workflows/macos.yml  (vendored)
@ -1,6 +1,11 @@
|
||||
name: CI (macOS)
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
build_and_test:
|
||||
@ -15,7 +20,7 @@ jobs:
|
||||
runs-on: macOS-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@master
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install ${{ matrix.version }}
|
||||
uses: actions-rs/toolchain@v1
|
||||
|
12  .github/workflows/upload-doc.yml  (vendored)
@ -11,7 +11,7 @@ jobs:
|
||||
if: github.repository == 'actix/actix-web'
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@master
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rs/toolchain@v1
|
||||
@ -29,7 +29,9 @@ jobs:
|
||||
- name: Tweak HTML
|
||||
run: echo "<meta http-equiv=refresh content=0;url=os_balloon/index.html>" > target/doc/index.html
|
||||
|
||||
- name: Upload documentation
|
||||
run: |
|
||||
git clone https://github.com/davisp/ghp-import.git
|
||||
./ghp-import/ghp_import.py -n -p -f -m "Documentation upload" -r https://${{ secrets.GITHUB_TOKEN }}@github.com/"${{ github.repository }}.git" target/doc
|
||||
- name: Deploy to GitHub Pages
|
||||
uses: JamesIves/github-pages-deploy-action@3.5.8
|
||||
with:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BRANCH: gh-pages
|
||||
FOLDER: target/doc
|
||||
|
9  .github/workflows/windows.yml  (vendored)
@ -1,6 +1,11 @@
|
||||
name: CI (Windows)
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
env:
|
||||
VCPKGRS_DYNAMIC: 1
|
||||
@ -18,7 +23,7 @@ jobs:
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@master
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install ${{ matrix.version }}
|
||||
uses: actions-rs/toolchain@v1
|
||||
|
4  .gitignore  (vendored)
@ -9,6 +9,10 @@ guide/build/
|
||||
*.pid
|
||||
*.sock
|
||||
*~
|
||||
.DS_Store
|
||||
|
||||
# These are backup files generated by rustfmt
|
||||
**/*.rs.bk
|
||||
|
||||
# Configuration directory generated by CLion
|
||||
.idea
|
||||
|
106  CHANGES.md
@ -1,19 +1,109 @@
|
||||
# Changes
|
||||
|
||||
## [3.0.0-alpha.3] - 2020-05-21
|
||||
## Unreleased - 2020-xx-xx
|
||||
|
||||
|
||||
## 3.1.0 - 2020-09-29
|
||||
### Changed
|
||||
* Add `TrailingSlash::MergeOnly` behaviour to `NormalizePath`, which allows `NormalizePath`
|
||||
to retain any trailing slashes. [#1695]
|
||||
* Remove bound `std::marker::Sized` from `web::Data` to support storing `Arc<dyn Trait>`
|
||||
via `web::Data::from` [#1710]
|
||||
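  For illustration only (not part of the changelog): a minimal sketch of what the relaxed bound enables. The `Greeter` trait and types below are hypothetical.

  ```rust
  use std::sync::Arc;
  use actix_web::{web, App};

  // Hypothetical trait object to share as application data.
  trait Greeter: Send + Sync {
      fn greet(&self) -> String;
  }

  struct EnglishGreeter;

  impl Greeter for EnglishGreeter {
      fn greet(&self) -> String {
          "hello".to_owned()
      }
  }

  fn build_app() {
      // `web::Data<dyn Greeter>` is unsized, which is why the `Sized` bound had to go.
      let greeter: Arc<dyn Greeter> = Arc::new(EnglishGreeter);
      let data: web::Data<dyn Greeter> = web::Data::from(greeter);
      let _app = App::new().app_data(data);
  }
  ```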
|
||||
### Fixed
|
||||
* `ResourceMap` debug printing is no longer infinitely recursive. [#1708]
|
||||
|
||||
[#1695]: https://github.com/actix/actix-web/pull/1695
|
||||
[#1708]: https://github.com/actix/actix-web/pull/1708
|
||||
[#1710]: https://github.com/actix/actix-web/pull/1710
|
||||
|
||||
|
||||
## 3.0.2 - 2020-09-15
|
||||
### Fixed
|
||||
* `NormalizePath` when used with `TrailingSlash::Trim` no longer trims the root path "/". [#1678]
|
||||
|
||||
[#1678]: https://github.com/actix/actix-web/pull/1678
|
||||
|
||||
|
||||
## 3.0.1 - 2020-09-13
|
||||
### Changed
|
||||
* `middleware::normalize::TrailingSlash` enum is now accessible. [#1673]
|
||||
|
||||
[#1673]: https://github.com/actix/actix-web/pull/1673
|
||||
|
||||
|
||||
## 3.0.0 - 2020-09-11
|
||||
* No significant changes from `3.0.0-beta.4`.
|
||||
|
||||
|
||||
## 3.0.0-beta.4 - 2020-09-09
|
||||
### Added
|
||||
* `middleware::NormalizePath` now has configurable behaviour for either always having a trailing
|
||||
slash, or as the new addition, always trimming trailing slashes. [#1639]
|
||||
|
||||
### Changed
|
||||
* Update actix-codec and actix-utils dependencies. [#1634]
|
||||
* `FormConfig` and `JsonConfig` configurations are now also considered when set
|
||||
using `App::data`. [#1641]
|
||||
* `HttpServer::maxconn` is renamed to the more expressive `HttpServer::max_connections`. [#1655]
|
||||
* `HttpServer::maxconnrate` is renamed to the more expressive
|
||||
`HttpServer::max_connection_rate`. [#1655]
|
||||
|
||||
[#1639]: https://github.com/actix/actix-web/pull/1639
|
||||
[#1641]: https://github.com/actix/actix-web/pull/1641
|
||||
[#1634]: https://github.com/actix/actix-web/pull/1634
|
||||
[#1655]: https://github.com/actix/actix-web/pull/1655
|
||||
|
||||
## 3.0.0-beta.3 - 2020-08-17
|
||||
### Changed
|
||||
* Update `rustls` to 0.18
|
||||
|
||||
|
||||
## 3.0.0-beta.2 - 2020-08-17
|
||||
### Changed
|
||||
* `PayloadConfig` is now also considered in `Bytes` and `String` extractors when set
|
||||
using `App::data`. [#1610]
|
||||
* `web::Path` now has a public representation: `web::Path(pub T)` that enables
|
||||
destructuring. [#1594]
|
||||
* `ServiceRequest::app_data` allows retrieval of non-Data data without splitting into parts to
|
||||
access `HttpRequest` which already allows this. [#1618]
|
||||
* Re-export all error types from `awc`. [#1621]
|
||||
* MSRV is now 1.42.0.
|
||||
|
||||
### Fixed
|
||||
* Memory leak of app data in pooled requests. [#1609]
|
||||
|
||||
[#1594]: https://github.com/actix/actix-web/pull/1594
|
||||
[#1609]: https://github.com/actix/actix-web/pull/1609
|
||||
[#1610]: https://github.com/actix/actix-web/pull/1610
|
||||
[#1618]: https://github.com/actix/actix-web/pull/1618
|
||||
[#1621]: https://github.com/actix/actix-web/pull/1621
|
||||
|
||||
|
||||
## 3.0.0-beta.1 - 2020-07-13
|
||||
### Added
|
||||
* Re-export `actix_rt::main` as `actix_web::main`.
|
||||
* `HttpRequest::match_pattern` and `ServiceRequest::match_pattern` for extracting the matched
|
||||
resource pattern.
|
||||
* `HttpRequest::match_name` and `ServiceRequest::match_name` for extracting matched resource name.
|
||||
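  For illustration only (not part of the changelog): a hypothetical handler using the two new accessors.

  ```rust
  use actix_web::{get, HttpRequest, Responder};

  // Hypothetical route; the path and handler name are placeholders.
  #[get("/users/{id}")]
  async fn show_user(req: HttpRequest) -> impl Responder {
      let pattern = req.match_pattern(); // e.g. Some("/users/{id}".to_owned())
      let name = req.match_name().map(str::to_owned); // Some(..) if the resource was named
      format!("pattern: {:?}, name: {:?}", pattern, name)
  }
  ```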
|
||||
### Changed
|
||||
* Fix actix_http::h1::dispatcher so it returns when HW_BUFFER_SIZE is reached. Should reduce peak memory consumption during large uploads. [#1550]
|
||||
* Migrate cookie handling to `cookie` crate. Actix-web no longer requires `ring` dependency.
|
||||
* MSRV is now 1.41.1
|
||||
|
||||
### Fixed
|
||||
* `NormalizePath` improved consistency when path needs slashes added _and_ removed.
|
||||
|
||||
|
||||
## 3.0.0-alpha.3 - 2020-05-21
|
||||
### Added
|
||||
* Add option to create `Data<T>` from `Arc<T>` [#1509]
|
||||
|
||||
### Changed
|
||||
|
||||
* Resources and Scopes can now access non-overridden data types set on App (or containing scopes) when setting their own data. [#1486]
|
||||
|
||||
* Fix audit issue logging by default peer address [#1485]
|
||||
|
||||
* Bump minimum supported Rust version to 1.40
|
||||
|
||||
* Replace deprecated `net2` crate with `socket2`
|
||||
|
||||
[#1485]: https://github.com/actix/actix-web/pull/1485
|
||||
@ -96,7 +186,7 @@
|
||||
|
||||
### Deleted
|
||||
|
||||
* Delete HttpServer::run(), it is not useful witht async/await
|
||||
* Delete HttpServer::run(), it is not useful with async/await
|
||||
|
||||
## [2.0.0-alpha.3] - 2019-12-07
|
||||
|
||||
@ -141,7 +231,7 @@
|
||||
|
||||
### Changed
|
||||
|
||||
* Make UrlEncodedError::Overflow more informativve
|
||||
* Make UrlEncodedError::Overflow more informative
|
||||
|
||||
* Use actix-testing for testing utils
|
||||
|
||||
@ -159,7 +249,7 @@
|
||||
|
||||
* Re-implement Host predicate (#989)
|
||||
|
||||
* Form immplements Responder, returning a `application/x-www-form-urlencoded` response
|
||||
* Form implements Responder, returning a `application/x-www-form-urlencoded` response
|
||||
|
||||
* Add `into_inner` to `Data`
|
||||
|
||||
|
42  Cargo.toml
@ -1,8 +1,8 @@
|
||||
[package]
|
||||
name = "actix-web"
|
||||
version = "3.0.0-alpha.3"
|
||||
version = "3.1.0"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Actix web is a simple, pragmatic and extremely fast web framework for Rust."
|
||||
description = "Actix web is a powerful, pragmatic, and extremely fast web framework for Rust."
|
||||
readme = "README.md"
|
||||
keywords = ["actix", "http", "web", "framework", "async"]
|
||||
homepage = "https://actix.rs"
|
||||
@ -11,7 +11,7 @@ documentation = "https://docs.rs/actix-web/"
|
||||
categories = ["network-programming", "asynchronous",
|
||||
"web-programming::http-server",
|
||||
"web-programming::websocket"]
|
||||
license = "MIT/Apache-2.0"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
@ -31,7 +31,6 @@ members = [
|
||||
"awc",
|
||||
"actix-http",
|
||||
"actix-files",
|
||||
"actix-framed",
|
||||
"actix-multipart",
|
||||
"actix-web-actors",
|
||||
"actix-web-codegen",
|
||||
@ -44,7 +43,7 @@ default = ["compress"]
|
||||
# content-encoding support
|
||||
compress = ["actix-http/compress", "awc/compress"]
|
||||
|
||||
# sessions feature, session require "ring" crate and c compiler
|
||||
# sessions feature
|
||||
secure-cookies = ["actix-http/secure-cookies"]
|
||||
|
||||
# openssl
|
||||
@ -66,20 +65,20 @@ name = "test_server"
|
||||
required-features = ["compress"]
|
||||
|
||||
[dependencies]
|
||||
actix-codec = "0.2.0"
|
||||
actix-service = "1.0.2"
|
||||
actix-utils = "1.0.6"
|
||||
actix-codec = "0.3.0"
|
||||
actix-service = "1.0.6"
|
||||
actix-utils = "2.0.0"
|
||||
actix-router = "0.2.4"
|
||||
actix-rt = "1.0.0"
|
||||
actix-rt = "1.1.1"
|
||||
actix-server = "1.0.0"
|
||||
actix-testing = "1.0.0"
|
||||
actix-macros = "0.1.0"
|
||||
actix-threadpool = "0.3.1"
|
||||
actix-tls = "2.0.0-alpha.1"
|
||||
actix-tls = "2.0.0"
|
||||
|
||||
actix-web-codegen = "0.2.2"
|
||||
actix-http = "2.0.0-alpha.4"
|
||||
awc = { version = "2.0.0-alpha.2", default-features = false }
|
||||
actix-web-codegen = "0.3.0"
|
||||
actix-http = "2.0.0"
|
||||
awc = { version = "2.0.0", default-features = false }
|
||||
|
||||
bytes = "0.5.3"
|
||||
derive_more = "0.99.2"
|
||||
@ -91,19 +90,20 @@ fxhash = "0.2.1"
|
||||
log = "0.4"
|
||||
mime = "0.3"
|
||||
socket2 = "0.3"
|
||||
pin-project = "0.4.6"
|
||||
pin-project = "0.4.17"
|
||||
regex = "1.3"
|
||||
serde = { version = "1.0", features=["derive"] }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
serde_urlencoded = "0.6.1"
|
||||
time = { version = "0.2.7", default-features = false, features = ["std"] }
|
||||
url = "2.1"
|
||||
open-ssl = { version="0.10", package = "openssl", optional = true }
|
||||
rust-tls = { version = "0.17.0", package = "rustls", optional = true }
|
||||
tinyvec = { version = "0.3", features = ["alloc"] }
|
||||
open-ssl = { package = "openssl", version = "0.10", optional = true }
|
||||
rust-tls = { package = "rustls", version = "0.18.0", optional = true }
|
||||
tinyvec = { version = "1", features = ["alloc"] }
|
||||
|
||||
[dev-dependencies]
|
||||
actix = "0.10.0-alpha.1"
|
||||
actix = "0.10.0"
|
||||
actix-http = { version = "2.0.0", features = ["actors"] }
|
||||
rand = "0.7"
|
||||
env_logger = "0.7"
|
||||
serde_derive = "1.0"
|
||||
@ -125,6 +125,10 @@ actix-files = { path = "actix-files" }
|
||||
actix-multipart = { path = "actix-multipart" }
|
||||
awc = { path = "awc" }
|
||||
|
||||
[[example]]
|
||||
name = "client"
|
||||
required-features = ["rustls"]
|
||||
|
||||
[[bench]]
|
||||
name = "server"
|
||||
harness = false
|
||||
|
@ -186,7 +186,7 @@
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2017-NOW Nikolay Kim
|
||||
Copyright 2017-NOW Actix Team
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
|
@ -1,4 +1,4 @@
|
||||
Copyright (c) 2017 Nikolay Kim
|
||||
Copyright (c) 2017 Actix Team
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
|
43  MIGRATION.md
@ -1,17 +1,60 @@
|
||||
## Unreleased
|
||||
|
||||
|
||||
## 3.0.0
|
||||
|
||||
* The return type for `ServiceRequest::app_data::<T>()` was changed from returning a `Data<T>` to
|
||||
simply a `T`. To access a `Data<T>` use `ServiceRequest::app_data::<Data<T>>()`.
|
||||
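  A minimal sketch of the new lookups (the `inspect` helper and the `u32` data type are hypothetical, for illustration only):

  ```rust
  use actix_web::{dev::ServiceRequest, web};

  // Hypothetical helper inspecting a ServiceRequest in actix-web 3.0.
  fn inspect(req: &ServiceRequest) {
      // Previously this call returned `Data<T>`; it now returns the bare `T`,
      // so ask for the `Data` wrapper explicitly when that is what was stored:
      let _shared: Option<&web::Data<u32>> = req.app_data::<web::Data<u32>>();

      // Plain values registered with `App::app_data(5u32)` come back directly:
      let _plain: Option<&u32> = req.app_data::<u32>();
  }
  ```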
|
||||
* Cookie handling has been offloaded to the `cookie` crate:
|
||||
* `USERINFO_ENCODE_SET` is no longer exposed. Percent-encoding is still supported; check docs.
|
||||
* Some types now require lifetime parameters.
|
||||
|
||||
* The time crate was updated to `v0.2`, a major breaking change to the time crate, which affects
|
||||
any `actix-web` method previously expecting a time v0.1 input.
|
||||
|
||||
* Setting a cookie's SameSite property, explicitly, to `SameSite::None` will now
|
||||
result in `SameSite=None` being sent with the response Set-Cookie header.
|
||||
To create a cookie without a SameSite attribute, remove any calls setting same_site.
|
||||
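  A short sketch of the difference using the re-exported `cookie` API (cookie names and values are placeholders, for illustration only):

  ```rust
  use actix_web::cookie::{Cookie, SameSite};

  fn build_cookies() {
      // Explicitly requesting `SameSite::None` now serializes `SameSite=None`:
      let _explicit = Cookie::build("session", "abc123")
          .same_site(SameSite::None)
          .finish();

      // To omit the attribute entirely, simply don't call `same_site`:
      let _unset = Cookie::build("session", "abc123").finish();
  }
  ```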
|
||||
* actix-http support for Actors messages was moved to actix-http crate and is enabled
|
||||
with feature `actors`
|
||||
|
||||
* content_length function is removed from actix-http.
|
||||
You can set Content-Length by normally setting the response body or calling no_chunking function.
|
||||
|
||||
* `BodySize::Sized64` variant has been removed. `BodySize::Sized` now receives a
|
||||
`u64` instead of a `usize`.
|
||||
|
||||
* Code that was using `path.<index>` to access a `web::Path<(A, B, C)>`s elements now needs to use
|
||||
destructuring or `.into_inner()`. For example:
|
||||
|
||||
```rust
|
||||
// Previously:
|
||||
async fn some_route(path: web::Path<(String, String)>) -> String {
|
||||
format!("Hello, {} {}", path.0, path.1)
|
||||
}
|
||||
|
||||
// Now (this also worked before):
|
||||
async fn some_route(path: web::Path<(String, String)>) -> String {
|
||||
let (first_name, last_name) = path.into_inner();
|
||||
format!("Hello, {} {}", first_name, last_name)
|
||||
}
|
||||
// Or (this wasn't previously supported):
|
||||
async fn some_route(web::Path((first_name, last_name)): web::Path<(String, String)>) -> String {
|
||||
format!("Hello, {} {}", first_name, last_name)
|
||||
}
|
||||
```
|
||||
|
||||
* `middleware::NormalizePath` can now also be configured to trim trailing slashes instead of always keeping one.
|
||||
It will need `middleware::normalize::TrailingSlash` when being constructed with `NormalizePath::new(...)`,
|
||||
or for an easier migration you can replace `wrap(middleware::NormalizePath)` with `wrap(middleware::NormalizePath::new(TrailingSlash::MergeOnly))`.
|
||||
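  A minimal sketch of the replacement suggested above (the wrapping function is hypothetical, for illustration only):

  ```rust
  use actix_web::middleware::{normalize::TrailingSlash, NormalizePath};
  use actix_web::App;

  fn build_app() {
      // Closest drop-in for the old `wrap(middleware::NormalizePath)` per the note above:
      let _merged = App::new().wrap(NormalizePath::new(TrailingSlash::MergeOnly));

      // Or opt into trimming trailing slashes instead:
      let _trimmed = App::new().wrap(NormalizePath::new(TrailingSlash::Trim));
  }
  ```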
|
||||
* `HttpServer::maxconn` is renamed to the more expressive `HttpServer::max_connections`.
|
||||
|
||||
* `HttpServer::maxconnrate` is renamed to the more expressive `HttpServer::max_connection_rate`.
|
||||
|
||||
|
||||
## 2.0.0
|
||||
|
||||
* `HttpServer::start()` renamed to `HttpServer::run()`. It also possible to
|
||||
|
94  README.md
@ -1,50 +1,45 @@
|
||||
<div align="center">
|
||||
<p><h1>Actix web</h1> </p>
|
||||
<p><strong>Actix web is a small, pragmatic, and extremely fast rust web framework</strong> </p>
|
||||
<h1>Actix web</h1>
|
||||
<p>
|
||||
<strong>Actix web is a powerful, pragmatic, and extremely fast web framework for Rust</strong>
|
||||
</p>
|
||||
<p>
|
||||
|
||||
[](https://crates.io/crates/actix-web)
|
||||
[](https://docs.rs/actix-web)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.42.html)
|
||||

|
||||
<br />
|
||||
[](https://travis-ci.org/actix/actix-web)
|
||||
[](https://codecov.io/gh/actix/actix-web)
|
||||
[](https://crates.io/crates/actix-web)
|
||||
[](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://docs.rs/actix-web)
|
||||
[](https://crates.io/crates/actix-web)
|
||||
[](https://blog.rust-lang.org/2019/12/19/Rust-1.40.0.html)
|
||||

|
||||
[](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
|
||||
</p>
|
||||
|
||||
<h3>
|
||||
<a href="https://actix.rs">Website</a>
|
||||
<span> | </span>
|
||||
<a href="https://gitter.im/actix/actix">Chat</a>
|
||||
<span> | </span>
|
||||
<a href="https://github.com/actix/examples">Examples</a>
|
||||
</h3>
|
||||
</div>
|
||||
<br>
|
||||
|
||||
Actix web is a simple, pragmatic and extremely fast web framework for Rust.
|
||||
## Features
|
||||
|
||||
* Supported *HTTP/1.x* and *HTTP/2.0* protocols
|
||||
* Supports *HTTP/1.x* and *HTTP/2*
|
||||
* Streaming and pipelining
|
||||
* Keep-alive and slow requests handling
|
||||
* Client/server [WebSockets](https://actix.rs/docs/websockets/) support
|
||||
* Transparent content compression/decompression (br, gzip, deflate)
|
||||
* Configurable [request routing](https://actix.rs/docs/url-dispatch/)
|
||||
* Powerful [request routing](https://actix.rs/docs/url-dispatch/)
|
||||
* Multipart streams
|
||||
* Static assets
|
||||
* SSL support with OpenSSL or Rustls
|
||||
* SSL support using OpenSSL or Rustls
|
||||
* Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/))
|
||||
* Includes an asynchronous [HTTP client](https://actix.rs/actix-web/actix_web/client/index.html)
|
||||
* Includes an async [HTTP client](https://actix.rs/actix-web/actix_web/client/index.html)
|
||||
* Supports [Actix actor framework](https://github.com/actix/actix)
|
||||
* Supports Rust 1.40+
|
||||
* Runs on stable Rust 1.42+
|
||||
|
||||
## Docs
|
||||
## Documentation
|
||||
|
||||
* [API documentation (master)](https://actix.rs/actix-web/actix_web)
|
||||
* [API documentation (docs.rs)](https://docs.rs/actix-web)
|
||||
* [User guide](https://actix.rs)
|
||||
* [Website & User Guide](https://actix.rs)
|
||||
* [Examples Repository](https://github.com/actix/examples)
|
||||
* [API Documentation](https://docs.rs/actix-web)
|
||||
* [API Documentation (master branch)](https://actix.rs/actix-web/actix_web)
|
||||
|
||||
## Example
|
||||
|
||||
@ -52,8 +47,7 @@ Dependencies:
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
actix-web = "2"
|
||||
actix-rt = "1"
|
||||
actix-web = "3"
|
||||
```
|
||||
|
||||
Code:
|
||||
@ -62,11 +56,11 @@ Code:
|
||||
use actix_web::{get, web, App, HttpServer, Responder};
|
||||
|
||||
#[get("/{id}/{name}/index.html")]
|
||||
async fn index(info: web::Path<(u32, String)>) -> impl Responder {
|
||||
format!("Hello {}! id:{}", info.1, info.0)
|
||||
async fn index(web::Path((id, name)): web::Path<(u32, String)>) -> impl Responder {
|
||||
format!("Hello {}! id:{}", name, id)
|
||||
}
|
||||
|
||||
#[actix_rt::main]
|
||||
#[actix_web::main]
|
||||
async fn main() -> std::io::Result<()> {
|
||||
HttpServer::new(|| App::new().service(index))
|
||||
.bind("127.0.0.1:8080")?
|
||||
@ -77,37 +71,39 @@ async fn main() -> std::io::Result<()> {
|
||||
|
||||
### More examples
|
||||
|
||||
* [Basics](https://github.com/actix/examples/tree/master/basics/)
|
||||
* [Stateful](https://github.com/actix/examples/tree/master/state/)
|
||||
* [Multipart streams](https://github.com/actix/examples/tree/master/multipart/)
|
||||
* [Simple websocket](https://github.com/actix/examples/tree/master/websocket/)
|
||||
* [Tera](https://github.com/actix/examples/tree/master/template_tera/)
|
||||
* [Askama](https://github.com/actix/examples/tree/master/template_askama/) templates
|
||||
* [Diesel integration](https://github.com/actix/examples/tree/master/diesel/)
|
||||
* [r2d2](https://github.com/actix/examples/tree/master/r2d2/)
|
||||
* [OpenSSL](https://github.com/actix/examples/tree/master/openssl/)
|
||||
* [Rustls](https://github.com/actix/examples/tree/master/rustls/)
|
||||
* [Tcp/Websocket chat](https://github.com/actix/examples/tree/master/websocket-chat/)
|
||||
* [Json](https://github.com/actix/examples/tree/master/json/)
|
||||
* [Basic Setup](https://github.com/actix/examples/tree/master/basics/)
|
||||
* [Application State](https://github.com/actix/examples/tree/master/state/)
|
||||
* [JSON Handling](https://github.com/actix/examples/tree/master/json/)
|
||||
* [Multipart Streams](https://github.com/actix/examples/tree/master/multipart/)
|
||||
* [Diesel Integration](https://github.com/actix/examples/tree/master/diesel/)
|
||||
* [r2d2 Integration](https://github.com/actix/examples/tree/master/r2d2/)
|
||||
* [Simple WebSocket](https://github.com/actix/examples/tree/master/websocket/)
|
||||
* [Tera Templates](https://github.com/actix/examples/tree/master/template_tera/)
|
||||
* [Askama Templates](https://github.com/actix/examples/tree/master/template_askama/)
|
||||
* [HTTPS using Rustls](https://github.com/actix/examples/tree/master/rustls/)
|
||||
* [HTTPS using OpenSSL](https://github.com/actix/examples/tree/master/openssl/)
|
||||
* [WebSocket Chat](https://github.com/actix/examples/tree/master/websocket-chat/)
|
||||
|
||||
You may consider checking out
|
||||
[this directory](https://github.com/actix/examples/tree/master/) for more examples.
|
||||
|
||||
## Benchmarks
|
||||
|
||||
* [TechEmpower Framework Benchmark](https://www.techempower.com/benchmarks/#section=data-r18)
|
||||
One of the fastest web frameworks available according to the
|
||||
[TechEmpower Framework Benchmark](https://www.techempower.com/benchmarks/#section=data-r19).
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under either of
|
||||
|
||||
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0))
|
||||
* MIT license ([LICENSE-MIT](LICENSE-MIT) or [http://opensource.org/licenses/MIT](http://opensource.org/licenses/MIT))
|
||||
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
||||
[http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0))
|
||||
* MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
||||
[http://opensource.org/licenses/MIT](http://opensource.org/licenses/MIT))
|
||||
|
||||
at your option.
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
Contribution to the actix-web crate is organized under the terms of the
|
||||
Contributor Covenant, the maintainer of actix-web, @fafhrd91, promises to
|
||||
intervene to uphold that code of conduct.
|
||||
Contribution to the actix-web crate is organized under the terms of the Contributor Covenant, the
|
||||
maintainers of Actix web, promises to intervene to uphold that code of conduct.
|
||||
|
@ -1,11 +0,0 @@
|
||||
# Cors Middleware for actix web framework [](https://travis-ci.org/actix/actix-web) [](https://codecov.io/gh/actix/actix-web) [](https://crates.io/crates/actix-cors) [](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
|
||||
**This crate moved to https://github.com/actix/actix-extras.**
|
||||
|
||||
## Documentation & community resources
|
||||
|
||||
* [User Guide](https://actix.rs/docs/)
|
||||
* [API Documentation](https://docs.rs/actix-cors/)
|
||||
* [Chat on gitter](https://gitter.im/actix/actix)
|
||||
* Cargo package: [actix-cors](https://crates.io/crates/actix-cors)
|
||||
* Minimum supported Rust version: 1.34 or later
|
@ -1,7 +1,12 @@
|
||||
# Changes
|
||||
|
||||
## [0.3.0-alpha.1] - 2020-05-23
|
||||
## [Unreleased] - 2020-xx-xx
|
||||
|
||||
## [0.3.0-beta.1] - 2020-07-15
|
||||
* Update `v_htmlescape` to 0.10
|
||||
* Update `actix-web` and `actix-http` dependencies to beta.1
|
||||
|
||||
## [0.3.0-alpha.1] - 2020-05-23
|
||||
* Update `actix-web` and `actix-http` dependencies to alpha
|
||||
* Fix some typos in the docs
|
||||
* Bump minimum supported Rust version to 1.40
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "actix-files"
|
||||
version = "0.3.0-alpha.1"
|
||||
version = "0.3.0"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Static files support for actix web."
|
||||
readme = "README.md"
|
||||
@ -9,18 +9,16 @@ homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
documentation = "https://docs.rs/actix-files/"
|
||||
categories = ["asynchronous", "web-programming::http-server"]
|
||||
license = "MIT/Apache-2.0"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
workspace = ".."
|
||||
|
||||
[lib]
|
||||
name = "actix_files"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
actix-web = { version = "3.0.0-alpha.3", default-features = false }
|
||||
actix-http = "2.0.0-alpha.4"
|
||||
actix-service = "1.0.1"
|
||||
actix-web = { version = "3.0.0", default-features = false }
|
||||
actix-service = "1.0.6"
|
||||
bitflags = "1"
|
||||
bytes = "0.5.3"
|
||||
futures-core = { version = "0.3.5", default-features = false }
|
||||
@ -30,8 +28,8 @@ log = "0.4"
|
||||
mime = "0.3"
|
||||
mime_guess = "2.0.1"
|
||||
percent-encoding = "2.1"
|
||||
v_htmlescape = "0.4"
|
||||
v_htmlescape = "0.10"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "1.0.0"
|
||||
actix-web = { version = "3.0.0-alpha.3", features = ["openssl"] }
|
||||
actix-web = { version = "3.0.0", features = ["openssl"] }
|
||||
|
94  actix-files/src/chunked.rs  (Normal file)
@ -0,0 +1,94 @@
|
||||
use std::{
|
||||
cmp, fmt,
|
||||
fs::File,
|
||||
future::Future,
|
||||
io::{self, Read, Seek},
|
||||
pin::Pin,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
|
||||
use actix_web::{
|
||||
error::{BlockingError, Error},
|
||||
web,
|
||||
};
|
||||
use bytes::Bytes;
|
||||
use futures_core::{ready, Stream};
|
||||
use futures_util::future::{FutureExt, LocalBoxFuture};
|
||||
|
||||
use crate::handle_error;
|
||||
|
||||
type ChunkedBoxFuture =
|
||||
LocalBoxFuture<'static, Result<(File, Bytes), BlockingError<io::Error>>>;
|
||||
|
||||
#[doc(hidden)]
|
||||
/// A helper created from a `std::fs::File` which reads the file
|
||||
/// chunk-by-chunk on a `ThreadPool`.
|
||||
pub struct ChunkedReadFile {
|
||||
pub(crate) size: u64,
|
||||
pub(crate) offset: u64,
|
||||
pub(crate) file: Option<File>,
|
||||
pub(crate) fut: Option<ChunkedBoxFuture>,
|
||||
pub(crate) counter: u64,
|
||||
}
|
||||
|
||||
impl fmt::Debug for ChunkedReadFile {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("ChunkedReadFile")
|
||||
}
|
||||
}
|
||||
|
||||
impl Stream for ChunkedReadFile {
|
||||
type Item = Result<Bytes, Error>;
|
||||
|
||||
fn poll_next(
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<Option<Self::Item>> {
|
||||
if let Some(ref mut fut) = self.fut {
|
||||
return match ready!(Pin::new(fut).poll(cx)) {
|
||||
Ok((file, bytes)) => {
|
||||
self.fut.take();
|
||||
self.file = Some(file);
|
||||
|
||||
self.offset += bytes.len() as u64;
|
||||
self.counter += bytes.len() as u64;
|
||||
|
||||
Poll::Ready(Some(Ok(bytes)))
|
||||
}
|
||||
Err(e) => Poll::Ready(Some(Err(handle_error(e)))),
|
||||
};
|
||||
}
|
||||
|
||||
let size = self.size;
|
||||
let offset = self.offset;
|
||||
let counter = self.counter;
|
||||
|
||||
if size == counter {
|
||||
Poll::Ready(None)
|
||||
} else {
|
||||
let mut file = self.file.take().expect("Use after completion");
|
||||
|
||||
self.fut = Some(
|
||||
web::block(move || {
|
||||
let max_bytes =
|
||||
cmp::min(size.saturating_sub(counter), 65_536) as usize;
|
||||
|
||||
let mut buf = Vec::with_capacity(max_bytes);
|
||||
file.seek(io::SeekFrom::Start(offset))?;
|
||||
|
||||
let n_bytes =
|
||||
file.by_ref().take(max_bytes as u64).read_to_end(&mut buf)?;
|
||||
|
||||
if n_bytes == 0 {
|
||||
return Err(io::ErrorKind::UnexpectedEof.into());
|
||||
}
|
||||
|
||||
Ok((file, Bytes::from(buf)))
|
||||
})
|
||||
.boxed_local(),
|
||||
);
|
||||
|
||||
self.poll_next(cx)
|
||||
}
|
||||
}
|
||||
}
|
114  actix-files/src/directory.rs  (Normal file)
@ -0,0 +1,114 @@
|
||||
use std::{fmt::Write, fs::DirEntry, io, path::Path, path::PathBuf};
|
||||
|
||||
use actix_web::{dev::ServiceResponse, HttpRequest, HttpResponse};
|
||||
use percent_encoding::{utf8_percent_encode, CONTROLS};
|
||||
use v_htmlescape::escape as escape_html_entity;
|
||||
|
||||
/// A directory; responds with the generated directory listing.
|
||||
#[derive(Debug)]
|
||||
pub struct Directory {
|
||||
/// Base directory.
|
||||
pub base: PathBuf,
|
||||
|
||||
/// Path of subdirectory to generate listing for.
|
||||
pub path: PathBuf,
|
||||
}
|
||||
|
||||
impl Directory {
|
||||
/// Create a new directory
|
||||
pub fn new(base: PathBuf, path: PathBuf) -> Directory {
|
||||
Directory { base, path }
|
||||
}
|
||||
|
||||
/// Is this entry visible from this directory?
|
||||
pub fn is_visible(&self, entry: &io::Result<DirEntry>) -> bool {
|
||||
if let Ok(ref entry) = *entry {
|
||||
if let Some(name) = entry.file_name().to_str() {
|
||||
if name.starts_with('.') {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if let Ok(ref md) = entry.metadata() {
|
||||
let ft = md.file_type();
|
||||
return ft.is_dir() || ft.is_file() || ft.is_symlink();
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) type DirectoryRenderer =
|
||||
dyn Fn(&Directory, &HttpRequest) -> Result<ServiceResponse, io::Error>;
|
||||
|
||||
// show file url as relative to static path
|
||||
macro_rules! encode_file_url {
|
||||
($path:ident) => {
|
||||
utf8_percent_encode(&$path, CONTROLS)
|
||||
};
|
||||
}
|
||||
|
||||
// " -- " & -- & ' -- ' < -- < > -- > / -- /
|
||||
macro_rules! encode_file_name {
|
||||
($entry:ident) => {
|
||||
escape_html_entity(&$entry.file_name().to_string_lossy())
|
||||
};
|
||||
}
|
||||
|
||||
pub(crate) fn directory_listing(
|
||||
dir: &Directory,
|
||||
req: &HttpRequest,
|
||||
) -> Result<ServiceResponse, io::Error> {
|
||||
let index_of = format!("Index of {}", req.path());
|
||||
let mut body = String::new();
|
||||
let base = Path::new(req.path());
|
||||
|
||||
for entry in dir.path.read_dir()? {
|
||||
if dir.is_visible(&entry) {
|
||||
let entry = entry.unwrap();
|
||||
let p = match entry.path().strip_prefix(&dir.path) {
|
||||
Ok(p) if cfg!(windows) => {
|
||||
base.join(p).to_string_lossy().replace("\\", "/")
|
||||
}
|
||||
Ok(p) => base.join(p).to_string_lossy().into_owned(),
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
// if file is a directory, add '/' to the end of the name
|
||||
if let Ok(metadata) = entry.metadata() {
|
||||
if metadata.is_dir() {
|
||||
let _ = write!(
|
||||
body,
|
||||
"<li><a href=\"{}\">{}/</a></li>",
|
||||
encode_file_url!(p),
|
||||
encode_file_name!(entry),
|
||||
);
|
||||
} else {
|
||||
let _ = write!(
|
||||
body,
|
||||
"<li><a href=\"{}\">{}</a></li>",
|
||||
encode_file_url!(p),
|
||||
encode_file_name!(entry),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let html = format!(
|
||||
"<html>\
|
||||
<head><title>{}</title></head>\
|
||||
<body><h1>{}</h1>\
|
||||
<ul>\
|
||||
{}\
|
||||
</ul></body>\n</html>",
|
||||
index_of, index_of, body
|
||||
);
|
||||
Ok(ServiceResponse::new(
|
||||
req.clone(),
|
||||
HttpResponse::Ok()
|
||||
.content_type("text/html; charset=utf-8")
|
||||
.body(html),
|
||||
))
|
||||
}
|
250  actix-files/src/files.rs  (Normal file)
@ -0,0 +1,250 @@
|
||||
use std::{cell::RefCell, fmt, io, path::PathBuf, rc::Rc};
|
||||
|
||||
use actix_service::{boxed, IntoServiceFactory, ServiceFactory};
|
||||
use actix_web::{
|
||||
dev::{
|
||||
AppService, HttpServiceFactory, ResourceDef, ServiceRequest, ServiceResponse,
|
||||
},
|
||||
error::Error,
|
||||
guard::Guard,
|
||||
http::header::DispositionType,
|
||||
HttpRequest,
|
||||
};
|
||||
use futures_util::future::{ok, FutureExt, LocalBoxFuture};
|
||||
|
||||
use crate::{
|
||||
directory_listing, named, Directory, DirectoryRenderer, FilesService,
|
||||
HttpNewService, MimeOverride,
|
||||
};
|
||||
|
||||
/// Static files handling service.
|
||||
///
|
||||
/// `Files` service must be registered with `App::service()` method.
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_web::App;
|
||||
/// use actix_files::Files;
|
||||
///
|
||||
/// let app = App::new()
|
||||
/// .service(Files::new("/static", "."));
|
||||
/// ```
|
||||
pub struct Files {
|
||||
path: String,
|
||||
directory: PathBuf,
|
||||
index: Option<String>,
|
||||
show_index: bool,
|
||||
redirect_to_slash: bool,
|
||||
default: Rc<RefCell<Option<Rc<HttpNewService>>>>,
|
||||
renderer: Rc<DirectoryRenderer>,
|
||||
mime_override: Option<Rc<MimeOverride>>,
|
||||
file_flags: named::Flags,
|
||||
guards: Option<Rc<dyn Guard>>,
|
||||
}
|
||||
|
||||
impl fmt::Debug for Files {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("Files")
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for Files {
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
directory: self.directory.clone(),
|
||||
index: self.index.clone(),
|
||||
show_index: self.show_index,
|
||||
redirect_to_slash: self.redirect_to_slash,
|
||||
default: self.default.clone(),
|
||||
renderer: self.renderer.clone(),
|
||||
file_flags: self.file_flags,
|
||||
path: self.path.clone(),
|
||||
mime_override: self.mime_override.clone(),
|
||||
guards: self.guards.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Files {
|
||||
/// Create new `Files` instance for specified base directory.
|
||||
///
|
||||
/// `File` uses `ThreadPool` for blocking filesystem operations.
|
||||
/// By default pool with 5x threads of available cpus is used.
|
||||
/// Pool size can be changed by setting ACTIX_THREADPOOL environment variable.
|
||||
pub fn new<T: Into<PathBuf>>(path: &str, dir: T) -> Files {
|
||||
let orig_dir = dir.into();
|
||||
let dir = match orig_dir.canonicalize() {
|
||||
Ok(canon_dir) => canon_dir,
|
||||
Err(_) => {
|
||||
log::error!("Specified path is not a directory: {:?}", orig_dir);
|
||||
PathBuf::new()
|
||||
}
|
||||
};
|
||||
|
||||
Files {
|
||||
path: path.to_string(),
|
||||
directory: dir,
|
||||
index: None,
|
||||
show_index: false,
|
||||
redirect_to_slash: false,
|
||||
default: Rc::new(RefCell::new(None)),
|
||||
renderer: Rc::new(directory_listing),
|
||||
mime_override: None,
|
||||
file_flags: named::Flags::default(),
|
||||
guards: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Show files listing for directories.
|
||||
///
|
||||
/// By default show files listing is disabled.
|
||||
pub fn show_files_listing(mut self) -> Self {
|
||||
self.show_index = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Redirects to a slash-ended path when browsing a directory.
|
||||
///
|
||||
/// By default never redirect.
|
||||
pub fn redirect_to_slash_directory(mut self) -> Self {
|
||||
self.redirect_to_slash = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set custom directory renderer
|
||||
pub fn files_listing_renderer<F>(mut self, f: F) -> Self
|
||||
where
|
||||
for<'r, 's> F: Fn(&'r Directory, &'s HttpRequest) -> Result<ServiceResponse, io::Error>
|
||||
+ 'static,
|
||||
{
|
||||
self.renderer = Rc::new(f);
|
||||
self
|
||||
}
|
||||
|
||||
/// Specifies mime override callback
|
||||
pub fn mime_override<F>(mut self, f: F) -> Self
|
||||
where
|
||||
F: Fn(&mime::Name<'_>) -> DispositionType + 'static,
|
||||
{
|
||||
self.mime_override = Some(Rc::new(f));
|
||||
self
|
||||
}
|
||||
|
||||
/// Set index file
|
||||
///
|
||||
/// Shows specific index file for directory "/" instead of
|
||||
/// showing files listing.
|
||||
pub fn index_file<T: Into<String>>(mut self, index: T) -> Self {
|
||||
self.index = Some(index.into());
|
||||
self
|
||||
}
|
||||
|
||||
#[inline]
|
||||
/// Specifies whether to use ETag or not.
|
||||
///
|
||||
/// Default is true.
|
||||
pub fn use_etag(mut self, value: bool) -> Self {
|
||||
self.file_flags.set(named::Flags::ETAG, value);
|
||||
self
|
||||
}
|
||||
|
||||
#[inline]
|
||||
/// Specifies whether to use Last-Modified or not.
|
||||
///
|
||||
/// Default is true.
|
||||
pub fn use_last_modified(mut self, value: bool) -> Self {
|
||||
self.file_flags.set(named::Flags::LAST_MD, value);
|
||||
self
|
||||
}
|
||||
|
||||
/// Specifies custom guards to use for directory listings and files.
|
||||
///
|
||||
/// Default behaviour allows GET and HEAD.
|
||||
#[inline]
|
||||
pub fn use_guards<G: Guard + 'static>(mut self, guards: G) -> Self {
|
||||
self.guards = Some(Rc::new(guards));
|
||||
self
|
||||
}
|
||||
|
||||
/// Disable `Content-Disposition` header.
|
||||
///
|
||||
/// By default the `Content-Disposition` header is enabled.
|
||||
#[inline]
|
||||
pub fn disable_content_disposition(mut self) -> Self {
|
||||
self.file_flags.remove(named::Flags::CONTENT_DISPOSITION);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets default handler which is used when no matched file could be found.
|
||||
pub fn default_handler<F, U>(mut self, f: F) -> Self
|
||||
where
|
||||
F: IntoServiceFactory<U>,
|
||||
U: ServiceFactory<
|
||||
Config = (),
|
||||
Request = ServiceRequest,
|
||||
Response = ServiceResponse,
|
||||
Error = Error,
|
||||
> + 'static,
|
||||
{
|
||||
// create and configure default resource
|
||||
self.default = Rc::new(RefCell::new(Some(Rc::new(boxed::factory(
|
||||
f.into_factory().map_init_err(|_| ()),
|
||||
)))));
|
||||
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl HttpServiceFactory for Files {
|
||||
fn register(self, config: &mut AppService) {
|
||||
if self.default.borrow().is_none() {
|
||||
*self.default.borrow_mut() = Some(config.default_service());
|
||||
}
|
||||
|
||||
let rdef = if config.is_root() {
|
||||
ResourceDef::root_prefix(&self.path)
|
||||
} else {
|
||||
ResourceDef::prefix(&self.path)
|
||||
};
|
||||
|
||||
config.register_service(rdef, None, self, None)
|
||||
}
|
||||
}
|
||||
|
||||
impl ServiceFactory for Files {
|
||||
type Request = ServiceRequest;
|
||||
type Response = ServiceResponse;
|
||||
type Error = Error;
|
||||
type Config = ();
|
||||
type Service = FilesService;
|
||||
type InitError = ();
|
||||
type Future = LocalBoxFuture<'static, Result<Self::Service, Self::InitError>>;
|
||||
|
||||
fn new_service(&self, _: ()) -> Self::Future {
|
||||
let mut srv = FilesService {
|
||||
directory: self.directory.clone(),
|
||||
index: self.index.clone(),
|
||||
show_index: self.show_index,
|
||||
redirect_to_slash: self.redirect_to_slash,
|
||||
default: None,
|
||||
renderer: self.renderer.clone(),
|
||||
mime_override: self.mime_override.clone(),
|
||||
file_flags: self.file_flags,
|
||||
guards: self.guards.clone(),
|
||||
};
|
||||
|
||||
if let Some(ref default) = *self.default.borrow() {
|
||||
default
|
||||
.new_service(())
|
||||
.map(move |result| match result {
|
||||
Ok(default) => {
|
||||
srv.default = Some(default);
|
||||
Ok(srv)
|
||||
}
|
||||
Err(_) => Err(()),
|
||||
})
|
||||
.boxed_local()
|
||||
} else {
|
||||
ok(srv).boxed_local()
|
||||
}
|
||||
}
|
||||
}
|
@ -1,43 +1,52 @@
|
||||
#![allow(clippy::borrow_interior_mutable_const, clippy::type_complexity)]
|
||||
//! Static files support for Actix Web.
|
||||
//!
|
||||
//! Provides a non-blocking service for serving static files from disk.
|
||||
//!
|
||||
//! # Example
|
||||
//! ```rust
|
||||
//! use actix_web::App;
|
||||
//! use actix_files::Files;
|
||||
//!
|
||||
//! let app = App::new()
|
||||
//! .service(Files::new("/static", "."));
|
||||
//! ```
|
||||
//!
|
||||
//! # Implementation Quirks
|
||||
//! - If a filename contains non-ascii characters, that file will be served with the `charset=utf-8`
|
||||
//! extension on the Content-Type header.
|
||||
|
||||
//! Static files support
|
||||
use std::cell::RefCell;
|
||||
use std::fmt::Write;
|
||||
use std::fs::{DirEntry, File};
|
||||
use std::future::Future;
|
||||
use std::io::{Read, Seek};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::pin::Pin;
|
||||
use std::rc::Rc;
|
||||
use std::task::{Context, Poll};
|
||||
use std::{cmp, io};
|
||||
#![deny(rust_2018_idioms)]
|
||||
#![warn(missing_docs, missing_debug_implementations)]
|
||||
|
||||
use actix_service::boxed::{self, BoxService, BoxServiceFactory};
|
||||
use actix_service::{IntoServiceFactory, Service, ServiceFactory};
|
||||
use actix_web::dev::{
|
||||
AppService, HttpServiceFactory, Payload, ResourceDef, ServiceRequest,
|
||||
ServiceResponse,
|
||||
use std::io;
|
||||
|
||||
use actix_service::boxed::{BoxService, BoxServiceFactory};
|
||||
use actix_web::{
|
||||
dev::{ServiceRequest, ServiceResponse},
|
||||
error::{BlockingError, Error, ErrorInternalServerError},
|
||||
http::header::DispositionType,
|
||||
};
|
||||
use actix_web::error::{BlockingError, Error, ErrorInternalServerError};
|
||||
use actix_web::guard::Guard;
|
||||
use actix_web::http::header::{self, DispositionType};
|
||||
use actix_web::http::Method;
|
||||
use actix_web::{web, FromRequest, HttpRequest, HttpResponse};
|
||||
use bytes::Bytes;
|
||||
use futures_core::Stream;
|
||||
use futures_util::future::{ok, ready, Either, FutureExt, LocalBoxFuture, Ready};
|
||||
use mime;
|
||||
use mime_guess::from_ext;
|
||||
use percent_encoding::{utf8_percent_encode, CONTROLS};
|
||||
use v_htmlescape::escape as escape_html_entity;
|
||||
|
||||
mod chunked;
|
||||
mod directory;
|
||||
mod error;
|
||||
mod files;
|
||||
mod named;
|
||||
mod path_buf;
|
||||
mod range;
|
||||
mod service;
|
||||
|
||||
use self::error::{FilesError, UriSegmentError};
|
||||
pub use crate::chunked::ChunkedReadFile;
|
||||
pub use crate::directory::Directory;
|
||||
pub use crate::files::Files;
|
||||
pub use crate::named::NamedFile;
|
||||
pub use crate::range::HttpRange;
|
||||
pub use crate::service::FilesService;
|
||||
|
||||
use self::directory::{directory_listing, DirectoryRenderer};
|
||||
use self::error::FilesError;
|
||||
use self::path_buf::PathBufWrap;
|
||||
|
||||
type HttpService = BoxService<ServiceRequest, ServiceResponse, Error>;
|
||||
type HttpNewService = BoxServiceFactory<(), ServiceRequest, ServiceResponse, Error, ()>;
|
||||
@ -50,610 +59,37 @@ pub fn file_extension_to_mime(ext: &str) -> mime::Mime {
|
||||
from_ext(ext).first_or_octet_stream()
|
||||
}
|
||||
|
||||
fn handle_error(err: BlockingError<io::Error>) -> Error {
|
||||
pub(crate) fn handle_error(err: BlockingError<io::Error>) -> Error {
|
||||
match err {
|
||||
BlockingError::Error(err) => err.into(),
|
||||
BlockingError::Canceled => ErrorInternalServerError("Unexpected error"),
|
||||
}
|
||||
}
|
||||
#[doc(hidden)]
|
||||
/// A helper created from a `std::fs::File` which reads the file
|
||||
/// chunk-by-chunk on a `ThreadPool`.
|
||||
pub struct ChunkedReadFile {
|
||||
size: u64,
|
||||
offset: u64,
|
||||
file: Option<File>,
|
||||
fut:
|
||||
Option<LocalBoxFuture<'static, Result<(File, Bytes), BlockingError<io::Error>>>>,
|
||||
counter: u64,
|
||||
}
|
||||
|
||||
impl Stream for ChunkedReadFile {
|
||||
type Item = Result<Bytes, Error>;
|
||||
|
||||
fn poll_next(
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut Context,
|
||||
) -> Poll<Option<Self::Item>> {
|
||||
if let Some(ref mut fut) = self.fut {
|
||||
return match Pin::new(fut).poll(cx) {
|
||||
Poll::Ready(Ok((file, bytes))) => {
|
||||
self.fut.take();
|
||||
self.file = Some(file);
|
||||
self.offset += bytes.len() as u64;
|
||||
self.counter += bytes.len() as u64;
|
||||
Poll::Ready(Some(Ok(bytes)))
|
||||
}
|
||||
Poll::Ready(Err(e)) => Poll::Ready(Some(Err(handle_error(e)))),
|
||||
Poll::Pending => Poll::Pending,
|
||||
};
|
||||
}
|
||||
|
||||
let size = self.size;
|
||||
let offset = self.offset;
|
||||
let counter = self.counter;
|
||||
|
||||
if size == counter {
|
||||
Poll::Ready(None)
|
||||
} else {
|
||||
let mut file = self.file.take().expect("Use after completion");
|
||||
self.fut = Some(
|
||||
web::block(move || {
|
||||
let max_bytes: usize;
|
||||
max_bytes = cmp::min(size.saturating_sub(counter), 65_536) as usize;
|
||||
let mut buf = Vec::with_capacity(max_bytes);
|
||||
file.seek(io::SeekFrom::Start(offset))?;
|
||||
let nbytes =
|
||||
file.by_ref().take(max_bytes as u64).read_to_end(&mut buf)?;
|
||||
if nbytes == 0 {
|
||||
return Err(io::ErrorKind::UnexpectedEof.into());
|
||||
}
|
||||
Ok((file, Bytes::from(buf)))
|
||||
})
|
||||
.boxed_local(),
|
||||
);
|
||||
self.poll_next(cx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type DirectoryRenderer =
|
||||
dyn Fn(&Directory, &HttpRequest) -> Result<ServiceResponse, io::Error>;
|
||||
|
||||
/// A directory; responds with the generated directory listing.
|
||||
#[derive(Debug)]
|
||||
pub struct Directory {
|
||||
/// Base directory
|
||||
pub base: PathBuf,
|
||||
/// Path of subdirectory to generate listing for
|
||||
pub path: PathBuf,
|
||||
}
|
||||
|
||||
impl Directory {
|
||||
/// Create a new directory
|
||||
pub fn new(base: PathBuf, path: PathBuf) -> Directory {
|
||||
Directory { base, path }
|
||||
}
|
||||
|
||||
/// Is this entry visible from this directory?
|
||||
pub fn is_visible(&self, entry: &io::Result<DirEntry>) -> bool {
|
||||
if let Ok(ref entry) = *entry {
|
||||
if let Some(name) = entry.file_name().to_str() {
|
||||
if name.starts_with('.') {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if let Ok(ref md) = entry.metadata() {
|
||||
let ft = md.file_type();
|
||||
return ft.is_dir() || ft.is_file() || ft.is_symlink();
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
// show file url as relative to static path
|
||||
macro_rules! encode_file_url {
|
||||
($path:ident) => {
|
||||
utf8_percent_encode(&$path, CONTROLS)
|
||||
};
|
||||
}
|
||||
|
||||
// " -- " & -- & ' -- ' < -- < > -- > / -- /
|
||||
macro_rules! encode_file_name {
|
||||
($entry:ident) => {
|
||||
escape_html_entity(&$entry.file_name().to_string_lossy())
|
||||
};
|
||||
}
|
||||
|
||||
fn directory_listing(
|
||||
dir: &Directory,
|
||||
req: &HttpRequest,
|
||||
) -> Result<ServiceResponse, io::Error> {
|
||||
let index_of = format!("Index of {}", req.path());
|
||||
let mut body = String::new();
|
||||
let base = Path::new(req.path());
|
||||
|
||||
for entry in dir.path.read_dir()? {
|
||||
if dir.is_visible(&entry) {
|
||||
let entry = entry.unwrap();
|
||||
let p = match entry.path().strip_prefix(&dir.path) {
|
||||
Ok(p) if cfg!(windows) => {
|
||||
base.join(p).to_string_lossy().replace("\\", "/")
|
||||
}
|
||||
Ok(p) => base.join(p).to_string_lossy().into_owned(),
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
// if file is a directory, add '/' to the end of the name
|
||||
if let Ok(metadata) = entry.metadata() {
|
||||
if metadata.is_dir() {
|
||||
let _ = write!(
|
||||
body,
|
||||
"<li><a href=\"{}\">{}/</a></li>",
|
||||
encode_file_url!(p),
|
||||
encode_file_name!(entry),
|
||||
);
|
||||
} else {
|
||||
let _ = write!(
|
||||
body,
|
||||
"<li><a href=\"{}\">{}</a></li>",
|
||||
encode_file_url!(p),
|
||||
encode_file_name!(entry),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let html = format!(
|
||||
"<html>\
|
||||
<head><title>{}</title></head>\
|
||||
<body><h1>{}</h1>\
|
||||
<ul>\
|
||||
{}\
|
||||
</ul></body>\n</html>",
|
||||
index_of, index_of, body
|
||||
);
|
||||
Ok(ServiceResponse::new(
|
||||
req.clone(),
|
||||
HttpResponse::Ok()
|
||||
.content_type("text/html; charset=utf-8")
|
||||
.body(html),
|
||||
))
|
||||
}
|
||||
|
||||
type MimeOverride = dyn Fn(&mime::Name) -> DispositionType;
|
||||
|
||||
/// Static files handling
|
||||
///
|
||||
/// `Files` service must be registered with `App::service()` method.
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_web::App;
|
||||
/// use actix_files as fs;
|
||||
///
|
||||
/// fn main() {
|
||||
/// let app = App::new()
|
||||
/// .service(fs::Files::new("/static", "."));
|
||||
/// }
|
||||
/// ```
|
||||
pub struct Files {
|
||||
path: String,
|
||||
directory: PathBuf,
|
||||
index: Option<String>,
|
||||
show_index: bool,
|
||||
redirect_to_slash: bool,
|
||||
default: Rc<RefCell<Option<Rc<HttpNewService>>>>,
|
||||
renderer: Rc<DirectoryRenderer>,
|
||||
mime_override: Option<Rc<MimeOverride>>,
|
||||
file_flags: named::Flags,
|
||||
guards: Option<Rc<Box<dyn Guard>>>,
|
||||
}
|
||||
|
||||
impl Clone for Files {
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
directory: self.directory.clone(),
|
||||
index: self.index.clone(),
|
||||
show_index: self.show_index,
|
||||
redirect_to_slash: self.redirect_to_slash,
|
||||
default: self.default.clone(),
|
||||
renderer: self.renderer.clone(),
|
||||
file_flags: self.file_flags,
|
||||
path: self.path.clone(),
|
||||
mime_override: self.mime_override.clone(),
|
||||
guards: self.guards.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Files {
|
||||
/// Create new `Files` instance for specified base directory.
|
||||
///
|
||||
/// `File` uses `ThreadPool` for blocking filesystem operations.
|
||||
/// By default pool with 5x threads of available cpus is used.
|
||||
/// Pool size can be changed by setting ACTIX_THREADPOOL environment variable.
|
||||
pub fn new<T: Into<PathBuf>>(path: &str, dir: T) -> Files {
|
||||
let orig_dir = dir.into();
|
||||
let dir = match orig_dir.canonicalize() {
|
||||
Ok(canon_dir) => canon_dir,
|
||||
Err(_) => {
|
||||
log::error!("Specified path is not a directory: {:?}", orig_dir);
|
||||
PathBuf::new()
|
||||
}
|
||||
};
|
||||
|
||||
Files {
|
||||
path: path.to_string(),
|
||||
directory: dir,
|
||||
index: None,
|
||||
show_index: false,
|
||||
redirect_to_slash: false,
|
||||
default: Rc::new(RefCell::new(None)),
|
||||
renderer: Rc::new(directory_listing),
|
||||
mime_override: None,
|
||||
file_flags: named::Flags::default(),
|
||||
guards: None,
|
||||
}
|
||||
}
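A minimal sketch of the knob mentioned in the doc comment above. The directory and pool size are placeholders, and the variable must be set before the blocking pool is first used.

```rust
use actix_files as fs;

fn main() {
    // Hypothetical setup: enlarge the blocking pool before anything touches it,
    // then mount a directory ("./assets" is an assumed path).
    std::env::set_var("ACTIX_THREADPOOL", "16");
    let _files = fs::Files::new("/assets", "./assets");
}
```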
|
||||
|
||||
/// Show files listing for directories.
|
||||
///
|
||||
/// By default, the file listing is disabled.
|
||||
pub fn show_files_listing(mut self) -> Self {
|
||||
self.show_index = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Redirects to a slash-ended path when browsing a directory.
|
||||
///
|
||||
/// By default, no redirect is performed.
|
||||
pub fn redirect_to_slash_directory(mut self) -> Self {
|
||||
self.redirect_to_slash = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set custom directory renderer
|
||||
pub fn files_listing_renderer<F>(mut self, f: F) -> Self
|
||||
where
|
||||
for<'r, 's> F: Fn(&'r Directory, &'s HttpRequest) -> Result<ServiceResponse, io::Error>
|
||||
+ 'static,
|
||||
{
|
||||
self.renderer = Rc::new(f);
|
||||
self
|
||||
}
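As a sketch of what a replacement for `directory_listing` can look like (the function and route names here are illustrative, not part of the crate), the renderer only has to satisfy the `Fn(&Directory, &HttpRequest) -> Result<ServiceResponse, io::Error>` bound:

```rust
use std::io;

use actix_files as fs;
use actix_web::{dev::ServiceResponse, App, HttpRequest, HttpResponse};

// Plain-text renderer instead of the HTML listing built by `directory_listing`.
fn plain_listing(dir: &fs::Directory, req: &HttpRequest) -> Result<ServiceResponse, io::Error> {
    let body = format!("listing of {}\n", dir.path.display());
    Ok(ServiceResponse::new(
        req.clone(),
        HttpResponse::Ok().content_type("text/plain").body(body),
    ))
}

fn main() {
    let _app = App::new().service(
        fs::Files::new("/static", ".")
            .show_files_listing()
            .files_listing_renderer(plain_listing),
    );
}
```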
|
||||
|
||||
/// Specifies mime override callback
|
||||
pub fn mime_override<F>(mut self, f: F) -> Self
|
||||
where
|
||||
F: Fn(&mime::Name) -> DispositionType + 'static,
|
||||
{
|
||||
self.mime_override = Some(Rc::new(f));
|
||||
self
|
||||
}
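For illustration, the same shape the crate's own mime-override test uses: force every file to be served as an attachment. The mount point is assumed, and the snippet expects the `mime` crate as a direct dependency, as it is for actix-files itself.

```rust
use actix_files as fs;
use actix_web::http::header::DispositionType;
use actix_web::App;

// Ignore the detected top-level mime type and always force a download.
fn all_attachment(_: &mime::Name<'_>) -> DispositionType {
    DispositionType::Attachment
}

fn main() {
    let _app = App::new()
        .service(fs::Files::new("/downloads", ".").mime_override(all_attachment));
}
```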
|
||||
|
||||
/// Set index file
|
||||
///
|
||||
/// Shows a specific index file for the directory "/" instead of
/// showing the file listing.
|
||||
pub fn index_file<T: Into<String>>(mut self, index: T) -> Self {
|
||||
self.index = Some(index.into());
|
||||
self
|
||||
}
|
||||
|
||||
#[inline]
|
||||
/// Specifies whether to use ETag or not.
|
||||
///
|
||||
/// Default is true.
|
||||
pub fn use_etag(mut self, value: bool) -> Self {
|
||||
self.file_flags.set(named::Flags::ETAG, value);
|
||||
self
|
||||
}
|
||||
|
||||
#[inline]
|
||||
/// Specifies whether to use Last-Modified or not.
|
||||
///
|
||||
/// Default is true.
|
||||
pub fn use_last_modified(mut self, value: bool) -> Self {
|
||||
self.file_flags.set(named::Flags::LAST_MD, value);
|
||||
self
|
||||
}
|
||||
|
||||
/// Specifies custom guards to use for directory listings and files.
|
||||
///
|
||||
/// Default behaviour allows GET and HEAD.
|
||||
#[inline]
|
||||
pub fn use_guards<G: Guard + 'static>(mut self, guards: G) -> Self {
|
||||
self.guards = Some(Rc::new(Box::new(guards)));
|
||||
self
|
||||
}
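A sketch of a custom guard; `x-show-files` is a made-up header name. Note that supplying a guard replaces the default GET/HEAD check performed in `FilesService::call`.

```rust
use actix_files as fs;
use actix_web::{guard, App};

fn main() {
    let _app = App::new().service(
        // Only requests carrying this (made-up) header reach the files service;
        // everything else receives a 405 from the service.
        fs::Files::new("/protected", ".")
            .use_guards(guard::Header("x-show-files", "true")),
    );
}
```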
|
||||
|
||||
/// Disable `Content-Disposition` header.
|
||||
///
|
||||
/// By default the `Content-Disposition` header is enabled.
|
||||
#[inline]
|
||||
pub fn disable_content_disposition(mut self) -> Self {
|
||||
self.file_flags.remove(named::Flags::CONTENT_DISPOSITION);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the default handler, which is used when no matching file can be found.
|
||||
pub fn default_handler<F, U>(mut self, f: F) -> Self
|
||||
where
|
||||
F: IntoServiceFactory<U>,
|
||||
U: ServiceFactory<
|
||||
Config = (),
|
||||
Request = ServiceRequest,
|
||||
Response = ServiceResponse,
|
||||
Error = Error,
|
||||
> + 'static,
|
||||
{
|
||||
// create and configure default resource
|
||||
self.default = Rc::new(RefCell::new(Some(Rc::new(boxed::factory(
|
||||
f.into_factory().map_init_err(|_| ()),
|
||||
)))));
|
||||
|
||||
self
|
||||
}
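The pattern the crate's own "default content" test relies on works here as well: a closure taking a `ServiceRequest` acts as the fallback service. The response body below is illustrative, and `futures-util` is assumed to be available, as it is for the crate's tests.

```rust
use actix_files as fs;
use actix_web::{dev::ServiceRequest, App, HttpResponse};
use futures_util::future::ok;

fn main() {
    let _app = App::new().service(
        fs::Files::new("/static", ".").default_handler(|req: ServiceRequest| {
            // Answer anything that did not resolve to a file on disk.
            ok(req.into_response(HttpResponse::NotFound().body("file not found")))
        }),
    );
}
```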
|
||||
}
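Putting the builder methods above together in one runnable shape; the paths and the bind address are placeholders.

```rust
use actix_files as fs;
use actix_web::{App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new().service(
            fs::Files::new("/static", "./static")
                .index_file("index.html")      // serve index.html for directory requests
                .redirect_to_slash_directory() // "/static/docs" -> "/static/docs/"
                .use_etag(true)
                .use_last_modified(true),
        )
    })
    .bind("127.0.0.1:8080")?
    .run()
    .await
}
```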
|
||||
|
||||
impl HttpServiceFactory for Files {
|
||||
fn register(self, config: &mut AppService) {
|
||||
if self.default.borrow().is_none() {
|
||||
*self.default.borrow_mut() = Some(config.default_service());
|
||||
}
|
||||
let rdef = if config.is_root() {
|
||||
ResourceDef::root_prefix(&self.path)
|
||||
} else {
|
||||
ResourceDef::prefix(&self.path)
|
||||
};
|
||||
config.register_service(rdef, None, self, None)
|
||||
}
|
||||
}
|
||||
|
||||
impl ServiceFactory for Files {
|
||||
type Request = ServiceRequest;
|
||||
type Response = ServiceResponse;
|
||||
type Error = Error;
|
||||
type Config = ();
|
||||
type Service = FilesService;
|
||||
type InitError = ();
|
||||
type Future = LocalBoxFuture<'static, Result<Self::Service, Self::InitError>>;
|
||||
|
||||
fn new_service(&self, _: ()) -> Self::Future {
|
||||
let mut srv = FilesService {
|
||||
directory: self.directory.clone(),
|
||||
index: self.index.clone(),
|
||||
show_index: self.show_index,
|
||||
redirect_to_slash: self.redirect_to_slash,
|
||||
default: None,
|
||||
renderer: self.renderer.clone(),
|
||||
mime_override: self.mime_override.clone(),
|
||||
file_flags: self.file_flags,
|
||||
guards: self.guards.clone(),
|
||||
};
|
||||
|
||||
if let Some(ref default) = *self.default.borrow() {
|
||||
default
|
||||
.new_service(())
|
||||
.map(move |result| match result {
|
||||
Ok(default) => {
|
||||
srv.default = Some(default);
|
||||
Ok(srv)
|
||||
}
|
||||
Err(_) => Err(()),
|
||||
})
|
||||
.boxed_local()
|
||||
} else {
|
||||
ok(srv).boxed_local()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FilesService {
|
||||
directory: PathBuf,
|
||||
index: Option<String>,
|
||||
show_index: bool,
|
||||
redirect_to_slash: bool,
|
||||
default: Option<HttpService>,
|
||||
renderer: Rc<DirectoryRenderer>,
|
||||
mime_override: Option<Rc<MimeOverride>>,
|
||||
file_flags: named::Flags,
|
||||
guards: Option<Rc<Box<dyn Guard>>>,
|
||||
}
|
||||
|
||||
impl FilesService {
|
||||
fn handle_err(
|
||||
&mut self,
|
||||
e: io::Error,
|
||||
req: ServiceRequest,
|
||||
) -> Either<
|
||||
Ready<Result<ServiceResponse, Error>>,
|
||||
LocalBoxFuture<'static, Result<ServiceResponse, Error>>,
|
||||
> {
|
||||
log::debug!("Files: Failed to handle {}: {}", req.path(), e);
|
||||
if let Some(ref mut default) = self.default {
|
||||
Either::Right(default.call(req))
|
||||
} else {
|
||||
Either::Left(ok(req.error_response(e)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Service for FilesService {
|
||||
type Request = ServiceRequest;
|
||||
type Response = ServiceResponse;
|
||||
type Error = Error;
|
||||
type Future = Either<
|
||||
Ready<Result<Self::Response, Self::Error>>,
|
||||
LocalBoxFuture<'static, Result<Self::Response, Self::Error>>,
|
||||
>;
|
||||
|
||||
fn poll_ready(&mut self, _: &mut Context) -> Poll<Result<(), Self::Error>> {
|
||||
Poll::Ready(Ok(()))
|
||||
}
|
||||
|
||||
fn call(&mut self, req: ServiceRequest) -> Self::Future {
|
||||
let is_method_valid = if let Some(guard) = &self.guards {
|
||||
// execute user defined guards
|
||||
(**guard).check(req.head())
|
||||
} else {
|
||||
// default behaviour
|
||||
match *req.method() {
|
||||
Method::HEAD | Method::GET => true,
|
||||
_ => false,
|
||||
}
|
||||
};
|
||||
|
||||
if !is_method_valid {
|
||||
return Either::Left(ok(req.into_response(
|
||||
actix_web::HttpResponse::MethodNotAllowed()
|
||||
.header(header::CONTENT_TYPE, "text/plain")
|
||||
.body("Request did not meet this resource's requirements."),
|
||||
)));
|
||||
}
|
||||
|
||||
let real_path = match PathBufWrp::get_pathbuf(req.match_info().path()) {
|
||||
Ok(item) => item,
|
||||
Err(e) => return Either::Left(ok(req.error_response(e))),
|
||||
};
|
||||
|
||||
// full file path
|
||||
let path = match self.directory.join(&real_path.0).canonicalize() {
|
||||
Ok(path) => path,
|
||||
Err(e) => return self.handle_err(e, req),
|
||||
};
|
||||
|
||||
if path.is_dir() {
|
||||
if let Some(ref redir_index) = self.index {
|
||||
if self.redirect_to_slash && !req.path().ends_with('/') {
|
||||
let redirect_to = format!("{}/", req.path());
|
||||
return Either::Left(ok(req.into_response(
|
||||
HttpResponse::Found()
|
||||
.header(header::LOCATION, redirect_to)
|
||||
.body("")
|
||||
.into_body(),
|
||||
)));
|
||||
}
|
||||
|
||||
let path = path.join(redir_index);
|
||||
|
||||
match NamedFile::open(path) {
|
||||
Ok(mut named_file) => {
|
||||
if let Some(ref mime_override) = self.mime_override {
|
||||
let new_disposition =
|
||||
mime_override(&named_file.content_type.type_());
|
||||
named_file.content_disposition.disposition = new_disposition;
|
||||
}
|
||||
|
||||
named_file.flags = self.file_flags;
|
||||
let (req, _) = req.into_parts();
|
||||
Either::Left(ok(match named_file.into_response(&req) {
|
||||
Ok(item) => ServiceResponse::new(req, item),
|
||||
Err(e) => ServiceResponse::from_err(e, req),
|
||||
}))
|
||||
}
|
||||
Err(e) => self.handle_err(e, req),
|
||||
}
|
||||
} else if self.show_index {
|
||||
let dir = Directory::new(self.directory.clone(), path);
|
||||
let (req, _) = req.into_parts();
|
||||
let x = (self.renderer)(&dir, &req);
|
||||
match x {
|
||||
Ok(resp) => Either::Left(ok(resp)),
|
||||
Err(e) => Either::Left(ok(ServiceResponse::from_err(e, req))),
|
||||
}
|
||||
} else {
|
||||
Either::Left(ok(ServiceResponse::from_err(
|
||||
FilesError::IsDirectory,
|
||||
req.into_parts().0,
|
||||
)))
|
||||
}
|
||||
} else {
|
||||
match NamedFile::open(path) {
|
||||
Ok(mut named_file) => {
|
||||
if let Some(ref mime_override) = self.mime_override {
|
||||
let new_disposition =
|
||||
mime_override(&named_file.content_type.type_());
|
||||
named_file.content_disposition.disposition = new_disposition;
|
||||
}
|
||||
|
||||
named_file.flags = self.file_flags;
|
||||
let (req, _) = req.into_parts();
|
||||
match named_file.into_response(&req) {
|
||||
Ok(item) => {
|
||||
Either::Left(ok(ServiceResponse::new(req.clone(), item)))
|
||||
}
|
||||
Err(e) => Either::Left(ok(ServiceResponse::from_err(e, req))),
|
||||
}
|
||||
}
|
||||
Err(e) => self.handle_err(e, req),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct PathBufWrp(PathBuf);
|
||||
|
||||
impl PathBufWrp {
|
||||
fn get_pathbuf(path: &str) -> Result<Self, UriSegmentError> {
|
||||
let mut buf = PathBuf::new();
|
||||
for segment in path.split('/') {
|
||||
if segment == ".." {
|
||||
buf.pop();
|
||||
} else if segment.starts_with('.') {
|
||||
return Err(UriSegmentError::BadStart('.'));
|
||||
} else if segment.starts_with('*') {
|
||||
return Err(UriSegmentError::BadStart('*'));
|
||||
} else if segment.ends_with(':') {
|
||||
return Err(UriSegmentError::BadEnd(':'));
|
||||
} else if segment.ends_with('>') {
|
||||
return Err(UriSegmentError::BadEnd('>'));
|
||||
} else if segment.ends_with('<') {
|
||||
return Err(UriSegmentError::BadEnd('<'));
|
||||
} else if segment.is_empty() {
|
||||
continue;
|
||||
} else if cfg!(windows) && segment.contains('\\') {
|
||||
return Err(UriSegmentError::BadChar('\\'));
|
||||
} else {
|
||||
buf.push(segment)
|
||||
}
|
||||
}
|
||||
|
||||
Ok(PathBufWrp(buf))
|
||||
}
|
||||
}
|
||||
|
||||
impl FromRequest for PathBufWrp {
|
||||
type Error = UriSegmentError;
|
||||
type Future = Ready<Result<Self, Self::Error>>;
|
||||
type Config = ();
|
||||
|
||||
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
|
||||
ready(PathBufWrp::get_pathbuf(req.match_info().path()))
|
||||
}
|
||||
}
|
||||
type MimeOverride = dyn Fn(&mime::Name<'_>) -> DispositionType;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::fs;
|
||||
use std::iter::FromIterator;
|
||||
use std::ops::Add;
|
||||
use std::time::{Duration, SystemTime};
|
||||
use std::{
|
||||
fs::{self, File},
|
||||
ops::Add,
|
||||
time::{Duration, SystemTime},
|
||||
};
|
||||
|
||||
use actix_service::ServiceFactory;
|
||||
use actix_web::{
|
||||
guard,
|
||||
http::{
|
||||
header::{self, ContentDisposition, DispositionParam, DispositionType},
|
||||
Method, StatusCode,
|
||||
},
|
||||
middleware::Compress,
|
||||
test::{self, TestRequest},
|
||||
web, App, HttpResponse, Responder,
|
||||
};
|
||||
use futures_util::future::ok;
|
||||
|
||||
use super::*;
|
||||
use actix_web::guard;
|
||||
use actix_web::http::header::{
|
||||
self, ContentDisposition, DispositionParam, DispositionType,
|
||||
};
|
||||
use actix_web::http::{Method, StatusCode};
|
||||
use actix_web::middleware::Compress;
|
||||
use actix_web::test::{self, TestRequest};
|
||||
use actix_web::{App, Responder};
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_file_extension_to_mime() {
|
||||
@ -898,7 +334,7 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_mime_override() {
|
||||
fn all_attachment(_: &mime::Name) -> DispositionType {
|
||||
fn all_attachment(_: &mime::Name<'_>) -> DispositionType {
|
||||
DispositionType::Attachment
|
||||
}
|
||||
|
||||
@ -952,9 +388,7 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_content_range_headers() {
|
||||
let srv = test::start(|| {
|
||||
App::new().service(Files::new("/", "."))
|
||||
});
|
||||
let srv = test::start(|| App::new().service(Files::new("/", ".")));
|
||||
|
||||
// Valid range header
|
||||
let response = srv
|
||||
@ -979,9 +413,7 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_content_length_headers() {
|
||||
let srv = test::start(|| {
|
||||
App::new().service(Files::new("/", "."))
|
||||
});
|
||||
let srv = test::start(|| App::new().service(Files::new("/", ".")));
|
||||
|
||||
// Valid range header
|
||||
let response = srv
|
||||
@ -1014,21 +446,15 @@ mod tests {
|
||||
|
||||
// Check file contents
|
||||
let bytes = response.body().await.unwrap();
|
||||
let data = Bytes::from(fs::read("tests/test.binary").unwrap());
|
||||
let data = web::Bytes::from(fs::read("tests/test.binary").unwrap());
|
||||
assert_eq!(bytes, data);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_head_content_length_headers() {
|
||||
let srv = test::start(|| {
|
||||
App::new().service(Files::new("/", "."))
|
||||
});
|
||||
let srv = test::start(|| App::new().service(Files::new("/", ".")));
|
||||
|
||||
let response = srv
|
||||
.head("/tests/test.binary")
|
||||
.send()
|
||||
.await
|
||||
.unwrap();
|
||||
let response = srv.head("/tests/test.binary").send().await.unwrap();
|
||||
|
||||
let content_length = response
|
||||
.headers()
|
||||
@ -1053,7 +479,7 @@ mod tests {
|
||||
assert_eq!(response.status(), StatusCode::OK);
|
||||
|
||||
let bytes = test::read_body(response).await;
|
||||
let data = Bytes::from(fs::read("tests/test space.binary").unwrap());
|
||||
let data = web::Bytes::from(fs::read("tests/test space.binary").unwrap());
|
||||
assert_eq!(bytes, data);
|
||||
}
|
||||
|
||||
@ -1097,12 +523,10 @@ mod tests {
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_content_encoding() {
|
||||
let mut srv = test::init_service(App::new().wrap(Compress::default()).service(
|
||||
web::resource("/").to(|| {
|
||||
async {
|
||||
NamedFile::open("Cargo.toml")
|
||||
.unwrap()
|
||||
.set_content_encoding(header::ContentEncoding::Identity)
|
||||
}
|
||||
web::resource("/").to(|| async {
|
||||
NamedFile::open("Cargo.toml")
|
||||
.unwrap()
|
||||
.set_content_encoding(header::ContentEncoding::Identity)
|
||||
}),
|
||||
))
|
||||
.await;
|
||||
@ -1119,12 +543,10 @@ mod tests {
|
||||
#[actix_rt::test]
|
||||
async fn test_named_file_content_encoding_gzip() {
|
||||
let mut srv = test::init_service(App::new().wrap(Compress::default()).service(
|
||||
web::resource("/").to(|| {
|
||||
async {
|
||||
NamedFile::open("Cargo.toml")
|
||||
.unwrap()
|
||||
.set_content_encoding(header::ContentEncoding::Gzip)
|
||||
}
|
||||
web::resource("/").to(|| async {
|
||||
NamedFile::open("Cargo.toml")
|
||||
.unwrap()
|
||||
.set_content_encoding(header::ContentEncoding::Gzip)
|
||||
}),
|
||||
))
|
||||
.await;
|
||||
@ -1235,7 +657,7 @@ mod tests {
|
||||
let resp = test::call_service(&mut st, req).await;
|
||||
assert_eq!(resp.status(), StatusCode::OK);
|
||||
let bytes = test::read_body(resp).await;
|
||||
assert_eq!(bytes, Bytes::from_static(b"default content"));
|
||||
assert_eq!(bytes, web::Bytes::from_static(b"default content"));
|
||||
}
|
||||
|
||||
// #[actix_rt::test]
|
||||
@ -1351,36 +773,4 @@ mod tests {
|
||||
// let response = srv.execute(request.send()).unwrap();
|
||||
// assert_eq!(response.status(), StatusCode::OK);
|
||||
// }
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_path_buf() {
|
||||
assert_eq!(
|
||||
PathBufWrp::get_pathbuf("/test/.tt").map(|t| t.0),
|
||||
Err(UriSegmentError::BadStart('.'))
|
||||
);
|
||||
assert_eq!(
|
||||
PathBufWrp::get_pathbuf("/test/*tt").map(|t| t.0),
|
||||
Err(UriSegmentError::BadStart('*'))
|
||||
);
|
||||
assert_eq!(
|
||||
PathBufWrp::get_pathbuf("/test/tt:").map(|t| t.0),
|
||||
Err(UriSegmentError::BadEnd(':'))
|
||||
);
|
||||
assert_eq!(
|
||||
PathBufWrp::get_pathbuf("/test/tt<").map(|t| t.0),
|
||||
Err(UriSegmentError::BadEnd('<'))
|
||||
);
|
||||
assert_eq!(
|
||||
PathBufWrp::get_pathbuf("/test/tt>").map(|t| t.0),
|
||||
Err(UriSegmentError::BadEnd('>'))
|
||||
);
|
||||
assert_eq!(
|
||||
PathBufWrp::get_pathbuf("/seg1/seg2/").unwrap().0,
|
||||
PathBuf::from_iter(vec!["seg1", "seg2"])
|
||||
);
|
||||
assert_eq!(
|
||||
PathBufWrp::get_pathbuf("/seg1/../seg2/").unwrap().0,
|
||||
PathBuf::from_iter(vec!["seg2"])
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -7,18 +7,20 @@ use std::time::{SystemTime, UNIX_EPOCH};
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::fs::MetadataExt;
|
||||
|
||||
use bitflags::bitflags;
|
||||
use mime;
|
||||
use mime_guess::from_path;
|
||||
|
||||
use actix_http::body::SizedStream;
|
||||
use actix_web::dev::BodyEncoding;
|
||||
use actix_web::http::header::{
|
||||
self, Charset, ContentDisposition, DispositionParam, DispositionType, ExtendedValue,
|
||||
use actix_web::{
|
||||
dev::{BodyEncoding, SizedStream},
|
||||
http::{
|
||||
header::{
|
||||
self, Charset, ContentDisposition, DispositionParam, DispositionType,
|
||||
ExtendedValue,
|
||||
},
|
||||
ContentEncoding, StatusCode,
|
||||
},
|
||||
Error, HttpMessage, HttpRequest, HttpResponse, Responder,
|
||||
};
|
||||
use actix_web::http::{ContentEncoding, StatusCode};
|
||||
use actix_web::{Error, HttpMessage, HttpRequest, HttpResponse, Responder};
|
||||
use bitflags::bitflags;
|
||||
use futures_util::future::{ready, Ready};
|
||||
use mime_guess::from_path;
|
||||
|
||||
use crate::range::HttpRange;
|
||||
use crate::ChunkedReadFile;
|
||||
@ -90,12 +92,14 @@ impl NamedFile {
|
||||
};
|
||||
|
||||
let ct = from_path(&path).first_or_octet_stream();
|
||||
let disposition_type = match ct.type_() {
|
||||
let disposition = match ct.type_() {
|
||||
mime::IMAGE | mime::TEXT | mime::VIDEO => DispositionType::Inline,
|
||||
_ => DispositionType::Attachment,
|
||||
};
|
||||
|
||||
let mut parameters =
|
||||
vec![DispositionParam::Filename(String::from(filename.as_ref()))];
|
||||
|
||||
if !filename.is_ascii() {
|
||||
parameters.push(DispositionParam::FilenameExt(ExtendedValue {
|
||||
charset: Charset::Ext(String::from("UTF-8")),
|
||||
@ -103,16 +107,19 @@ impl NamedFile {
|
||||
value: filename.into_owned().into_bytes(),
|
||||
}))
|
||||
}
|
||||
|
||||
let cd = ContentDisposition {
|
||||
disposition: disposition_type,
|
||||
parameters: parameters,
|
||||
disposition,
|
||||
parameters,
|
||||
};
|
||||
|
||||
(ct, cd)
|
||||
};
|
||||
|
||||
let md = file.metadata()?;
|
||||
let modified = md.modified().ok();
|
||||
let encoding = None;
|
||||
|
||||
Ok(NamedFile {
|
||||
path,
|
||||
file,
|
||||
@ -243,6 +250,7 @@ impl NamedFile {
|
||||
let dur = mtime
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.expect("modification time must be after epoch");
|
||||
|
||||
header::EntityTag::strong(format!(
|
||||
"{:x}:{:x}:{:x}:{:x}",
|
||||
ino,
|
||||
@ -257,9 +265,11 @@ impl NamedFile {
|
||||
self.modified.map(|mtime| mtime.into())
|
||||
}
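For orientation, the usual way the method below gets exercised is via the `Responder` impl: a handler that simply returns a `NamedFile`. The route and file path here are placeholders.

```rust
use actix_files::NamedFile;
use actix_web::{get, Error};

// Returning `NamedFile` from a handler routes through `into_response`,
// picking up the ETag, Last-Modified and Range handling shown in this file.
#[get("/report")]
async fn report() -> Result<NamedFile, Error> {
    Ok(NamedFile::open("static/report.pdf")?)
}
```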
|
||||
|
||||
/// Creates an `HttpResponse` with file as a streaming body.
|
||||
pub fn into_response(self, req: &HttpRequest) -> Result<HttpResponse, Error> {
|
||||
if self.status_code != StatusCode::OK {
|
||||
let mut resp = HttpResponse::build(self.status_code);
|
||||
|
||||
resp.set(header::ContentType(self.content_type.clone()))
|
||||
.if_true(self.flags.contains(Flags::CONTENT_DISPOSITION), |res| {
|
||||
res.header(
|
||||
@ -267,9 +277,11 @@ impl NamedFile {
|
||||
self.content_disposition.to_string(),
|
||||
);
|
||||
});
|
||||
|
||||
if let Some(current_encoding) = self.encoding {
|
||||
resp.encoding(current_encoding);
|
||||
}
|
||||
|
||||
let reader = ChunkedReadFile {
|
||||
size: self.md.len(),
|
||||
offset: 0,
|
||||
@ -277,6 +289,7 @@ impl NamedFile {
|
||||
fut: None,
|
||||
counter: 0,
|
||||
};
|
||||
|
||||
return Ok(resp.streaming(reader));
|
||||
}
|
||||
|
||||
@ -285,6 +298,7 @@ impl NamedFile {
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let last_modified = if self.flags.contains(Flags::LAST_MD) {
|
||||
self.last_modified()
|
||||
} else {
|
||||
@ -299,6 +313,7 @@ impl NamedFile {
|
||||
{
|
||||
let t1: SystemTime = m.clone().into();
|
||||
let t2: SystemTime = since.clone().into();
|
||||
|
||||
match (t1.duration_since(UNIX_EPOCH), t2.duration_since(UNIX_EPOCH)) {
|
||||
(Ok(t1), Ok(t2)) => t1 > t2,
|
||||
_ => false,
|
||||
@ -310,13 +325,14 @@ impl NamedFile {
|
||||
// check last modified
|
||||
let not_modified = if !none_match(etag.as_ref(), req) {
|
||||
true
|
||||
} else if req.headers().contains_key(&header::IF_NONE_MATCH) {
|
||||
} else if req.headers().contains_key(header::IF_NONE_MATCH) {
|
||||
false
|
||||
} else if let (Some(ref m), Some(header::IfModifiedSince(ref since))) =
|
||||
(last_modified, req.get_header())
|
||||
{
|
||||
let t1: SystemTime = m.clone().into();
|
||||
let t2: SystemTime = since.clone().into();
|
||||
|
||||
match (t1.duration_since(UNIX_EPOCH), t2.duration_since(UNIX_EPOCH)) {
|
||||
(Ok(t1), Ok(t2)) => t1 <= t2,
|
||||
_ => false,
|
||||
@ -333,6 +349,7 @@ impl NamedFile {
|
||||
self.content_disposition.to_string(),
|
||||
);
|
||||
});
|
||||
|
||||
// default compressing
|
||||
if let Some(current_encoding) = self.encoding {
|
||||
resp.encoding(current_encoding);
|
||||
@ -351,11 +368,12 @@ impl NamedFile {
|
||||
let mut offset = 0;
|
||||
|
||||
// check for range header
|
||||
if let Some(ranges) = req.headers().get(&header::RANGE) {
|
||||
if let Ok(rangesheader) = ranges.to_str() {
|
||||
if let Ok(rangesvec) = HttpRange::parse(rangesheader, length) {
|
||||
length = rangesvec[0].length;
|
||||
offset = rangesvec[0].start;
|
||||
if let Some(ranges) = req.headers().get(header::RANGE) {
|
||||
if let Ok(ranges_header) = ranges.to_str() {
|
||||
if let Ok(ranges) = HttpRange::parse(ranges_header, length) {
|
||||
length = ranges[0].length;
|
||||
offset = ranges[0].start;
|
||||
|
||||
resp.encoding(ContentEncoding::Identity);
|
||||
resp.header(
|
||||
header::CONTENT_RANGE,
|
||||
@ -415,6 +433,7 @@ impl DerefMut for NamedFile {
|
||||
fn any_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {
|
||||
match req.get_header::<header::IfMatch>() {
|
||||
None | Some(header::IfMatch::Any) => true,
|
||||
|
||||
Some(header::IfMatch::Items(ref items)) => {
|
||||
if let Some(some_etag) = etag {
|
||||
for item in items {
|
||||
@ -423,6 +442,7 @@ fn any_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
}
|
||||
@ -432,6 +452,7 @@ fn any_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {
|
||||
fn none_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {
|
||||
match req.get_header::<header::IfNoneMatch>() {
|
||||
Some(header::IfNoneMatch::Any) => false,
|
||||
|
||||
Some(header::IfNoneMatch::Items(ref items)) => {
|
||||
if let Some(some_etag) = etag {
|
||||
for item in items {
|
||||
@ -440,8 +461,10 @@ fn none_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
None => true,
|
||||
}
|
||||
}
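A sketch of the revalidation round-trip these helpers implement, written against the public test utilities. It assumes the working directory contains a `Cargo.toml`, as the crate's own tests do; the test name is made up.

```rust
use actix_files::Files;
use actix_web::{
    http::{header, StatusCode},
    test, App,
};

#[actix_rt::test]
async fn etag_revalidation() {
    let mut app = test::init_service(App::new().service(Files::new("/", "."))).await;

    // First request: 200 OK carrying an ETag.
    let resp = test::call_service(
        &mut app,
        test::TestRequest::with_uri("/Cargo.toml").to_request(),
    )
    .await;
    let etag = resp.headers().get(header::ETAG).unwrap().clone();

    // Revalidation: `none_match` sees the matching tag, so the body is skipped
    // and the file is answered with 304 Not Modified.
    let resp = test::call_service(
        &mut app,
        test::TestRequest::with_uri("/Cargo.toml")
            .header(header::IF_NONE_MATCH, etag)
            .to_request(),
    )
    .await;
    assert_eq!(resp.status(), StatusCode::NOT_MODIFIED);
}
```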
|
||||
|
actix-files/src/path_buf.rs (new file, 99 lines)
@ -0,0 +1,99 @@
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
str::FromStr,
|
||||
};
|
||||
|
||||
use actix_web::{dev::Payload, FromRequest, HttpRequest};
|
||||
use futures_util::future::{ready, Ready};
|
||||
|
||||
use crate::error::UriSegmentError;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct PathBufWrap(PathBuf);
|
||||
|
||||
impl FromStr for PathBufWrap {
|
||||
type Err = UriSegmentError;
|
||||
|
||||
fn from_str(path: &str) -> Result<Self, Self::Err> {
|
||||
let mut buf = PathBuf::new();
|
||||
|
||||
for segment in path.split('/') {
|
||||
if segment == ".." {
|
||||
buf.pop();
|
||||
} else if segment.starts_with('.') {
|
||||
return Err(UriSegmentError::BadStart('.'));
|
||||
} else if segment.starts_with('*') {
|
||||
return Err(UriSegmentError::BadStart('*'));
|
||||
} else if segment.ends_with(':') {
|
||||
return Err(UriSegmentError::BadEnd(':'));
|
||||
} else if segment.ends_with('>') {
|
||||
return Err(UriSegmentError::BadEnd('>'));
|
||||
} else if segment.ends_with('<') {
|
||||
return Err(UriSegmentError::BadEnd('<'));
|
||||
} else if segment.is_empty() {
|
||||
continue;
|
||||
} else if cfg!(windows) && segment.contains('\\') {
|
||||
return Err(UriSegmentError::BadChar('\\'));
|
||||
} else {
|
||||
buf.push(segment)
|
||||
}
|
||||
}
|
||||
|
||||
Ok(PathBufWrap(buf))
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<Path> for PathBufWrap {
|
||||
fn as_ref(&self) -> &Path {
|
||||
self.0.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl FromRequest for PathBufWrap {
|
||||
type Error = UriSegmentError;
|
||||
type Future = Ready<Result<Self, Self::Error>>;
|
||||
type Config = ();
|
||||
|
||||
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
|
||||
ready(req.match_info().path().parse())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::iter::FromIterator;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_path_buf() {
|
||||
assert_eq!(
|
||||
PathBufWrap::from_str("/test/.tt").map(|t| t.0),
|
||||
Err(UriSegmentError::BadStart('.'))
|
||||
);
|
||||
assert_eq!(
|
||||
PathBufWrap::from_str("/test/*tt").map(|t| t.0),
|
||||
Err(UriSegmentError::BadStart('*'))
|
||||
);
|
||||
assert_eq!(
|
||||
PathBufWrap::from_str("/test/tt:").map(|t| t.0),
|
||||
Err(UriSegmentError::BadEnd(':'))
|
||||
);
|
||||
assert_eq!(
|
||||
PathBufWrap::from_str("/test/tt<").map(|t| t.0),
|
||||
Err(UriSegmentError::BadEnd('<'))
|
||||
);
|
||||
assert_eq!(
|
||||
PathBufWrap::from_str("/test/tt>").map(|t| t.0),
|
||||
Err(UriSegmentError::BadEnd('>'))
|
||||
);
|
||||
assert_eq!(
|
||||
PathBufWrap::from_str("/seg1/seg2/").unwrap().0,
|
||||
PathBuf::from_iter(vec!["seg1", "seg2"])
|
||||
);
|
||||
assert_eq!(
|
||||
PathBufWrap::from_str("/seg1/../seg2/").unwrap().0,
|
||||
PathBuf::from_iter(vec!["seg2"])
|
||||
);
|
||||
}
|
||||
}
|
@ -1,11 +1,14 @@
|
||||
/// HTTP Range header representation.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct HttpRange {
|
||||
/// Start of range.
|
||||
pub start: u64,
|
||||
|
||||
/// Length of range.
|
||||
pub length: u64,
|
||||
}
|
||||
|
||||
static PREFIX: &str = "bytes=";
|
||||
const PREFIX: &str = "bytes=";
|
||||
const PREFIX_LEN: usize = 6;
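A small sketch of how the parser below is meant to be called; the header value and resource size are arbitrary.

```rust
use actix_files::HttpRange;

fn main() {
    // "bytes=0-9" against a 100-byte resource: one range starting at 0, 10 bytes long.
    let ranges = HttpRange::parse("bytes=0-9", 100).unwrap();
    assert_eq!(ranges[0].start, 0);
    assert_eq!(ranges[0].length, 10);
}
```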
|
||||
|
||||
impl HttpRange {
|
||||
|
actix-files/src/service.rs (new file, 167 lines)
@ -0,0 +1,167 @@
|
||||
use std::{
|
||||
fmt, io,
|
||||
path::PathBuf,
|
||||
rc::Rc,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
|
||||
use actix_service::Service;
|
||||
use actix_web::{
|
||||
dev::{ServiceRequest, ServiceResponse},
|
||||
error::Error,
|
||||
guard::Guard,
|
||||
http::{header, Method},
|
||||
HttpResponse,
|
||||
};
|
||||
use futures_util::future::{ok, Either, LocalBoxFuture, Ready};
|
||||
|
||||
use crate::{
|
||||
named, Directory, DirectoryRenderer, FilesError, HttpService, MimeOverride,
|
||||
NamedFile, PathBufWrap,
|
||||
};
|
||||
|
||||
/// Assembled file serving service.
|
||||
pub struct FilesService {
|
||||
pub(crate) directory: PathBuf,
|
||||
pub(crate) index: Option<String>,
|
||||
pub(crate) show_index: bool,
|
||||
pub(crate) redirect_to_slash: bool,
|
||||
pub(crate) default: Option<HttpService>,
|
||||
pub(crate) renderer: Rc<DirectoryRenderer>,
|
||||
pub(crate) mime_override: Option<Rc<MimeOverride>>,
|
||||
pub(crate) file_flags: named::Flags,
|
||||
pub(crate) guards: Option<Rc<dyn Guard>>,
|
||||
}
|
||||
|
||||
type FilesServiceFuture = Either<
|
||||
Ready<Result<ServiceResponse, Error>>,
|
||||
LocalBoxFuture<'static, Result<ServiceResponse, Error>>,
|
||||
>;
|
||||
|
||||
impl FilesService {
|
||||
fn handle_err(&mut self, e: io::Error, req: ServiceRequest) -> FilesServiceFuture {
|
||||
log::debug!("Failed to handle {}: {}", req.path(), e);
|
||||
|
||||
if let Some(ref mut default) = self.default {
|
||||
Either::Right(default.call(req))
|
||||
} else {
|
||||
Either::Left(ok(req.error_response(e)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for FilesService {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("FilesService")
|
||||
}
|
||||
}
|
||||
|
||||
impl Service for FilesService {
|
||||
type Request = ServiceRequest;
|
||||
type Response = ServiceResponse;
|
||||
type Error = Error;
|
||||
type Future = FilesServiceFuture;
|
||||
|
||||
fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
|
||||
Poll::Ready(Ok(()))
|
||||
}
|
||||
|
||||
fn call(&mut self, req: ServiceRequest) -> Self::Future {
|
||||
let is_method_valid = if let Some(guard) = &self.guards {
|
||||
// execute user defined guards
|
||||
(**guard).check(req.head())
|
||||
} else {
|
||||
// default behavior
|
||||
matches!(*req.method(), Method::HEAD | Method::GET)
|
||||
};
|
||||
|
||||
if !is_method_valid {
|
||||
return Either::Left(ok(req.into_response(
|
||||
actix_web::HttpResponse::MethodNotAllowed()
|
||||
.header(header::CONTENT_TYPE, "text/plain")
|
||||
.body("Request did not meet this resource's requirements."),
|
||||
)));
|
||||
}
|
||||
|
||||
let real_path: PathBufWrap = match req.match_info().path().parse() {
|
||||
Ok(item) => item,
|
||||
Err(e) => return Either::Left(ok(req.error_response(e))),
|
||||
};
|
||||
|
||||
// full file path
|
||||
let path = match self.directory.join(&real_path).canonicalize() {
|
||||
Ok(path) => path,
|
||||
Err(e) => return self.handle_err(e, req),
|
||||
};
|
||||
|
||||
if path.is_dir() {
|
||||
if let Some(ref redir_index) = self.index {
|
||||
if self.redirect_to_slash && !req.path().ends_with('/') {
|
||||
let redirect_to = format!("{}/", req.path());
|
||||
|
||||
return Either::Left(ok(req.into_response(
|
||||
HttpResponse::Found()
|
||||
.header(header::LOCATION, redirect_to)
|
||||
.body("")
|
||||
.into_body(),
|
||||
)));
|
||||
}
|
||||
|
||||
let path = path.join(redir_index);
|
||||
|
||||
match NamedFile::open(path) {
|
||||
Ok(mut named_file) => {
|
||||
if let Some(ref mime_override) = self.mime_override {
|
||||
let new_disposition =
|
||||
mime_override(&named_file.content_type.type_());
|
||||
named_file.content_disposition.disposition = new_disposition;
|
||||
}
|
||||
named_file.flags = self.file_flags;
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
Either::Left(ok(match named_file.into_response(&req) {
|
||||
Ok(item) => ServiceResponse::new(req, item),
|
||||
Err(e) => ServiceResponse::from_err(e, req),
|
||||
}))
|
||||
}
|
||||
Err(e) => self.handle_err(e, req),
|
||||
}
|
||||
} else if self.show_index {
|
||||
let dir = Directory::new(self.directory.clone(), path);
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
let x = (self.renderer)(&dir, &req);
|
||||
|
||||
match x {
|
||||
Ok(resp) => Either::Left(ok(resp)),
|
||||
Err(e) => Either::Left(ok(ServiceResponse::from_err(e, req))),
|
||||
}
|
||||
} else {
|
||||
Either::Left(ok(ServiceResponse::from_err(
|
||||
FilesError::IsDirectory,
|
||||
req.into_parts().0,
|
||||
)))
|
||||
}
|
||||
} else {
|
||||
match NamedFile::open(path) {
|
||||
Ok(mut named_file) => {
|
||||
if let Some(ref mime_override) = self.mime_override {
|
||||
let new_disposition =
|
||||
mime_override(&named_file.content_type.type_());
|
||||
named_file.content_disposition.disposition = new_disposition;
|
||||
}
|
||||
named_file.flags = self.file_flags;
|
||||
|
||||
let (req, _) = req.into_parts();
|
||||
match named_file.into_response(&req) {
|
||||
Ok(item) => {
|
||||
Either::Left(ok(ServiceResponse::new(req.clone(), item)))
|
||||
}
|
||||
Err(e) => Either::Left(ok(ServiceResponse::from_err(e, req))),
|
||||
}
|
||||
}
|
||||
Err(e) => self.handle_err(e, req),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -1,37 +0,0 @@
|
||||
[package]
|
||||
name = "actix-framed"
|
||||
version = "0.3.0"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Actix framed app server"
|
||||
readme = "README.md"
|
||||
keywords = ["http", "web", "framework", "async", "futures"]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
documentation = "https://docs.rs/actix-framed/"
|
||||
categories = ["network-programming", "asynchronous",
|
||||
"web-programming::http-server",
|
||||
"web-programming::websocket"]
|
||||
license = "MIT/Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
name = "actix_framed"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
actix-codec = "0.2.0"
|
||||
actix-service = "1.0.1"
|
||||
actix-router = "0.2.1"
|
||||
actix-rt = "1.0.0"
|
||||
actix-http = "2.0.0-alpha.4"
|
||||
|
||||
bytes = "0.5.3"
|
||||
futures-util = { version = "0.3.5", default-features = false }
|
||||
pin-project = "0.4.6"
|
||||
log = "0.4"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-server = "1.0.0"
|
||||
actix-connect = { version = "2.0.0-alpha.2", features = ["openssl"] }
|
||||
actix-http-test = { version = "2.0.0-alpha.1", features = ["openssl"] }
|
||||
actix-utils = "1.0.3"
|
@ -1,201 +0,0 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2017-NOW Nikolay Kim
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
@ -1,25 +0,0 @@
|
||||
Copyright (c) 2017 Nikolay Kim
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
@ -1,8 +0,0 @@
|
||||
# Framed app for actix web [](https://travis-ci.org/actix/actix-web) [](https://codecov.io/gh/actix/actix-web) [](https://crates.io/crates/actix-framed) [](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
|
||||
## Documentation & community resources
|
||||
|
||||
* [API Documentation](https://docs.rs/actix-framed/)
|
||||
* [Chat on gitter](https://gitter.im/actix/actix)
|
||||
* Cargo package: [actix-framed](https://crates.io/crates/actix-framed)
|
||||
* Minimum supported Rust version: 1.40 or later
|
@ -1,28 +0,0 @@
|
||||
# Changes
|
||||
|
||||
## [Unreleased] - 2020-xx-xx
|
||||
|
||||
* Bump minimum supported Rust version to 1.40
|
||||
|
||||
## [0.3.0] - 2019-12-25
|
||||
|
||||
* Migrate to actix-http 1.0
|
||||
|
||||
## [0.2.1] - 2019-07-20
|
||||
|
||||
* Remove unneeded actix-utils dependency
|
||||
|
||||
|
||||
## [0.2.0] - 2019-05-12
|
||||
|
||||
* Update dependencies
|
||||
|
||||
|
||||
## [0.1.0] - 2019-04-16
|
||||
|
||||
* Update tests
|
||||
|
||||
|
||||
## [0.1.0-alpha.1] - 2019-04-12
|
||||
|
||||
* Initial release
|
@ -1,221 +0,0 @@
|
||||
use std::future::Future;
|
||||
use std::pin::Pin;
|
||||
use std::rc::Rc;
|
||||
use std::task::{Context, Poll};
|
||||
|
||||
use actix_codec::{AsyncRead, AsyncWrite, Framed};
|
||||
use actix_http::h1::{Codec, SendResponse};
|
||||
use actix_http::{Error, Request, Response};
|
||||
use actix_router::{Path, Router, Url};
|
||||
use actix_service::{IntoServiceFactory, Service, ServiceFactory};
|
||||
use futures_util::future::{ok, FutureExt, LocalBoxFuture};
|
||||
|
||||
use crate::helpers::{BoxedHttpNewService, BoxedHttpService, HttpNewService};
|
||||
use crate::request::FramedRequest;
|
||||
use crate::state::State;
|
||||
|
||||
type BoxedResponse = LocalBoxFuture<'static, Result<(), Error>>;
|
||||
|
||||
pub trait HttpServiceFactory {
|
||||
type Factory: ServiceFactory;
|
||||
|
||||
fn path(&self) -> &str;
|
||||
|
||||
fn create(self) -> Self::Factory;
|
||||
}
|
||||
|
||||
/// Application builder
|
||||
pub struct FramedApp<T, S = ()> {
|
||||
state: State<S>,
|
||||
services: Vec<(String, BoxedHttpNewService<FramedRequest<T, S>>)>,
|
||||
}
|
||||
|
||||
impl<T: 'static> FramedApp<T, ()> {
|
||||
pub fn new() -> Self {
|
||||
FramedApp {
|
||||
state: State::new(()),
|
||||
services: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static, S: 'static> FramedApp<T, S> {
|
||||
pub fn with(state: S) -> FramedApp<T, S> {
|
||||
FramedApp {
|
||||
services: Vec::new(),
|
||||
state: State::new(state),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn service<U>(mut self, factory: U) -> Self
|
||||
where
|
||||
U: HttpServiceFactory,
|
||||
U::Factory: ServiceFactory<
|
||||
Config = (),
|
||||
Request = FramedRequest<T, S>,
|
||||
Response = (),
|
||||
Error = Error,
|
||||
InitError = (),
|
||||
> + 'static,
|
||||
<U::Factory as ServiceFactory>::Future: 'static,
|
||||
<U::Factory as ServiceFactory>::Service: Service<
|
||||
Request = FramedRequest<T, S>,
|
||||
Response = (),
|
||||
Error = Error,
|
||||
Future = LocalBoxFuture<'static, Result<(), Error>>,
|
||||
>,
|
||||
{
|
||||
let path = factory.path().to_string();
|
||||
self.services
|
||||
.push((path, Box::new(HttpNewService::new(factory.create()))));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> IntoServiceFactory<FramedAppFactory<T, S>> for FramedApp<T, S>
|
||||
where
|
||||
T: AsyncRead + AsyncWrite + Unpin + 'static,
|
||||
S: 'static,
|
||||
{
|
||||
fn into_factory(self) -> FramedAppFactory<T, S> {
|
||||
FramedAppFactory {
|
||||
state: self.state,
|
||||
services: Rc::new(self.services),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct FramedAppFactory<T, S> {
|
||||
state: State<S>,
|
||||
services: Rc<Vec<(String, BoxedHttpNewService<FramedRequest<T, S>>)>>,
|
||||
}
|
||||
|
||||
impl<T, S> ServiceFactory for FramedAppFactory<T, S>
|
||||
where
|
||||
T: AsyncRead + AsyncWrite + Unpin + 'static,
|
||||
S: 'static,
|
||||
{
|
||||
type Config = ();
|
||||
type Request = (Request, Framed<T, Codec>);
|
||||
type Response = ();
|
||||
type Error = Error;
|
||||
type InitError = ();
|
||||
type Service = FramedAppService<T, S>;
|
||||
type Future = CreateService<T, S>;
|
||||
|
||||
fn new_service(&self, _: ()) -> Self::Future {
|
||||
CreateService {
|
||||
fut: self
|
||||
.services
|
||||
.iter()
|
||||
.map(|(path, service)| {
|
||||
CreateServiceItem::Future(
|
||||
Some(path.clone()),
|
||||
service.new_service(()),
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
state: self.state.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub struct CreateService<T, S> {
|
||||
fut: Vec<CreateServiceItem<T, S>>,
|
||||
state: State<S>,
|
||||
}
|
||||
|
||||
enum CreateServiceItem<T, S> {
|
||||
Future(
|
||||
Option<String>,
|
||||
LocalBoxFuture<'static, Result<BoxedHttpService<FramedRequest<T, S>>, ()>>,
|
||||
),
|
||||
Service(String, BoxedHttpService<FramedRequest<T, S>>),
|
||||
}
|
||||
|
||||
impl<S: 'static, T: 'static> Future for CreateService<T, S>
|
||||
where
|
||||
T: AsyncRead + AsyncWrite + Unpin,
|
||||
{
|
||||
type Output = Result<FramedAppService<T, S>, ()>;
|
||||
|
||||
fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
|
||||
let mut done = true;
|
||||
|
||||
// poll http services
|
||||
for item in &mut self.fut {
|
||||
let res = match item {
|
||||
CreateServiceItem::Future(ref mut path, ref mut fut) => {
|
||||
match Pin::new(fut).poll(cx) {
|
||||
Poll::Ready(Ok(service)) => {
|
||||
Some((path.take().unwrap(), service))
|
||||
}
|
||||
Poll::Ready(Err(e)) => return Poll::Ready(Err(e)),
|
||||
Poll::Pending => {
|
||||
done = false;
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
CreateServiceItem::Service(_, _) => continue,
|
||||
};
|
||||
|
||||
if let Some((path, service)) = res {
|
||||
*item = CreateServiceItem::Service(path, service);
|
||||
}
|
||||
}
|
||||
|
||||
if done {
|
||||
let router = self
|
||||
.fut
|
||||
.drain(..)
|
||||
.fold(Router::build(), |mut router, item| {
|
||||
match item {
|
||||
CreateServiceItem::Service(path, service) => {
|
||||
router.path(&path, service);
|
||||
}
|
||||
CreateServiceItem::Future(_, _) => unreachable!(),
|
||||
}
|
||||
router
|
||||
});
|
||||
Poll::Ready(Ok(FramedAppService {
|
||||
router: router.finish(),
|
||||
state: self.state.clone(),
|
||||
}))
|
||||
} else {
|
||||
Poll::Pending
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FramedAppService<T, S> {
|
||||
state: State<S>,
|
||||
router: Router<BoxedHttpService<FramedRequest<T, S>>>,
|
||||
}
|
||||
|
||||
impl<S: 'static, T: 'static> Service for FramedAppService<T, S>
|
||||
where
|
||||
T: AsyncRead + AsyncWrite + Unpin,
|
||||
{
|
||||
type Request = (Request, Framed<T, Codec>);
|
||||
type Response = ();
|
||||
type Error = Error;
|
||||
type Future = BoxedResponse;
|
||||
|
||||
fn poll_ready(&mut self, _: &mut Context) -> Poll<Result<(), Self::Error>> {
|
||||
Poll::Ready(Ok(()))
|
||||
}
|
||||
|
||||
fn call(&mut self, (req, framed): (Request, Framed<T, Codec>)) -> Self::Future {
|
||||
let mut path = Path::new(Url::new(req.uri().clone()));
|
||||
|
||||
if let Some((srv, _info)) = self.router.recognize_mut(&mut path) {
|
||||
return srv.call(FramedRequest::new(req, framed, path, self.state.clone()));
|
||||
}
|
||||
SendResponse::new(framed, Response::NotFound().finish())
|
||||
.then(|_| ok(()))
|
||||
.boxed_local()
|
||||
}
|
||||
}
|
@ -1,98 +0,0 @@
|
||||
use std::task::{Context, Poll};
|
||||
|
||||
use actix_http::Error;
|
||||
use actix_service::{Service, ServiceFactory};
|
||||
use futures_util::future::{FutureExt, LocalBoxFuture};
|
||||
|
||||
pub(crate) type BoxedHttpService<Req> = Box<
|
||||
dyn Service<
|
||||
Request = Req,
|
||||
Response = (),
|
||||
Error = Error,
|
||||
Future = LocalBoxFuture<'static, Result<(), Error>>,
|
||||
>,
|
||||
>;
|
||||
|
||||
pub(crate) type BoxedHttpNewService<Req> = Box<
|
||||
dyn ServiceFactory<
|
||||
Config = (),
|
||||
Request = Req,
|
||||
Response = (),
|
||||
Error = Error,
|
||||
InitError = (),
|
||||
Service = BoxedHttpService<Req>,
|
||||
Future = LocalBoxFuture<'static, Result<BoxedHttpService<Req>, ()>>,
|
||||
>,
|
||||
>;
|
||||
|
||||
pub(crate) struct HttpNewService<T: ServiceFactory>(T);
|
||||
|
||||
impl<T> HttpNewService<T>
|
||||
where
|
||||
T: ServiceFactory<Response = (), Error = Error>,
|
||||
T::Response: 'static,
|
||||
T::Future: 'static,
|
||||
T::Service: Service<Future = LocalBoxFuture<'static, Result<(), Error>>> + 'static,
|
||||
<T::Service as Service>::Future: 'static,
|
||||
{
|
||||
pub fn new(service: T) -> Self {
|
||||
HttpNewService(service)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> ServiceFactory for HttpNewService<T>
|
||||
where
|
||||
T: ServiceFactory<Config = (), Response = (), Error = Error>,
|
||||
T::Request: 'static,
|
||||
T::Future: 'static,
|
||||
T::Service: Service<Future = LocalBoxFuture<'static, Result<(), Error>>> + 'static,
|
||||
<T::Service as Service>::Future: 'static,
|
||||
{
|
||||
type Config = ();
|
||||
type Request = T::Request;
|
||||
type Response = ();
|
||||
type Error = Error;
|
||||
type InitError = ();
|
||||
type Service = BoxedHttpService<T::Request>;
|
||||
type Future = LocalBoxFuture<'static, Result<Self::Service, ()>>;
|
||||
|
||||
fn new_service(&self, _: ()) -> Self::Future {
|
||||
let fut = self.0.new_service(());
|
||||
|
||||
async move {
|
||||
fut.await.map_err(|_| ()).map(|service| {
|
||||
let service: BoxedHttpService<_> =
|
||||
Box::new(HttpServiceWrapper { service });
|
||||
service
|
||||
})
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
}
|
||||
|
||||
struct HttpServiceWrapper<T: Service> {
|
||||
service: T,
|
||||
}
|
||||
|
||||
impl<T> Service for HttpServiceWrapper<T>
|
||||
where
|
||||
T: Service<
|
||||
Response = (),
|
||||
Future = LocalBoxFuture<'static, Result<(), Error>>,
|
||||
Error = Error,
|
||||
>,
|
||||
T::Request: 'static,
|
||||
{
|
||||
type Request = T::Request;
|
||||
type Response = ();
|
||||
type Error = Error;
|
||||
type Future = LocalBoxFuture<'static, Result<(), Error>>;
|
||||
|
||||
fn poll_ready(&mut self, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
|
||||
self.service.poll_ready(cx)
|
||||
}
|
||||
|
||||
fn call(&mut self, req: Self::Request) -> Self::Future {
|
||||
self.service.call(req)
|
||||
}
|
||||
}
|
@ -1,17 +0,0 @@
#![allow(clippy::type_complexity, clippy::new_without_default, dead_code)]
mod app;
mod helpers;
mod request;
mod route;
mod service;
mod state;
pub mod test;

// re-export for convenience
pub use actix_http::{http, Error, HttpMessage, Response, ResponseError};

pub use self::app::{FramedApp, FramedAppService};
pub use self::request::FramedRequest;
pub use self::route::FramedRoute;
pub use self::service::{SendError, VerifyWebSockets};
pub use self::state::State;
@ -1,172 +0,0 @@
|
||||
use std::cell::{Ref, RefMut};
|
||||
|
||||
use actix_codec::Framed;
|
||||
use actix_http::http::{HeaderMap, Method, Uri, Version};
|
||||
use actix_http::{h1::Codec, Extensions, Request, RequestHead};
|
||||
use actix_router::{Path, Url};
|
||||
|
||||
use crate::state::State;
|
||||
|
||||
pub struct FramedRequest<Io, S = ()> {
|
||||
req: Request,
|
||||
framed: Framed<Io, Codec>,
|
||||
state: State<S>,
|
||||
pub(crate) path: Path<Url>,
|
||||
}
|
||||
|
||||
impl<Io, S> FramedRequest<Io, S> {
|
||||
pub fn new(
|
||||
req: Request,
|
||||
framed: Framed<Io, Codec>,
|
||||
path: Path<Url>,
|
||||
state: State<S>,
|
||||
) -> Self {
|
||||
Self {
|
||||
req,
|
||||
framed,
|
||||
state,
|
||||
path,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Io, S> FramedRequest<Io, S> {
|
||||
/// Split the request into its parts
|
||||
pub fn into_parts(self) -> (Request, Framed<Io, Codec>, State<S>) {
|
||||
(self.req, self.framed, self.state)
|
||||
}
|
||||
|
||||
/// Returns a reference to the request head.
|
||||
#[inline]
|
||||
pub fn head(&self) -> &RequestHead {
|
||||
self.req.head()
|
||||
}
|
||||
|
||||
/// Returns a mutable reference to the request head.
///
/// Panics if multiple references to the http request exist.
|
||||
#[inline]
|
||||
pub fn head_mut(&mut self) -> &mut RequestHead {
|
||||
self.req.head_mut()
|
||||
}
|
||||
|
||||
/// Shared application state
|
||||
#[inline]
|
||||
pub fn state(&self) -> &S {
|
||||
self.state.get_ref()
|
||||
}
|
||||
|
||||
/// Request's uri.
|
||||
#[inline]
|
||||
pub fn uri(&self) -> &Uri {
|
||||
&self.head().uri
|
||||
}
|
||||
|
||||
/// Read the Request method.
|
||||
#[inline]
|
||||
pub fn method(&self) -> &Method {
|
||||
&self.head().method
|
||||
}
|
||||
|
||||
/// Read the Request Version.
|
||||
#[inline]
|
||||
pub fn version(&self) -> Version {
|
||||
self.head().version
|
||||
}
|
||||
|
||||
#[inline]
|
||||
/// Returns request's headers.
|
||||
pub fn headers(&self) -> &HeaderMap {
|
||||
&self.head().headers
|
||||
}
|
||||
|
||||
/// The target path of this Request.
|
||||
#[inline]
|
||||
pub fn path(&self) -> &str {
|
||||
self.head().uri.path()
|
||||
}
|
||||
|
||||
/// The query string in the URL.
|
||||
///
|
||||
/// E.g., id=10
|
||||
#[inline]
|
||||
pub fn query_string(&self) -> &str {
|
||||
if let Some(query) = self.uri().query().as_ref() {
|
||||
query
|
||||
} else {
|
||||
""
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a reference to the Path parameters.
|
||||
///
|
||||
/// Params is a container for url parameters.
|
||||
/// A variable segment is specified in the form `{identifier}`,
|
||||
/// where the identifier can be used later in a request handler to
|
||||
/// access the matched value for that segment.
|
||||
#[inline]
|
||||
pub fn match_info(&self) -> &Path<Url> {
|
||||
&self.path
|
||||
}
|
||||
|
||||
/// Request extensions
|
||||
#[inline]
|
||||
pub fn extensions(&self) -> Ref<Extensions> {
|
||||
self.head().extensions()
|
||||
}
|
||||
|
||||
/// Mutable reference to the request's extensions
|
||||
#[inline]
|
||||
pub fn extensions_mut(&self) -> RefMut<Extensions> {
|
||||
self.head().extensions_mut()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::convert::TryFrom;
|
||||
|
||||
use actix_http::http::{HeaderName, HeaderValue};
|
||||
use actix_http::test::{TestBuffer, TestRequest};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_request() {
|
||||
let buf = TestBuffer::empty();
|
||||
let framed = Framed::new(buf, Codec::default());
|
||||
let req = TestRequest::with_uri("/index.html?q=1")
|
||||
.header("content-type", "test")
|
||||
.finish();
|
||||
let path = Path::new(Url::new(req.uri().clone()));
|
||||
|
||||
let mut freq = FramedRequest::new(req, framed, path, State::new(10u8));
|
||||
assert_eq!(*freq.state(), 10);
|
||||
assert_eq!(freq.version(), Version::HTTP_11);
|
||||
assert_eq!(freq.method(), Method::GET);
|
||||
assert_eq!(freq.path(), "/index.html");
|
||||
assert_eq!(freq.query_string(), "q=1");
|
||||
assert_eq!(
|
||||
freq.headers()
|
||||
.get("content-type")
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap(),
|
||||
"test"
|
||||
);
|
||||
|
||||
freq.head_mut().headers.insert(
|
||||
HeaderName::try_from("x-hdr").unwrap(),
|
||||
HeaderValue::from_static("test"),
|
||||
);
|
||||
assert_eq!(
|
||||
freq.headers().get("x-hdr").unwrap().to_str().unwrap(),
|
||||
"test"
|
||||
);
|
||||
|
||||
freq.extensions_mut().insert(100usize);
|
||||
assert_eq!(*freq.extensions().get::<usize>().unwrap(), 100usize);
|
||||
|
||||
let (_, _, state) = freq.into_parts();
|
||||
assert_eq!(*state, 10);
|
||||
}
|
||||
}
|
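For orientation, here is a minimal handler sketch against the `FramedRequest` accessors shown above. The handler name, state type, and route parameter are hypothetical and only illustrate how `state()`, `match_info()`, and `query_string()` might be used:

```rust
use actix_framed::{Error, FramedRequest};

// Hypothetical shared application state.
struct AppState {
    greeting: &'static str,
}

// The accessors need no bounds on the IO type, so the handler can stay generic over it.
async fn greet<Io>(req: FramedRequest<Io, AppState>) -> Result<(), Error> {
    // `match_info()` exposes matched path segments such as `{name}`.
    let name = req.match_info().get("name").unwrap_or("world");
    println!("{}, {} (query: {:?})", req.state().greeting, name, req.query_string());
    Ok(())
}
```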
@ -1,159 +0,0 @@
|
||||
use std::fmt;
|
||||
use std::future::Future;
|
||||
use std::marker::PhantomData;
|
||||
use std::task::{Context, Poll};
|
||||
|
||||
use actix_codec::{AsyncRead, AsyncWrite};
|
||||
use actix_http::{http::Method, Error};
|
||||
use actix_service::{Service, ServiceFactory};
|
||||
use futures_util::future::{ok, FutureExt, LocalBoxFuture, Ready};
|
||||
use log::error;
|
||||
|
||||
use crate::app::HttpServiceFactory;
|
||||
use crate::request::FramedRequest;
|
||||
|
||||
/// Resource route definition
|
||||
///
|
||||
/// A route uses a builder-like pattern for configuration.
/// If a handler is not explicitly set, the default *404 Not Found* handler is used.
|
||||
pub struct FramedRoute<Io, S, F = (), R = (), E = ()> {
|
||||
handler: F,
|
||||
pattern: String,
|
||||
methods: Vec<Method>,
|
||||
state: PhantomData<(Io, S, R, E)>,
|
||||
}
|
||||
|
||||
impl<Io, S> FramedRoute<Io, S> {
|
||||
pub fn new(pattern: &str) -> Self {
|
||||
FramedRoute {
|
||||
handler: (),
|
||||
pattern: pattern.to_string(),
|
||||
methods: Vec::new(),
|
||||
state: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get(path: &str) -> FramedRoute<Io, S> {
|
||||
FramedRoute::new(path).method(Method::GET)
|
||||
}
|
||||
|
||||
pub fn post(path: &str) -> FramedRoute<Io, S> {
|
||||
FramedRoute::new(path).method(Method::POST)
|
||||
}
|
||||
|
||||
pub fn put(path: &str) -> FramedRoute<Io, S> {
|
||||
FramedRoute::new(path).method(Method::PUT)
|
||||
}
|
||||
|
||||
pub fn delete(path: &str) -> FramedRoute<Io, S> {
|
||||
FramedRoute::new(path).method(Method::DELETE)
|
||||
}
|
||||
|
||||
pub fn method(mut self, method: Method) -> Self {
|
||||
self.methods.push(method);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn to<F, R, E>(self, handler: F) -> FramedRoute<Io, S, F, R, E>
|
||||
where
|
||||
F: FnMut(FramedRequest<Io, S>) -> R,
|
||||
R: Future<Output = Result<(), E>> + 'static,
|
||||
|
||||
E: fmt::Debug,
|
||||
{
|
||||
FramedRoute {
|
||||
handler,
|
||||
pattern: self.pattern,
|
||||
methods: self.methods,
|
||||
state: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Io, S, F, R, E> HttpServiceFactory for FramedRoute<Io, S, F, R, E>
|
||||
where
|
||||
Io: AsyncRead + AsyncWrite + 'static,
|
||||
F: FnMut(FramedRequest<Io, S>) -> R + Clone,
|
||||
R: Future<Output = Result<(), E>> + 'static,
|
||||
E: fmt::Display,
|
||||
{
|
||||
type Factory = FramedRouteFactory<Io, S, F, R, E>;
|
||||
|
||||
fn path(&self) -> &str {
|
||||
&self.pattern
|
||||
}
|
||||
|
||||
fn create(self) -> Self::Factory {
|
||||
FramedRouteFactory {
|
||||
handler: self.handler,
|
||||
methods: self.methods,
|
||||
_t: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FramedRouteFactory<Io, S, F, R, E> {
|
||||
handler: F,
|
||||
methods: Vec<Method>,
|
||||
_t: PhantomData<(Io, S, R, E)>,
|
||||
}
|
||||
|
||||
impl<Io, S, F, R, E> ServiceFactory for FramedRouteFactory<Io, S, F, R, E>
|
||||
where
|
||||
Io: AsyncRead + AsyncWrite + 'static,
|
||||
F: FnMut(FramedRequest<Io, S>) -> R + Clone,
|
||||
R: Future<Output = Result<(), E>> + 'static,
|
||||
E: fmt::Display,
|
||||
{
|
||||
type Config = ();
|
||||
type Request = FramedRequest<Io, S>;
|
||||
type Response = ();
|
||||
type Error = Error;
|
||||
type InitError = ();
|
||||
type Service = FramedRouteService<Io, S, F, R, E>;
|
||||
type Future = Ready<Result<Self::Service, Self::InitError>>;
|
||||
|
||||
fn new_service(&self, _: ()) -> Self::Future {
|
||||
ok(FramedRouteService {
|
||||
handler: self.handler.clone(),
|
||||
methods: self.methods.clone(),
|
||||
_t: PhantomData,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FramedRouteService<Io, S, F, R, E> {
|
||||
handler: F,
|
||||
methods: Vec<Method>,
|
||||
_t: PhantomData<(Io, S, R, E)>,
|
||||
}
|
||||
|
||||
impl<Io, S, F, R, E> Service for FramedRouteService<Io, S, F, R, E>
|
||||
where
|
||||
Io: AsyncRead + AsyncWrite + 'static,
|
||||
F: FnMut(FramedRequest<Io, S>) -> R + Clone,
|
||||
R: Future<Output = Result<(), E>> + 'static,
|
||||
E: fmt::Display,
|
||||
{
|
||||
type Request = FramedRequest<Io, S>;
|
||||
type Response = ();
|
||||
type Error = Error;
|
||||
type Future = LocalBoxFuture<'static, Result<(), Error>>;
|
||||
|
||||
fn poll_ready(&mut self, _: &mut Context) -> Poll<Result<(), Self::Error>> {
|
||||
Poll::Ready(Ok(()))
|
||||
}
|
||||
|
||||
fn call(&mut self, req: FramedRequest<Io, S>) -> Self::Future {
|
||||
let fut = (self.handler)(req);
|
||||
|
||||
async move {
|
||||
let res = fut.await;
|
||||
if let Err(e) = res {
|
||||
error!("Error in request handler: {}", e);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
}
|
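Putting the route builder together with `FramedApp`, mirroring how the integration test later in this diff wires things up (the `index` handler here is a hypothetical stand-in):

```rust
use actix_codec::{AsyncRead, AsyncWrite};
use actix_framed::{Error, FramedApp, FramedRequest, FramedRoute};
use actix_http::{HttpService, Response};
use futures_util::future;

async fn index<T: AsyncRead + AsyncWrite>(_req: FramedRequest<T>) -> Result<(), Error> {
    // A real handler would write a response onto the framed connection here.
    Ok(())
}

fn build_service() {
    // Upgraded (WebSocket) requests are routed by FramedApp; anything else gets a 404.
    let _factory = HttpService::build()
        .upgrade(FramedApp::new().service(FramedRoute::get("/index.html").to(index)))
        .finish(|_| future::ok::<_, Error>(Response::NotFound()))
        .tcp();
}
```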
@ -1,156 +0,0 @@
|
||||
use std::future::Future;
|
||||
use std::marker::PhantomData;
|
||||
use std::pin::Pin;
|
||||
use std::task::{Context, Poll};
|
||||
|
||||
use actix_codec::{AsyncRead, AsyncWrite, Framed};
|
||||
use actix_http::body::BodySize;
|
||||
use actix_http::error::ResponseError;
|
||||
use actix_http::h1::{Codec, Message};
|
||||
use actix_http::ws::{verify_handshake, HandshakeError};
|
||||
use actix_http::{Request, Response};
|
||||
use actix_service::{Service, ServiceFactory};
|
||||
use futures_util::future::{err, ok, Either, Ready};
|
||||
|
||||
/// Service that verifies an incoming request is a valid WebSocket
/// upgrade request. In case of error it returns a `HandshakeError`.
|
||||
pub struct VerifyWebSockets<T, C> {
|
||||
_t: PhantomData<(T, C)>,
|
||||
}
|
||||
|
||||
impl<T, C> Default for VerifyWebSockets<T, C> {
|
||||
fn default() -> Self {
|
||||
VerifyWebSockets { _t: PhantomData }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, C> ServiceFactory for VerifyWebSockets<T, C> {
|
||||
type Config = C;
|
||||
type Request = (Request, Framed<T, Codec>);
|
||||
type Response = (Request, Framed<T, Codec>);
|
||||
type Error = (HandshakeError, Framed<T, Codec>);
|
||||
type InitError = ();
|
||||
type Service = VerifyWebSockets<T, C>;
|
||||
type Future = Ready<Result<Self::Service, Self::InitError>>;
|
||||
|
||||
fn new_service(&self, _: C) -> Self::Future {
|
||||
ok(VerifyWebSockets { _t: PhantomData })
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, C> Service for VerifyWebSockets<T, C> {
|
||||
type Request = (Request, Framed<T, Codec>);
|
||||
type Response = (Request, Framed<T, Codec>);
|
||||
type Error = (HandshakeError, Framed<T, Codec>);
|
||||
type Future = Ready<Result<Self::Response, Self::Error>>;
|
||||
|
||||
fn poll_ready(&mut self, _: &mut Context) -> Poll<Result<(), Self::Error>> {
|
||||
Poll::Ready(Ok(()))
|
||||
}
|
||||
|
||||
fn call(&mut self, (req, framed): (Request, Framed<T, Codec>)) -> Self::Future {
|
||||
match verify_handshake(req.head()) {
|
||||
Err(e) => err((e, framed)),
|
||||
Ok(_) => ok((req, framed)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Service that sends an HTTP/1 error response.
|
||||
pub struct SendError<T, R, E, C>(PhantomData<(T, R, E, C)>);
|
||||
|
||||
impl<T, R, E, C> Default for SendError<T, R, E, C>
|
||||
where
|
||||
T: AsyncRead + AsyncWrite,
|
||||
E: ResponseError,
|
||||
{
|
||||
fn default() -> Self {
|
||||
SendError(PhantomData)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, R, E, C> ServiceFactory for SendError<T, R, E, C>
|
||||
where
|
||||
T: AsyncRead + AsyncWrite + Unpin + 'static,
|
||||
R: 'static,
|
||||
E: ResponseError + 'static,
|
||||
{
|
||||
type Config = C;
|
||||
type Request = Result<R, (E, Framed<T, Codec>)>;
|
||||
type Response = R;
|
||||
type Error = (E, Framed<T, Codec>);
|
||||
type InitError = ();
|
||||
type Service = SendError<T, R, E, C>;
|
||||
type Future = Ready<Result<Self::Service, Self::InitError>>;
|
||||
|
||||
fn new_service(&self, _: C) -> Self::Future {
|
||||
ok(SendError(PhantomData))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, R, E, C> Service for SendError<T, R, E, C>
|
||||
where
|
||||
T: AsyncRead + AsyncWrite + Unpin + 'static,
|
||||
R: 'static,
|
||||
E: ResponseError + 'static,
|
||||
{
|
||||
type Request = Result<R, (E, Framed<T, Codec>)>;
|
||||
type Response = R;
|
||||
type Error = (E, Framed<T, Codec>);
|
||||
type Future = Either<Ready<Result<R, (E, Framed<T, Codec>)>>, SendErrorFut<T, R, E>>;
|
||||
|
||||
fn poll_ready(&mut self, _: &mut Context) -> Poll<Result<(), Self::Error>> {
|
||||
Poll::Ready(Ok(()))
|
||||
}
|
||||
|
||||
fn call(&mut self, req: Result<R, (E, Framed<T, Codec>)>) -> Self::Future {
|
||||
match req {
|
||||
Ok(r) => Either::Left(ok(r)),
|
||||
Err((e, framed)) => {
|
||||
let res = e.error_response().drop_body();
|
||||
Either::Right(SendErrorFut {
|
||||
framed: Some(framed),
|
||||
res: Some((res, BodySize::Empty).into()),
|
||||
err: Some(e),
|
||||
_t: PhantomData,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[pin_project::pin_project]
|
||||
pub struct SendErrorFut<T, R, E> {
|
||||
res: Option<Message<(Response<()>, BodySize)>>,
|
||||
framed: Option<Framed<T, Codec>>,
|
||||
err: Option<E>,
|
||||
_t: PhantomData<R>,
|
||||
}
|
||||
|
||||
impl<T, R, E> Future for SendErrorFut<T, R, E>
|
||||
where
|
||||
E: ResponseError,
|
||||
T: AsyncRead + AsyncWrite + Unpin,
|
||||
{
|
||||
type Output = Result<R, (E, Framed<T, Codec>)>;
|
||||
|
||||
fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
|
||||
if let Some(res) = self.res.take() {
|
||||
if self.framed.as_mut().unwrap().write(res).is_err() {
|
||||
return Poll::Ready(Err((
|
||||
self.err.take().unwrap(),
|
||||
self.framed.take().unwrap(),
|
||||
)));
|
||||
}
|
||||
}
|
||||
match self.framed.as_mut().unwrap().flush(cx) {
|
||||
Poll::Ready(Ok(_)) => {
|
||||
Poll::Ready(Err((self.err.take().unwrap(), self.framed.take().unwrap())))
|
||||
}
|
||||
Poll::Ready(Err(_)) => {
|
||||
Poll::Ready(Err((self.err.take().unwrap(), self.framed.take().unwrap())))
|
||||
}
|
||||
Poll::Pending => Poll::Pending,
|
||||
}
|
||||
}
|
||||
}
|
@ -1,29 +0,0 @@
use std::ops::Deref;
use std::sync::Arc;

/// Application state
pub struct State<S>(Arc<S>);

impl<S> State<S> {
    pub fn new(state: S) -> State<S> {
        State(Arc::new(state))
    }

    pub fn get_ref(&self) -> &S {
        self.0.as_ref()
    }
}

impl<S> Deref for State<S> {
    type Target = S;

    fn deref(&self) -> &S {
        self.0.as_ref()
    }
}

impl<S> Clone for State<S> {
    fn clone(&self) -> State<S> {
        State(self.0.clone())
    }
}
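A small, self-contained sketch of how `State` behaves: it is an `Arc` wrapper, so clones are cheap and `Deref` gives direct access to the inner value.

```rust
use actix_framed::State;

fn main() {
    let state = State::new(vec![1, 2, 3]);

    // Clone shares the same Arc-backed allocation.
    let shared = state.clone();

    // Deref lets the inner Vec's methods be called directly.
    assert_eq!(shared.len(), 3);
    assert_eq!(*state.get_ref(), vec![1, 2, 3]);
}
```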
@ -1,155 +0,0 @@
|
||||
//! Various helpers for Actix applications to use during testing.
|
||||
use std::convert::TryFrom;
|
||||
use std::future::Future;
|
||||
|
||||
use actix_codec::Framed;
|
||||
use actix_http::h1::Codec;
|
||||
use actix_http::http::header::{Header, HeaderName, IntoHeaderValue};
|
||||
use actix_http::http::{Error as HttpError, Method, Uri, Version};
|
||||
use actix_http::test::{TestBuffer, TestRequest as HttpTestRequest};
|
||||
use actix_router::{Path, Url};
|
||||
|
||||
use crate::{FramedRequest, State};
|
||||
|
||||
/// Test `Request` builder.
|
||||
pub struct TestRequest<S = ()> {
|
||||
req: HttpTestRequest,
|
||||
path: Path<Url>,
|
||||
state: State<S>,
|
||||
}
|
||||
|
||||
impl Default for TestRequest<()> {
|
||||
fn default() -> TestRequest {
|
||||
TestRequest {
|
||||
req: HttpTestRequest::default(),
|
||||
path: Path::new(Url::new(Uri::default())),
|
||||
state: State::new(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TestRequest<()> {
|
||||
/// Create TestRequest and set request uri
|
||||
pub fn with_uri(path: &str) -> Self {
|
||||
Self::get().uri(path)
|
||||
}
|
||||
|
||||
/// Create TestRequest and set header
|
||||
pub fn with_hdr<H: Header>(hdr: H) -> Self {
|
||||
Self::default().set(hdr)
|
||||
}
|
||||
|
||||
/// Create TestRequest and set header
|
||||
pub fn with_header<K, V>(key: K, value: V) -> Self
|
||||
where
|
||||
HeaderName: TryFrom<K>,
|
||||
<HeaderName as TryFrom<K>>::Error: Into<HttpError>,
|
||||
V: IntoHeaderValue,
|
||||
{
|
||||
Self::default().header(key, value)
|
||||
}
|
||||
|
||||
/// Create TestRequest and set method to `Method::GET`
|
||||
pub fn get() -> Self {
|
||||
Self::default().method(Method::GET)
|
||||
}
|
||||
|
||||
/// Create TestRequest and set method to `Method::POST`
|
||||
pub fn post() -> Self {
|
||||
Self::default().method(Method::POST)
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> TestRequest<S> {
|
||||
/// Create a TestRequest with the given application state
|
||||
pub fn with_state(state: S) -> TestRequest<S> {
|
||||
let req = TestRequest::get();
|
||||
TestRequest {
|
||||
state: State::new(state),
|
||||
req: req.req,
|
||||
path: req.path,
|
||||
}
|
||||
}
|
||||
|
||||
/// Set HTTP version of this request
|
||||
pub fn version(mut self, ver: Version) -> Self {
|
||||
self.req.version(ver);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set HTTP method of this request
|
||||
pub fn method(mut self, meth: Method) -> Self {
|
||||
self.req.method(meth);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set HTTP Uri of this request
|
||||
pub fn uri(mut self, path: &str) -> Self {
|
||||
self.req.uri(path);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set a header
|
||||
pub fn set<H: Header>(mut self, hdr: H) -> Self {
|
||||
self.req.set(hdr);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set a header
|
||||
pub fn header<K, V>(mut self, key: K, value: V) -> Self
|
||||
where
|
||||
HeaderName: TryFrom<K>,
|
||||
<HeaderName as TryFrom<K>>::Error: Into<HttpError>,
|
||||
V: IntoHeaderValue,
|
||||
{
|
||||
self.req.header(key, value);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set request path pattern parameter
|
||||
pub fn param(mut self, name: &'static str, value: &'static str) -> Self {
|
||||
self.path.add_static(name, value);
|
||||
self
|
||||
}
|
||||
|
||||
/// Complete request creation and generate a `FramedRequest` instance
|
||||
pub fn finish(mut self) -> FramedRequest<TestBuffer, S> {
|
||||
let req = self.req.finish();
|
||||
self.path.get_mut().update(req.uri());
|
||||
let framed = Framed::new(TestBuffer::empty(), Codec::default());
|
||||
FramedRequest::new(req, framed, self.path, self.state)
|
||||
}
|
||||
|
||||
/// Generate a `FramedRequest` instance and execute the async handler
|
||||
pub async fn run<F, R, I, E>(self, f: F) -> Result<I, E>
|
||||
where
|
||||
F: FnOnce(FramedRequest<TestBuffer, S>) -> R,
|
||||
R: Future<Output = Result<I, E>>,
|
||||
{
|
||||
f(self.finish()).await
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test() {
|
||||
let req = TestRequest::with_uri("/index.html")
|
||||
.header("x-test", "test")
|
||||
.param("test", "123")
|
||||
.finish();
|
||||
|
||||
assert_eq!(*req.state(), ());
|
||||
assert_eq!(req.version(), Version::HTTP_11);
|
||||
assert_eq!(req.method(), Method::GET);
|
||||
assert_eq!(req.path(), "/index.html");
|
||||
assert_eq!(req.query_string(), "");
|
||||
assert_eq!(
|
||||
req.headers().get("x-test").unwrap().to_str().unwrap(),
|
||||
"test"
|
||||
);
|
||||
assert_eq!(&req.match_info()["test"], "123");
|
||||
}
|
||||
}
|
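Beyond `finish()`, the `run` helper above drives an async handler directly. A hedged sketch, using a hypothetical `extract_id` handler, of how a unit test might use it:

```rust
use actix_framed::{test::TestRequest, Error, FramedRequest};
use actix_http::test::TestBuffer;

// Hypothetical handler that echoes a matched path parameter.
async fn extract_id(req: FramedRequest<TestBuffer>) -> Result<String, Error> {
    Ok(req.match_info().get("id").unwrap_or("none").to_string())
}

#[actix_rt::test]
async fn run_executes_handler() {
    let id = TestRequest::with_uri("/item/1")
        .param("id", "1")
        .run(extract_id)
        .await
        .unwrap();
    assert_eq!(id, "1");
}
```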
@ -1,161 +0,0 @@
|
||||
use actix_codec::{AsyncRead, AsyncWrite};
|
||||
use actix_http::{body, http::StatusCode, ws, Error, HttpService, Response};
|
||||
use actix_http_test::test_server;
|
||||
use actix_service::{pipeline_factory, IntoServiceFactory, ServiceFactory};
|
||||
use actix_utils::framed::Dispatcher;
|
||||
use bytes::Bytes;
|
||||
use futures_util::{future, SinkExt, StreamExt};
|
||||
|
||||
use actix_framed::{FramedApp, FramedRequest, FramedRoute, SendError, VerifyWebSockets};
|
||||
|
||||
async fn ws_service<T: AsyncRead + AsyncWrite>(
|
||||
req: FramedRequest<T>,
|
||||
) -> Result<(), Error> {
|
||||
let (req, mut framed, _) = req.into_parts();
|
||||
let res = ws::handshake(req.head()).unwrap().message_body(());
|
||||
|
||||
framed
|
||||
.send((res, body::BodySize::None).into())
|
||||
.await
|
||||
.unwrap();
|
||||
Dispatcher::new(framed.into_framed(ws::Codec::new()), service)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn service(msg: ws::Frame) -> Result<ws::Message, Error> {
|
||||
let msg = match msg {
|
||||
ws::Frame::Ping(msg) => ws::Message::Pong(msg),
|
||||
ws::Frame::Text(text) => {
|
||||
ws::Message::Text(String::from_utf8_lossy(&text).to_string())
|
||||
}
|
||||
ws::Frame::Binary(bin) => ws::Message::Binary(bin),
|
||||
ws::Frame::Close(reason) => ws::Message::Close(reason),
|
||||
_ => panic!(),
|
||||
};
|
||||
Ok(msg)
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_simple() {
|
||||
let mut srv = test_server(|| {
|
||||
HttpService::build()
|
||||
.upgrade(
|
||||
FramedApp::new().service(FramedRoute::get("/index.html").to(ws_service)),
|
||||
)
|
||||
.finish(|_| future::ok::<_, Error>(Response::NotFound()))
|
||||
.tcp()
|
||||
})
|
||||
.await;
|
||||
|
||||
assert!(srv.ws_at("/test").await.is_err());
|
||||
|
||||
// client service
|
||||
let mut framed = srv.ws_at("/index.html").await.unwrap();
|
||||
framed
|
||||
.send(ws::Message::Text("text".to_string()))
|
||||
.await
|
||||
.unwrap();
|
||||
let (item, mut framed) = framed.into_future().await;
|
||||
assert_eq!(
|
||||
item.unwrap().unwrap(),
|
||||
ws::Frame::Text(Bytes::from_static(b"text"))
|
||||
);
|
||||
|
||||
framed
|
||||
.send(ws::Message::Binary("text".into()))
|
||||
.await
|
||||
.unwrap();
|
||||
let (item, mut framed) = framed.into_future().await;
|
||||
assert_eq!(
|
||||
item.unwrap().unwrap(),
|
||||
ws::Frame::Binary(Bytes::from_static(b"text"))
|
||||
);
|
||||
|
||||
framed.send(ws::Message::Ping("text".into())).await.unwrap();
|
||||
let (item, mut framed) = framed.into_future().await;
|
||||
assert_eq!(
|
||||
item.unwrap().unwrap(),
|
||||
ws::Frame::Pong("text".to_string().into())
|
||||
);
|
||||
|
||||
framed
|
||||
.send(ws::Message::Close(Some(ws::CloseCode::Normal.into())))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let (item, _) = framed.into_future().await;
|
||||
assert_eq!(
|
||||
item.unwrap().unwrap(),
|
||||
ws::Frame::Close(Some(ws::CloseCode::Normal.into()))
|
||||
);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_service() {
|
||||
let mut srv = test_server(|| {
|
||||
pipeline_factory(actix_http::h1::OneRequest::new().map_err(|_| ())).and_then(
|
||||
pipeline_factory(
|
||||
pipeline_factory(VerifyWebSockets::default())
|
||||
.then(SendError::default())
|
||||
.map_err(|_| ()),
|
||||
)
|
||||
.and_then(
|
||||
FramedApp::new()
|
||||
.service(FramedRoute::get("/index.html").to(ws_service))
|
||||
.into_factory()
|
||||
.map_err(|_| ()),
|
||||
),
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
// non ws request
|
||||
let res = srv.get("/index.html").send().await.unwrap();
|
||||
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
|
||||
|
||||
// not found
|
||||
assert!(srv.ws_at("/test").await.is_err());
|
||||
|
||||
// client service
|
||||
let mut framed = srv.ws_at("/index.html").await.unwrap();
|
||||
framed
|
||||
.send(ws::Message::Text("text".to_string()))
|
||||
.await
|
||||
.unwrap();
|
||||
let (item, mut framed) = framed.into_future().await;
|
||||
assert_eq!(
|
||||
item.unwrap().unwrap(),
|
||||
ws::Frame::Text(Bytes::from_static(b"text"))
|
||||
);
|
||||
|
||||
framed
|
||||
.send(ws::Message::Binary("text".into()))
|
||||
.await
|
||||
.unwrap();
|
||||
let (item, mut framed) = framed.into_future().await;
|
||||
assert_eq!(
|
||||
item.unwrap().unwrap(),
|
||||
ws::Frame::Binary(Bytes::from_static(b"text"))
|
||||
);
|
||||
|
||||
framed.send(ws::Message::Ping("text".into())).await.unwrap();
|
||||
let (item, mut framed) = framed.into_future().await;
|
||||
assert_eq!(
|
||||
item.unwrap().unwrap(),
|
||||
ws::Frame::Pong("text".to_string().into())
|
||||
);
|
||||
|
||||
framed
|
||||
.send(ws::Message::Close(Some(ws::CloseCode::Normal.into())))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let (item, _) = framed.into_future().await;
|
||||
assert_eq!(
|
||||
item.unwrap().unwrap(),
|
||||
ws::Frame::Close(Some(ws::CloseCode::Normal.into()))
|
||||
);
|
||||
}
|
@ -1,41 +0,0 @@
|
||||
environment:
|
||||
global:
|
||||
PROJECT_NAME: actix-http
|
||||
matrix:
|
||||
# Stable channel
|
||||
- TARGET: i686-pc-windows-msvc
|
||||
CHANNEL: stable
|
||||
- TARGET: x86_64-pc-windows-gnu
|
||||
CHANNEL: stable
|
||||
- TARGET: x86_64-pc-windows-msvc
|
||||
CHANNEL: stable
|
||||
# Nightly channel
|
||||
- TARGET: i686-pc-windows-msvc
|
||||
CHANNEL: nightly
|
||||
- TARGET: x86_64-pc-windows-gnu
|
||||
CHANNEL: nightly
|
||||
- TARGET: x86_64-pc-windows-msvc
|
||||
CHANNEL: nightly
|
||||
|
||||
# Install Rust and Cargo
|
||||
# (Based on from https://github.com/rust-lang/libc/blob/master/appveyor.yml)
|
||||
install:
|
||||
- ps: >-
|
||||
If ($Env:TARGET -eq 'x86_64-pc-windows-gnu') {
|
||||
$Env:PATH += ';C:\msys64\mingw64\bin'
|
||||
} ElseIf ($Env:TARGET -eq 'i686-pc-windows-gnu') {
|
||||
$Env:PATH += ';C:\MinGW\bin'
|
||||
}
|
||||
- curl -sSf -o rustup-init.exe https://win.rustup.rs
|
||||
- rustup-init.exe --default-host %TARGET% --default-toolchain %CHANNEL% -y
|
||||
- set PATH=%PATH%;C:\Users\appveyor\.cargo\bin
|
||||
- rustc -Vv
|
||||
- cargo -V
|
||||
|
||||
# 'cargo test' takes care of building for us, so disable Appveyor's build stage.
|
||||
build: false
|
||||
|
||||
# Equivalent to Travis' `script` phase
|
||||
test_script:
|
||||
- cargo clean
|
||||
- cargo test
|
@ -1,5 +1,50 @@
# Changes

## Unreleased - 2020-xx-xx


## 2.0.0 - 2020-09-11
* No significant changes from `2.0.0-beta.4`.


## 2.0.0-beta.4 - 2020-09-09
### Changed
* Update actix-codec and actix-utils dependencies.
* Update actix-connect and actix-tls dependencies.


## [2.0.0-beta.3] - 2020-08-14
### Fixed
* Memory leak of `client::pool::ConnectorPoolSupport`. [#1626]

[#1626]: https://github.com/actix/actix-web/pull/1626


## [2.0.0-beta.2] - 2020-07-21
### Fixed
* Potential UB in h1 decoder using uninitialized memory. [#1614]

### Changed
* Fix illegal chunked encoding. [#1615]

[#1614]: https://github.com/actix/actix-web/pull/1614
[#1615]: https://github.com/actix/actix-web/pull/1615


## [2.0.0-beta.1] - 2020-07-11
### Changed
* Migrate cookie handling to `cookie` crate. [#1558]
* Update `sha-1` to 0.9. [#1586]
* Fix leak in client pool. [#1580]
* MSRV is now 1.41.1.

[#1558]: https://github.com/actix/actix-web/pull/1558
[#1586]: https://github.com/actix/actix-web/pull/1586
[#1580]: https://github.com/actix/actix-web/pull/1580


## [2.0.0-alpha.4] - 2020-05-21
### Changed
@ -1,8 +1,8 @@
|
||||
[package]
|
||||
name = "actix-http"
|
||||
version = "2.0.0-alpha.4"
|
||||
version = "2.0.0"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Actix http primitives"
|
||||
description = "Actix HTTP primitives"
|
||||
readme = "README.md"
|
||||
keywords = ["actix", "http", "framework", "async", "futures"]
|
||||
homepage = "https://actix.rs"
|
||||
@ -11,7 +11,7 @@ documentation = "https://docs.rs/actix-http/"
|
||||
categories = ["network-programming", "asynchronous",
|
||||
"web-programming::http-server",
|
||||
"web-programming::websocket"]
|
||||
license = "MIT/Apache-2.0"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
@ -34,24 +34,25 @@ rustls = ["actix-tls/rustls", "actix-connect/rustls"]
|
||||
compress = ["flate2", "brotli2"]
|
||||
|
||||
# support for secure cookies
|
||||
secure-cookies = ["ring"]
|
||||
secure-cookies = ["cookie/secure"]
|
||||
|
||||
# support for actix Actor messages
|
||||
actors = ["actix"]
|
||||
|
||||
[dependencies]
|
||||
actix-service = "1.0.5"
|
||||
actix-codec = "0.2.0"
|
||||
actix-connect = "2.0.0-alpha.3"
|
||||
actix-utils = "1.0.6"
|
||||
actix-service = "1.0.6"
|
||||
actix-codec = "0.3.0"
|
||||
actix-connect = "2.0.0"
|
||||
actix-utils = "2.0.0"
|
||||
actix-rt = "1.0.0"
|
||||
actix-threadpool = "0.3.1"
|
||||
actix-tls = { version = "2.0.0-alpha.1", optional = true }
|
||||
actix = { version = "0.10.0-alpha.1", optional = true }
|
||||
actix-tls = { version = "2.0.0", optional = true }
|
||||
actix = { version = "0.10.0", optional = true }
|
||||
|
||||
base64 = "0.12"
|
||||
bitflags = "1.2"
|
||||
bytes = "0.5.3"
|
||||
cookie = { version = "0.14.1", features = ["percent-encode"] }
|
||||
copyless = "0.1.4"
|
||||
derive_more = "0.99.2"
|
||||
either = "1.5.3"
|
||||
@ -70,33 +71,30 @@ language-tags = "0.2"
|
||||
log = "0.4"
|
||||
mime = "0.3"
|
||||
percent-encoding = "2.1"
|
||||
pin-project = "0.4.6"
|
||||
pin-project = "0.4.17"
|
||||
rand = "0.7"
|
||||
regex = "1.3"
|
||||
serde = "1.0"
|
||||
serde_json = "1.0"
|
||||
sha-1 = "0.8"
|
||||
sha-1 = "0.9"
|
||||
slab = "0.4"
|
||||
serde_urlencoded = "0.6.1"
|
||||
time = { version = "0.2.7", default-features = false, features = ["std"] }
|
||||
|
||||
# for secure cookie
|
||||
ring = { version = "0.16.9", optional = true }
|
||||
|
||||
# compression
|
||||
brotli2 = { version="0.3.2", optional = true }
|
||||
flate2 = { version = "1.0.13", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-server = "1.0.1"
|
||||
actix-connect = { version = "2.0.0-alpha.2", features = ["openssl"] }
|
||||
actix-http-test = { version = "2.0.0-alpha.1", features = ["openssl"] }
|
||||
actix-tls = { version = "2.0.0-alpha.1", features = ["openssl"] }
|
||||
actix-connect = { version = "2.0.0", features = ["openssl"] }
|
||||
actix-http-test = { version = "2.0.0", features = ["openssl"] }
|
||||
actix-tls = { version = "2.0.0", features = ["openssl"] }
|
||||
criterion = "0.3"
|
||||
env_logger = "0.7"
|
||||
serde_derive = "1.0"
|
||||
open-ssl = { version="0.10", package = "openssl" }
|
||||
rust-tls = { version="0.17", package = "rustls" }
|
||||
rust-tls = { version="0.18", package = "rustls" }
|
||||
|
||||
[[bench]]
|
||||
name = "content-length"
|
||||
@ -105,3 +103,7 @@ harness = false
|
||||
[[bench]]
|
||||
name = "status-line"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "uninit-headers"
|
||||
harness = false
|
||||
|
@ -1,201 +0,0 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2017-NOW Nikolay Kim
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
actix-http/LICENSE-APACHE (symbolic link, 1 line)
@ -0,0 +1 @@
|
||||
../LICENSE-APACHE
|
@ -1,25 +0,0 @@
|
||||
Copyright (c) 2017 Nikolay Kim
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
actix-http/LICENSE-MIT (symbolic link, 1 line)
@ -0,0 +1 @@
|
||||
../LICENSE-MIT
|
@ -13,12 +13,11 @@ Actix http
|
||||
## Example
|
||||
|
||||
```rust
|
||||
// see examples/framed_hello.rs for complete list of used crates.
|
||||
use std::{env, io};
|
||||
|
||||
use actix_http::{HttpService, Response};
|
||||
use actix_server::Server;
|
||||
use futures::future;
|
||||
use futures_util::future;
|
||||
use http::header::HeaderValue;
|
||||
use log::info;
|
||||
|
||||
|
actix-http/benches/uninit-headers.rs (new file, 137 lines)
@ -0,0 +1,137 @@
|
||||
use criterion::{criterion_group, criterion_main, Criterion};
|
||||
|
||||
use bytes::BytesMut;
|
||||
|
||||
// A Miri run detects UB, seen on this playground:
|
||||
// https://play.rust-lang.org/?version=stable&mode=debug&edition=2018&gist=f5d9aa166aa48df8dca05fce2b6c3915
|
||||
|
||||
fn bench_header_parsing(c: &mut Criterion) {
|
||||
c.bench_function("Original (Unsound) [short]", |b| {
|
||||
b.iter(|| {
|
||||
let mut buf = BytesMut::from(REQ_SHORT);
|
||||
_original::parse_headers(&mut buf);
|
||||
})
|
||||
});
|
||||
|
||||
c.bench_function("New (safe) [short]", |b| {
|
||||
b.iter(|| {
|
||||
let mut buf = BytesMut::from(REQ_SHORT);
|
||||
_new::parse_headers(&mut buf);
|
||||
})
|
||||
});
|
||||
|
||||
c.bench_function("Original (Unsound) [realistic]", |b| {
|
||||
b.iter(|| {
|
||||
let mut buf = BytesMut::from(REQ);
|
||||
_original::parse_headers(&mut buf);
|
||||
})
|
||||
});
|
||||
|
||||
c.bench_function("New (safe) [realistic]", |b| {
|
||||
b.iter(|| {
|
||||
let mut buf = BytesMut::from(REQ);
|
||||
_new::parse_headers(&mut buf);
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
criterion_group!(benches, bench_header_parsing);
|
||||
criterion_main!(benches);
|
||||
|
||||
const MAX_HEADERS: usize = 96;
|
||||
|
||||
const EMPTY_HEADER_ARRAY: [httparse::Header<'static>; MAX_HEADERS] =
|
||||
[httparse::EMPTY_HEADER; MAX_HEADERS];
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
struct HeaderIndex {
|
||||
name: (usize, usize),
|
||||
value: (usize, usize),
|
||||
}
|
||||
|
||||
const EMPTY_HEADER_INDEX: HeaderIndex = HeaderIndex {
|
||||
name: (0, 0),
|
||||
value: (0, 0),
|
||||
};
|
||||
|
||||
const EMPTY_HEADER_INDEX_ARRAY: [HeaderIndex; MAX_HEADERS] =
|
||||
[EMPTY_HEADER_INDEX; MAX_HEADERS];
|
||||
|
||||
impl HeaderIndex {
|
||||
fn record(
|
||||
bytes: &[u8],
|
||||
headers: &[httparse::Header<'_>],
|
||||
indices: &mut [HeaderIndex],
|
||||
) {
|
||||
let bytes_ptr = bytes.as_ptr() as usize;
|
||||
for (header, indices) in headers.iter().zip(indices.iter_mut()) {
|
||||
let name_start = header.name.as_ptr() as usize - bytes_ptr;
|
||||
let name_end = name_start + header.name.len();
|
||||
indices.name = (name_start, name_end);
|
||||
let value_start = header.value.as_ptr() as usize - bytes_ptr;
|
||||
let value_end = value_start + header.value.len();
|
||||
indices.value = (value_start, value_end);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// test cases taken from:
|
||||
// https://github.com/seanmonstar/httparse/blob/master/benches/parse.rs
|
||||
|
||||
const REQ_SHORT: &'static [u8] = b"\
|
||||
GET / HTTP/1.0\r\n\
|
||||
Host: example.com\r\n\
|
||||
Cookie: session=60; user_id=1\r\n\r\n";
|
||||
|
||||
const REQ: &'static [u8] = b"\
|
||||
GET /wp-content/uploads/2010/03/hello-kitty-darth-vader-pink.jpg HTTP/1.1\r\n\
|
||||
Host: www.kittyhell.com\r\n\
|
||||
User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; ja-JP-mac; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 Pathtraq/0.9\r\n\
|
||||
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n\
|
||||
Accept-Language: ja,en-us;q=0.7,en;q=0.3\r\n\
|
||||
Accept-Encoding: gzip,deflate\r\n\
|
||||
Accept-Charset: Shift_JIS,utf-8;q=0.7,*;q=0.7\r\n\
|
||||
Keep-Alive: 115\r\n\
|
||||
Connection: keep-alive\r\n\
|
||||
Cookie: wp_ozh_wsa_visits=2; wp_ozh_wsa_visit_lasttime=xxxxxxxxxx; __utma=xxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.x; __utmz=xxxxxxxxx.xxxxxxxxxx.x.x.utmccn=(referral)|utmcsr=reader.livedoor.com|utmcct=/reader/|utmcmd=referral|padding=under256\r\n\r\n";
|
||||
|
||||
mod _new {
|
||||
use super::*;
|
||||
|
||||
pub fn parse_headers(src: &mut BytesMut) -> usize {
|
||||
let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;
|
||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;
|
||||
|
||||
let mut req = httparse::Request::new(&mut parsed);
|
||||
match req.parse(src).unwrap() {
|
||||
httparse::Status::Complete(_len) => {
|
||||
HeaderIndex::record(src, req.headers, &mut headers);
|
||||
req.headers.len()
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mod _original {
|
||||
use super::*;
|
||||
|
||||
use std::mem::MaybeUninit;
|
||||
|
||||
pub fn parse_headers(src: &mut BytesMut) -> usize {
|
||||
let mut headers: [HeaderIndex; MAX_HEADERS] =
|
||||
unsafe { MaybeUninit::uninit().assume_init() };
|
||||
|
||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] =
|
||||
unsafe { MaybeUninit::uninit().assume_init() };
|
||||
|
||||
let mut req = httparse::Request::new(&mut parsed);
|
||||
match req.parse(src).unwrap() {
|
||||
httparse::Status::Complete(_len) => {
|
||||
HeaderIndex::record(src, req.headers, &mut headers);
|
||||
req.headers.len()
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
@ -6,7 +6,7 @@ use std::{fmt, mem};
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use futures_core::Stream;
|
||||
use futures_util::ready;
|
||||
use pin_project::{pin_project, project};
|
||||
use pin_project::pin_project;
|
||||
|
||||
use crate::error::Error;
|
||||
|
||||
@ -21,12 +21,7 @@ pub enum BodySize {
|
||||
|
||||
impl BodySize {
|
||||
pub fn is_eof(&self) -> bool {
|
||||
match self {
|
||||
BodySize::None
|
||||
| BodySize::Empty
|
||||
| BodySize::Sized(0) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self, BodySize::None | BodySize::Empty | BodySize::Sized(0))
|
||||
}
|
||||
}
|
||||
|
||||
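The `is_eof` change above replaces an explicit `match` with the `matches!` macro; both expand to the same check, as this standalone sketch (with a local stand-in enum, not the real `BodySize`) illustrates:

```rust
// Local stand-in for the BodySize enum, just to show the two equivalent forms.
#[derive(Clone, Copy)]
enum BodySize {
    None,
    Empty,
    Sized(u64),
    Stream,
}

// The old form: an exhaustive match with a catch-all `false` arm.
fn is_eof_old(size: BodySize) -> bool {
    match size {
        BodySize::None | BodySize::Empty | BodySize::Sized(0) => true,
        _ => false,
    }
}

// The new form: `matches!` generates the same match expression.
fn is_eof_new(size: BodySize) -> bool {
    matches!(size, BodySize::None | BodySize::Empty | BodySize::Sized(0))
}

fn main() {
    let cases = [BodySize::None, BodySize::Empty, BodySize::Sized(0), BodySize::Sized(1), BodySize::Stream];
    for size in cases.iter().copied() {
        assert_eq!(is_eof_old(size), is_eof_new(size));
    }
}
```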
@ -70,7 +65,7 @@ impl<T: MessageBody + Unpin> MessageBody for Box<T> {
|
||||
}
|
||||
}
|
||||
|
||||
#[pin_project]
|
||||
#[pin_project(project = ResponseBodyProj)]
|
||||
pub enum ResponseBody<B> {
|
||||
Body(#[pin] B),
|
||||
Other(#[pin] Body),
|
||||
@ -109,15 +104,13 @@ impl<B: MessageBody> MessageBody for ResponseBody<B> {
|
||||
}
|
||||
}
|
||||
|
||||
#[project]
|
||||
fn poll_next(
|
||||
self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<Option<Result<Bytes, Error>>> {
|
||||
#[project]
|
||||
match self.project() {
|
||||
ResponseBody::Body(body) => body.poll_next(cx),
|
||||
ResponseBody::Other(body) => body.poll_next(cx),
|
||||
ResponseBodyProj::Body(body) => body.poll_next(cx),
|
||||
ResponseBodyProj::Other(body) => body.poll_next(cx),
|
||||
}
|
||||
}
|
||||
}
|
||||
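The hunks above replace pin-project's `#[project]` attribute with a named projection type (`ResponseBodyProj`). A minimal sketch of the same pattern on an unrelated enum, with illustrative names only:

```rust
use std::pin::Pin;

use pin_project::pin_project;

// `project = EitherProj` generates an `EitherProj` enum whose variants hold
// pinned references to the original fields.
#[pin_project(project = EitherProj)]
enum Either<A, B> {
    Left(#[pin] A),
    Right(#[pin] B),
}

impl<A, B> Either<A, B> {
    fn side(self: Pin<&mut Self>) -> &'static str {
        // No `#[project]` attribute on the match is needed any more;
        // matching on the generated projection enum is enough.
        match self.project() {
            EitherProj::Left(_) => "left",
            EitherProj::Right(_) => "right",
        }
    }
}

fn main() {
    let mut value: Either<u8, &str> = Either::Left(1);
    assert_eq!(Pin::new(&mut value).side(), "left");
}
```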
@ -125,20 +118,18 @@ impl<B: MessageBody> MessageBody for ResponseBody<B> {
|
||||
impl<B: MessageBody> Stream for ResponseBody<B> {
|
||||
type Item = Result<Bytes, Error>;
|
||||
|
||||
#[project]
|
||||
fn poll_next(
|
||||
self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<Option<Self::Item>> {
|
||||
#[project]
|
||||
match self.project() {
|
||||
ResponseBody::Body(body) => body.poll_next(cx),
|
||||
ResponseBody::Other(body) => body.poll_next(cx),
|
||||
ResponseBodyProj::Body(body) => body.poll_next(cx),
|
||||
ResponseBodyProj::Other(body) => body.poll_next(cx),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[pin_project]
|
||||
#[pin_project(project = BodyProj)]
|
||||
/// Represents various types of http message body.
|
||||
pub enum Body {
|
||||
/// Empty response. `Content-Length` header is not set.
|
||||
@ -173,16 +164,14 @@ impl MessageBody for Body {
|
||||
}
|
||||
}
|
||||
|
||||
#[project]
|
||||
fn poll_next(
|
||||
self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<Option<Result<Bytes, Error>>> {
|
||||
#[project]
|
||||
match self.project() {
|
||||
Body::None => Poll::Ready(None),
|
||||
Body::Empty => Poll::Ready(None),
|
||||
Body::Bytes(ref mut bin) => {
|
||||
BodyProj::None => Poll::Ready(None),
|
||||
BodyProj::Empty => Poll::Ready(None),
|
||||
BodyProj::Bytes(ref mut bin) => {
|
||||
let len = bin.len();
|
||||
if len == 0 {
|
||||
Poll::Ready(None)
|
||||
@ -190,7 +179,7 @@ impl MessageBody for Body {
|
||||
Poll::Ready(Some(Ok(mem::take(bin))))
|
||||
}
|
||||
}
|
||||
Body::Message(ref mut body) => Pin::new(body.as_mut()).poll_next(cx),
|
||||
BodyProj::Message(ref mut body) => Pin::new(body.as_mut()).poll_next(cx),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -198,14 +187,8 @@ impl MessageBody for Body {
|
||||
impl PartialEq for Body {
|
||||
fn eq(&self, other: &Body) -> bool {
|
||||
match *self {
|
||||
Body::None => match *other {
|
||||
Body::None => true,
|
||||
_ => false,
|
||||
},
|
||||
Body::Empty => match *other {
|
||||
Body::Empty => true,
|
||||
_ => false,
|
||||
},
|
||||
Body::None => matches!(*other, Body::None),
|
||||
Body::Empty => matches!(*other, Body::Empty),
|
||||
Body::Bytes(ref b) => match *other {
|
||||
Body::Bytes(ref b2) => b == b2,
|
||||
_ => false,
|
||||
@ -482,9 +465,9 @@ where
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use futures_util::stream;
|
||||
use futures_util::future::poll_fn;
|
||||
use futures_util::pin_mut;
|
||||
use futures_util::stream;
|
||||
|
||||
impl Body {
|
||||
pub(crate) fn get_ref(&self) -> &[u8] {
|
||||
@ -618,10 +601,6 @@ mod tests {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_body_eq() {
|
||||
assert!(Body::None == Body::None);
|
||||
assert!(Body::None != Body::Empty);
|
||||
assert!(Body::Empty == Body::Empty);
|
||||
assert!(Body::Empty != Body::None);
|
||||
assert!(
|
||||
Body::Bytes(Bytes::from_static(b"1"))
|
||||
== Body::Bytes(Bytes::from_static(b"1"))
|
||||
@ -633,7 +612,7 @@ mod tests {
|
||||
async fn test_body_debug() {
|
||||
assert!(format!("{:?}", Body::None).contains("Body::None"));
|
||||
assert!(format!("{:?}", Body::Empty).contains("Body::Empty"));
|
||||
assert!(format!("{:?}", Body::Bytes(Bytes::from_static(b"1"))).contains("1"));
|
||||
assert!(format!("{:?}", Body::Bytes(Bytes::from_static(b"1"))).contains('1'));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -735,7 +714,7 @@ mod tests {
|
||||
let body = resp_body.downcast_ref::<String>().unwrap();
|
||||
assert_eq!(body, "hello cast");
|
||||
let body = &mut resp_body.downcast_mut::<String>().unwrap();
|
||||
body.push_str("!");
|
||||
body.push('!');
|
||||
let body = resp_body.downcast_ref::<String>().unwrap();
|
||||
assert_eq!(body, "hello cast!");
|
||||
let not_body = resp_body.downcast_ref::<()>();
|
||||
|
@ -7,7 +7,7 @@ use actix_codec::{AsyncRead, AsyncWrite, Framed};
|
||||
use bytes::{Buf, Bytes};
|
||||
use futures_util::future::{err, Either, FutureExt, LocalBoxFuture, Ready};
|
||||
use h2::client::SendRequest;
|
||||
use pin_project::{pin_project, project};
|
||||
use pin_project::pin_project;
|
||||
|
||||
use crate::body::MessageBody;
|
||||
use crate::h1::ClientCodec;
|
||||
@ -46,10 +46,10 @@ pub trait Connection {
|
||||
|
||||
pub(crate) trait ConnectionLifetime: AsyncRead + AsyncWrite + 'static {
|
||||
/// Close connection
|
||||
fn close(&mut self);
|
||||
fn close(self: Pin<&mut Self>);
|
||||
|
||||
/// Release connection to the connection pool
|
||||
fn release(&mut self);
|
||||
fn release(self: Pin<&mut Self>);
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
@ -195,17 +195,21 @@ where
|
||||
match self {
|
||||
EitherConnection::A(con) => con
|
||||
.open_tunnel(head)
|
||||
.map(|res| res.map(|(head, framed)| (head, framed.map_io(EitherIo::A))))
|
||||
.map(|res| {
|
||||
res.map(|(head, framed)| (head, framed.into_map_io(EitherIo::A)))
|
||||
})
|
||||
.boxed_local(),
|
||||
EitherConnection::B(con) => con
|
||||
.open_tunnel(head)
|
||||
.map(|res| res.map(|(head, framed)| (head, framed.map_io(EitherIo::B))))
|
||||
.map(|res| {
|
||||
res.map(|(head, framed)| (head, framed.into_map_io(EitherIo::B)))
|
||||
})
|
||||
.boxed_local(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[pin_project]
|
||||
#[pin_project(project = EitherIoProj)]
|
||||
pub enum EitherIo<A, B> {
|
||||
A(#[pin] A),
|
||||
B(#[pin] B),
|
||||
@ -216,16 +220,14 @@ where
|
||||
A: AsyncRead,
|
||||
B: AsyncRead,
|
||||
{
|
||||
#[project]
|
||||
fn poll_read(
|
||||
self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
buf: &mut [u8],
|
||||
) -> Poll<io::Result<usize>> {
|
||||
#[project]
|
||||
match self.project() {
|
||||
EitherIo::A(val) => val.poll_read(cx, buf),
|
||||
EitherIo::B(val) => val.poll_read(cx, buf),
|
||||
EitherIoProj::A(val) => val.poll_read(cx, buf),
|
||||
EitherIoProj::B(val) => val.poll_read(cx, buf),
|
||||
}
|
||||
}
|
||||
|
||||
@ -245,41 +247,34 @@ where
|
||||
A: AsyncWrite,
|
||||
B: AsyncWrite,
|
||||
{
|
||||
#[project]
|
||||
fn poll_write(
|
||||
self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
buf: &[u8],
|
||||
) -> Poll<io::Result<usize>> {
|
||||
#[project]
|
||||
match self.project() {
|
||||
EitherIo::A(val) => val.poll_write(cx, buf),
|
||||
EitherIo::B(val) => val.poll_write(cx, buf),
|
||||
EitherIoProj::A(val) => val.poll_write(cx, buf),
|
||||
EitherIoProj::B(val) => val.poll_write(cx, buf),
|
||||
}
|
||||
}
|
||||
|
||||
#[project]
|
||||
fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
|
||||
#[project]
|
||||
match self.project() {
|
||||
EitherIo::A(val) => val.poll_flush(cx),
|
||||
EitherIo::B(val) => val.poll_flush(cx),
|
||||
EitherIoProj::A(val) => val.poll_flush(cx),
|
||||
EitherIoProj::B(val) => val.poll_flush(cx),
|
||||
}
|
||||
}
|
||||
|
||||
#[project]
|
||||
fn poll_shutdown(
|
||||
self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<io::Result<()>> {
|
||||
#[project]
|
||||
match self.project() {
|
||||
EitherIo::A(val) => val.poll_shutdown(cx),
|
||||
EitherIo::B(val) => val.poll_shutdown(cx),
|
||||
EitherIoProj::A(val) => val.poll_shutdown(cx),
|
||||
EitherIoProj::B(val) => val.poll_shutdown(cx),
|
||||
}
|
||||
}
|
||||
|
||||
#[project]
|
||||
fn poll_write_buf<U: Buf>(
|
||||
self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
@ -288,10 +283,9 @@ where
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
#[project]
|
||||
match self.project() {
|
||||
EitherIo::A(val) => val.poll_write_buf(cx, buf),
|
||||
EitherIo::B(val) => val.poll_write_buf(cx, buf),
|
||||
EitherIoProj::A(val) => val.poll_write_buf(cx, buf),
|
||||
EitherIoProj::B(val) => val.poll_write_buf(cx, buf),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
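The `EitherIo` changes above follow pin-project's move from the `#[project]` method attribute to a named projection type (`project = EitherIoProj`). A standalone sketch of that pattern, assuming pin-project 1.x and types invented for illustration:

```rust
use std::pin::Pin;

use pin_project::pin_project;

// Naming the projection (`EitherProj`) replaces the old `#[project]` attribute;
// the generated enum carries `Pin<&mut _>` for every `#[pin]` field.
#[pin_project(project = EitherProj)]
enum Either<A, B> {
    Left(#[pin] A),
    Right(#[pin] B),
}

impl<A, B> Either<A, B> {
    fn describe(self: Pin<&mut Self>) -> &'static str {
        match self.project() {
            EitherProj::Left(_pinned_a) => "left",
            EitherProj::Right(_pinned_b) => "right",
        }
    }
}

fn main() {
    let mut value: Either<u8, String> = Either::Left(1);
    // `u8` and `String` are both `Unpin`, so `Pin::new` is enough here.
    let pinned = Pin::new(&mut value);
    assert_eq!(pinned.describe(), "left");
}
```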
@@ -39,7 +39,7 @@ pub enum ConnectError {
    H2(h2::Error),

    /// Connecting took too long
    #[display(fmt = "Timeout out while establishing connection")]
    #[display(fmt = "Timeout while establishing connection")]
    Timeout,

    /// Connector has been disconnected
@@ -110,7 +110,7 @@ pub enum SendRequestError {
    #[display(fmt = "{}", _0)]
    H2(h2::Error),
    /// Response took too long
    #[display(fmt = "Timeout out while waiting for response")]
    #[display(fmt = "Timeout while waiting for response")]
    Timeout,
    /// Tunnels are not supported for http2 connection
    #[display(fmt = "Tunnels are not supported for http2 connection")]
@@ -67,17 +67,17 @@ where
    };

    // create Framed and send request
    let mut framed = Framed::new(io, h1::ClientCodec::default());
    framed.send((head, body.size()).into()).await?;
    let mut framed_inner = Framed::new(io, h1::ClientCodec::default());
    framed_inner.send((head, body.size()).into()).await?;

    // send request body
    match body.size() {
        BodySize::None | BodySize::Empty | BodySize::Sized(0) => (),
        _ => send_body(body, &mut framed).await?,
        _ => send_body(body, Pin::new(&mut framed_inner)).await?,
    };

    // read response and init read body
    let res = framed.into_future().await;
    let res = Pin::new(&mut framed_inner).into_future().await;
    let (head, framed) = if let (Some(result), framed) = res {
        let item = result.map_err(SendRequestError::from)?;
        (item, framed)
@@ -85,14 +85,14 @@ where
        return Err(SendRequestError::from(ConnectError::Disconnected));
    };

    match framed.get_codec().message_type() {
    match framed.codec_ref().message_type() {
        h1::MessageType::None => {
            let force_close = !framed.get_codec().keepalive();
            let force_close = !framed.codec_ref().keepalive();
            release_connection(framed, force_close);
            Ok((head, Payload::None))
        }
        _ => {
            let pl: PayloadStream = PlStream::new(framed).boxed_local();
            let pl: PayloadStream = PlStream::new(framed_inner).boxed_local();
            Ok((head, pl.into()))
        }
    }
@@ -119,35 +119,36 @@ where
}

/// send request body to the peer
pub(crate) async fn send_body<I, B>(
pub(crate) async fn send_body<T, B>(
    body: B,
    framed: &mut Framed<I, h1::ClientCodec>,
    mut framed: Pin<&mut Framed<T, h1::ClientCodec>>,
) -> Result<(), SendRequestError>
where
    I: ConnectionLifetime,
    T: ConnectionLifetime + Unpin,
    B: MessageBody,
{
    let mut eof = false;
    pin_mut!(body);

    let mut eof = false;
    while !eof {
        while !eof && !framed.is_write_buf_full() {
        while !eof && !framed.as_ref().is_write_buf_full() {
            match poll_fn(|cx| body.as_mut().poll_next(cx)).await {
                Some(result) => {
                    framed.write(h1::Message::Chunk(Some(result?)))?;
                    framed.as_mut().write(h1::Message::Chunk(Some(result?)))?;
                }
                None => {
                    eof = true;
                    framed.write(h1::Message::Chunk(None))?;
                    framed.as_mut().write(h1::Message::Chunk(None))?;
                }
            }
        }

        if !framed.is_write_buf_empty() {
            poll_fn(|cx| match framed.flush(cx) {
        if !framed.as_ref().is_write_buf_empty() {
            poll_fn(|cx| match framed.as_mut().flush(cx) {
                Poll::Ready(Ok(_)) => Poll::Ready(Ok(())),
                Poll::Ready(Err(err)) => Poll::Ready(Err(err)),
                Poll::Pending => {
                    if !framed.is_write_buf_full() {
                    if !framed.as_ref().is_write_buf_full() {
                        Poll::Ready(Ok(()))
                    } else {
                        Poll::Pending
@@ -158,13 +159,14 @@ where
        }
    }

    SinkExt::flush(framed).await?;
    SinkExt::flush(Pin::into_inner(framed)).await?;
    Ok(())
}

#[doc(hidden)]
/// HTTP client connection
pub struct H1Connection<T> {
    /// T should be `Unpin`
    io: Option<T>,
    created: time::Instant,
    pool: Option<Acquired<T>>,
@@ -175,7 +177,7 @@ where
    T: AsyncRead + AsyncWrite + Unpin + 'static,
{
    /// Close connection
    fn close(&mut self) {
    fn close(mut self: Pin<&mut Self>) {
        if let Some(mut pool) = self.pool.take() {
            if let Some(io) = self.io.take() {
                pool.close(IoConnection::new(
@@ -188,7 +190,7 @@ where
    }

    /// Release this connection to the connection pool
    fn release(&mut self) {
    fn release(mut self: Pin<&mut Self>) {
        if let Some(mut pool) = self.pool.take() {
            if let Some(io) = self.io.take() {
                pool.release(IoConnection::new(
@@ -242,14 +244,18 @@ impl<T: AsyncRead + AsyncWrite + Unpin + 'static> AsyncWrite for H1Connection<T>
    }
}

#[pin_project::pin_project]
pub(crate) struct PlStream<Io> {
    #[pin]
    framed: Option<Framed<Io, h1::ClientPayloadCodec>>,
}

impl<Io: ConnectionLifetime> PlStream<Io> {
    fn new(framed: Framed<Io, h1::ClientCodec>) -> Self {
        let framed = framed.into_map_codec(|codec| codec.into_payload_codec());

        PlStream {
            framed: Some(framed.map_codec(|codec| codec.into_payload_codec())),
            framed: Some(framed),
        }
    }
}
@@ -261,16 +267,16 @@ impl<Io: ConnectionLifetime> Stream for PlStream<Io> {
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<Self::Item>> {
        let this = self.get_mut();
        let mut this = self.project();

        match this.framed.as_mut().unwrap().next_item(cx)? {
        match this.framed.as_mut().as_pin_mut().unwrap().next_item(cx)? {
            Poll::Pending => Poll::Pending,
            Poll::Ready(Some(chunk)) => {
                if let Some(chunk) = chunk {
                    Poll::Ready(Some(Ok(chunk)))
                } else {
                    let framed = this.framed.take().unwrap();
                    let force_close = !framed.get_codec().keepalive();
                    let framed = this.framed.as_mut().as_pin_mut().unwrap();
                    let force_close = !framed.codec_ref().keepalive();
                    release_connection(framed, force_close);
                    Poll::Ready(None)
                }
@@ -280,14 +286,13 @@ impl<Io: ConnectionLifetime> Stream for PlStream<Io> {
    }
}

fn release_connection<T, U>(framed: Framed<T, U>, force_close: bool)
fn release_connection<T, U>(framed: Pin<&mut Framed<T, U>>, force_close: bool)
where
    T: ConnectionLifetime,
{
    let mut parts = framed.into_parts();
    if !force_close && parts.read_buf.is_empty() && parts.write_buf.is_empty() {
        parts.io.release()
    if !force_close && framed.is_read_buf_empty() && framed.is_write_buf_empty() {
        framed.io_pin().release()
    } else {
        parts.io.close()
        framed.io_pin().close()
    }
}
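The rewritten `send_body` pins the message body with `pin_mut!` and drives it manually through `poll_fn` while writing into a pinned `Framed` sink. A reduced sketch of that polling loop over a plain `Stream`, collecting into a `Vec` instead of a codec (assumes the `futures-util` and `actix-rt` crates already used throughout this diff):

```rust
use futures_util::future::poll_fn;
use futures_util::pin_mut;
use futures_util::stream::{self, Stream};

async fn collect_chunks<S>(body: S) -> Vec<u32>
where
    S: Stream<Item = u32>,
{
    // Pin the stream on the stack so `poll_next` can be called through
    // `as_mut()` without requiring `S: Unpin`.
    pin_mut!(body);

    let mut out = Vec::new();
    loop {
        // `poll_fn` turns the manual poll closure into an awaitable future,
        // mirroring how `send_body` pulls the body chunk by chunk.
        match poll_fn(|cx| body.as_mut().poll_next(cx)).await {
            Some(chunk) => out.push(chunk),
            None => break,
        }
    }
    out
}

#[actix_rt::main]
async fn main() {
    let chunks = collect_chunks(stream::iter(vec![1, 2, 3])).await;
    assert_eq!(chunks, vec![1, 2, 3]);
}
```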
@@ -37,10 +37,10 @@ where
    trace!("Sending client request: {:?} {:?}", head, body.size());
    let head_req = head.as_ref().method == Method::HEAD;
    let length = body.size();
    let eof = match length {
        BodySize::None | BodySize::Empty | BodySize::Sized(0) => true,
        _ => false,
    };
    let eof = matches!(
        length,
        BodySize::None | BodySize::Empty | BodySize::Sized(0)
    );

    let mut req = Request::new(());
    *req.uri_mut() = head.as_ref().uri.clone();
@@ -53,17 +53,23 @@ where
        + 'static,
{
    pub(crate) fn new(connector: T, config: ConnectorConfig) -> Self {
        ConnectionPool(
            Rc::new(RefCell::new(connector)),
            Rc::new(RefCell::new(Inner {
                config,
                acquired: 0,
                waiters: Slab::new(),
                waiters_queue: IndexSet::new(),
                available: FxHashMap::default(),
                waker: LocalWaker::new(),
            })),
        )
        let connector_rc = Rc::new(RefCell::new(connector));
        let inner_rc = Rc::new(RefCell::new(Inner {
            config,
            acquired: 0,
            waiters: Slab::new(),
            waiters_queue: IndexSet::new(),
            available: FxHashMap::default(),
            waker: LocalWaker::new(),
        }));

        // start support future
        actix_rt::spawn(ConnectorPoolSupport {
            connector: Rc::clone(&connector_rc),
            inner: Rc::clone(&inner_rc),
        });

        ConnectionPool(connector_rc, inner_rc)
    }
}

@@ -76,6 +82,13 @@ where
    }
}

impl<T, Io> Drop for ConnectionPool<T, Io> {
    fn drop(&mut self) {
        // wake up the ConnectorPoolSupport when dropping so it can exit properly.
        self.1.borrow().waker.wake();
    }
}

impl<T, Io> Service for ConnectionPool<T, Io>
where
    Io: AsyncRead + AsyncWrite + Unpin + 'static,
@@ -92,12 +105,6 @@ where
    }

    fn call(&mut self, req: Connect) -> Self::Future {
        // start support future
        actix_rt::spawn(ConnectorPoolSupport {
            connector: self.0.clone(),
            inner: self.1.clone(),
        });

        let mut connector = self.0.clone();
        let inner = self.1.clone();

@@ -112,11 +119,11 @@ where
            match poll_fn(|cx| Poll::Ready(inner.borrow_mut().acquire(&key, cx))).await {
                Acquire::Acquired(io, created) => {
                    // use existing connection
                    return Ok(IoConnection::new(
                    Ok(IoConnection::new(
                        io,
                        created,
                        Some(Acquired(key, Some(inner))),
                    ));
                    ))
                }
                Acquire::Available => {
                    // open tcp connection
@@ -435,7 +442,13 @@ where
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let this = self.project();

        let mut inner = this.inner.as_ref().borrow_mut();
        if Rc::strong_count(this.inner) == 1 {
            // If we are last copy of Inner<Io> it means the ConnectionPool is already gone
            // and we are safe to exit.
            return Poll::Ready(());
        }

        let mut inner = this.inner.borrow_mut();
        inner.waker.register(cx.waker());

        // check waiters
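The pool changes above spawn `ConnectorPoolSupport` once in `new()` and add a `Drop` impl that wakes it, so the support future can notice `Rc::strong_count == 1` and exit. A stripped-down sketch of that shutdown handshake with stand-in types (the real code keeps its waker inside a `RefCell`d `Inner` behind actix's `LocalWaker`):

```rust
use std::cell::RefCell;
use std::future::Future;
use std::pin::Pin;
use std::rc::Rc;
use std::task::{Context, Poll, Waker};

#[derive(Default)]
struct Inner {
    waker: RefCell<Option<Waker>>,
}

// Background task: runs while the pool exists, exits once it holds the last Rc.
struct Support {
    inner: Rc<Inner>,
}

impl Future for Support {
    type Output = ();

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        if Rc::strong_count(&self.inner) == 1 {
            // The pool is gone; we hold the only remaining clone, so exit.
            return Poll::Ready(());
        }
        // Otherwise park until the pool's Drop impl wakes us again.
        *self.inner.waker.borrow_mut() = Some(cx.waker().clone());
        Poll::Pending
    }
}

struct Pool {
    inner: Rc<Inner>,
}

impl Drop for Pool {
    fn drop(&mut self) {
        // Wake the support future so it re-checks the strong count.
        if let Some(waker) = self.inner.waker.borrow_mut().take() {
            waker.wake();
        }
    }
}

#[actix_rt::main]
async fn main() {
    let inner = Rc::new(Inner::default());
    let pool = Pool { inner: Rc::clone(&inner) };
    let support = Support { inner };

    drop(pool); // only the support future's clone remains
    support.await; // completes immediately thanks to the strong-count check
}
```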
@@ -17,7 +17,7 @@ const DATE_VALUE_LENGTH: usize = 29;
pub enum KeepAlive {
    /// Keep alive in seconds
    Timeout(usize),
    /// Relay on OS to shutdown tcp connection
    /// Rely on OS to shutdown tcp connection
    Os,
    /// Disabled
    Disabled,
@@ -209,6 +209,7 @@ impl Date {
        date.update();
        date
    }

    fn update(&mut self) {
        self.pos = 0;
        write!(
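The config.rs hunk above only corrects a doc typo on the `KeepAlive` enum. For context, a hedged sketch of how such a setting typically maps onto an optional timer, mirroring the three variants shown (the `keep_alive_timer` helper is made up for illustration and is not part of the crate):

```rust
use std::time::Duration;

// Stand-in for the enum shown in the hunk above.
enum KeepAlive {
    /// Keep alive in seconds.
    Timeout(usize),
    /// Rely on OS to shutdown tcp connection.
    Os,
    /// Disabled.
    Disabled,
}

// Hypothetical helper: only `Timeout` produces a userspace keep-alive timer;
// `Os` and `Disabled` differ in whether idle connections are left to the OS.
fn keep_alive_timer(ka: &KeepAlive) -> Option<Duration> {
    match ka {
        KeepAlive::Timeout(secs) => Some(Duration::from_secs(*secs as u64)),
        KeepAlive::Os | KeepAlive::Disabled => None,
    }
}

fn main() {
    assert_eq!(keep_alive_timer(&KeepAlive::Timeout(5)), Some(Duration::from_secs(5)));
    assert_eq!(keep_alive_timer(&KeepAlive::Os), None);
    assert_eq!(keep_alive_timer(&KeepAlive::Disabled), None);
}
```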
@ -1,252 +0,0 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use time::{Duration, OffsetDateTime};
|
||||
|
||||
use super::{Cookie, SameSite};
|
||||
|
||||
/// Structure that follows the builder pattern for building `Cookie` structs.
|
||||
///
|
||||
/// To construct a cookie:
|
||||
///
|
||||
/// 1. Call [`Cookie::build`](struct.Cookie.html#method.build) to start building.
|
||||
/// 2. Use any of the builder methods to set fields in the cookie.
|
||||
/// 3. Call [finish](#method.finish) to retrieve the built cookie.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Cookie;
|
||||
///
|
||||
/// let cookie: Cookie = Cookie::build("name", "value")
|
||||
/// .domain("www.rust-lang.org")
|
||||
/// .path("/")
|
||||
/// .secure(true)
|
||||
/// .http_only(true)
|
||||
/// .max_age(84600)
|
||||
/// .finish();
|
||||
/// ```
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CookieBuilder {
|
||||
/// The cookie being built.
|
||||
cookie: Cookie<'static>,
|
||||
}
|
||||
|
||||
impl CookieBuilder {
|
||||
/// Creates a new `CookieBuilder` instance from the given name and value.
|
||||
///
|
||||
/// This method is typically called indirectly via
|
||||
/// [Cookie::build](struct.Cookie.html#method.build).
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Cookie;
|
||||
///
|
||||
/// let c = Cookie::build("foo", "bar").finish();
|
||||
/// assert_eq!(c.name_value(), ("foo", "bar"));
|
||||
/// ```
|
||||
pub fn new<N, V>(name: N, value: V) -> CookieBuilder
|
||||
where
|
||||
N: Into<Cow<'static, str>>,
|
||||
V: Into<Cow<'static, str>>,
|
||||
{
|
||||
CookieBuilder {
|
||||
cookie: Cookie::new(name, value),
|
||||
}
|
||||
}
|
||||
|
||||
/// Sets the `expires` field in the cookie being built.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Cookie;
|
||||
///
|
||||
/// let c = Cookie::build("foo", "bar")
|
||||
/// .expires(time::OffsetDateTime::now_utc())
|
||||
/// .finish();
|
||||
///
|
||||
/// assert!(c.expires().is_some());
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn expires(mut self, when: OffsetDateTime) -> CookieBuilder {
|
||||
self.cookie.set_expires(when);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the `max_age` field in seconds in the cookie being built.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Cookie;
|
||||
///
|
||||
/// let c = Cookie::build("foo", "bar")
|
||||
/// .max_age(1800)
|
||||
/// .finish();
|
||||
///
|
||||
/// assert_eq!(c.max_age(), Some(time::Duration::seconds(30 * 60)));
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn max_age(self, seconds: i64) -> CookieBuilder {
|
||||
self.max_age_time(Duration::seconds(seconds))
|
||||
}
|
||||
|
||||
/// Sets the `max_age` field in the cookie being built.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Cookie;
|
||||
///
|
||||
/// let c = Cookie::build("foo", "bar")
|
||||
/// .max_age_time(time::Duration::minutes(30))
|
||||
/// .finish();
|
||||
///
|
||||
/// assert_eq!(c.max_age(), Some(time::Duration::seconds(30 * 60)));
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn max_age_time(mut self, value: Duration) -> CookieBuilder {
|
||||
// Truncate any nanoseconds from the Duration, as they aren't represented within `Max-Age`
|
||||
// and would cause two otherwise identical `Cookie` instances to not be equivalent to one another.
|
||||
self.cookie
|
||||
.set_max_age(Duration::seconds(value.whole_seconds()));
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the `domain` field in the cookie being built.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Cookie;
|
||||
///
|
||||
/// let c = Cookie::build("foo", "bar")
|
||||
/// .domain("www.rust-lang.org")
|
||||
/// .finish();
|
||||
///
|
||||
/// assert_eq!(c.domain(), Some("www.rust-lang.org"));
|
||||
/// ```
|
||||
pub fn domain<D: Into<Cow<'static, str>>>(mut self, value: D) -> CookieBuilder {
|
||||
self.cookie.set_domain(value);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the `path` field in the cookie being built.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Cookie;
|
||||
///
|
||||
/// let c = Cookie::build("foo", "bar")
|
||||
/// .path("/")
|
||||
/// .finish();
|
||||
///
|
||||
/// assert_eq!(c.path(), Some("/"));
|
||||
/// ```
|
||||
pub fn path<P: Into<Cow<'static, str>>>(mut self, path: P) -> CookieBuilder {
|
||||
self.cookie.set_path(path);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the `secure` field in the cookie being built.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Cookie;
|
||||
///
|
||||
/// let c = Cookie::build("foo", "bar")
|
||||
/// .secure(true)
|
||||
/// .finish();
|
||||
///
|
||||
/// assert_eq!(c.secure(), Some(true));
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn secure(mut self, value: bool) -> CookieBuilder {
|
||||
self.cookie.set_secure(value);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the `http_only` field in the cookie being built.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Cookie;
|
||||
///
|
||||
/// let c = Cookie::build("foo", "bar")
|
||||
/// .http_only(true)
|
||||
/// .finish();
|
||||
///
|
||||
/// assert_eq!(c.http_only(), Some(true));
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn http_only(mut self, value: bool) -> CookieBuilder {
|
||||
self.cookie.set_http_only(value);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the `same_site` field in the cookie being built.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{Cookie, SameSite};
|
||||
///
|
||||
/// let c = Cookie::build("foo", "bar")
|
||||
/// .same_site(SameSite::Strict)
|
||||
/// .finish();
|
||||
///
|
||||
/// assert_eq!(c.same_site(), Some(SameSite::Strict));
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn same_site(mut self, value: SameSite) -> CookieBuilder {
|
||||
self.cookie.set_same_site(value);
|
||||
self
|
||||
}
|
||||
|
||||
/// Makes the cookie being built 'permanent' by extending its expiration and
|
||||
/// max age 20 years into the future.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Cookie;
|
||||
/// use time::Duration;
|
||||
///
|
||||
/// let c = Cookie::build("foo", "bar")
|
||||
/// .permanent()
|
||||
/// .finish();
|
||||
///
|
||||
/// assert_eq!(c.max_age(), Some(Duration::days(365 * 20)));
|
||||
/// # assert!(c.expires().is_some());
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn permanent(mut self) -> CookieBuilder {
|
||||
self.cookie.make_permanent();
|
||||
self
|
||||
}
|
||||
|
||||
/// Finishes building and returns the built `Cookie`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Cookie;
|
||||
///
|
||||
/// let c = Cookie::build("foo", "bar")
|
||||
/// .domain("crates.io")
|
||||
/// .path("/")
|
||||
/// .finish();
|
||||
///
|
||||
/// assert_eq!(c.name_value(), ("foo", "bar"));
|
||||
/// assert_eq!(c.domain(), Some("crates.io"));
|
||||
/// assert_eq!(c.path(), Some("/"));
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn finish(self) -> Cookie<'static> {
|
||||
self.cookie
|
||||
}
|
||||
}
|
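One detail in `max_age_time` above is easy to miss: sub-second precision is truncated before the value is stored, because `Max-Age` cannot carry nanoseconds. A small sketch of the observable effect, assuming actix-http 2.x where this cookie module still ships with the crate:

```rust
use actix_http::cookie::Cookie;
use time::Duration;

fn main() {
    // 90 seconds plus 500 ns: only the whole seconds survive.
    let c = Cookie::build("foo", "bar")
        .max_age_time(Duration::seconds(90) + Duration::nanoseconds(500))
        .finish();

    assert_eq!(c.max_age(), Some(Duration::seconds(90)));
}
```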
@ -1,71 +0,0 @@
|
||||
use std::borrow::Borrow;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::ops::{Deref, DerefMut};
|
||||
|
||||
use super::Cookie;
|
||||
|
||||
/// A `DeltaCookie` is a helper structure used in a cookie jar. It wraps a
|
||||
/// `Cookie` so that it can be hashed and compared purely by name. It further
|
||||
/// records whether the wrapped cookie is a "removal" cookie, that is, a cookie
|
||||
/// that when sent to the client removes the named cookie on the client's
|
||||
/// machine.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct DeltaCookie {
|
||||
pub cookie: Cookie<'static>,
|
||||
pub removed: bool,
|
||||
}
|
||||
|
||||
impl DeltaCookie {
|
||||
/// Create a new `DeltaCookie` that is being added to a jar.
|
||||
#[inline]
|
||||
pub fn added(cookie: Cookie<'static>) -> DeltaCookie {
|
||||
DeltaCookie {
|
||||
cookie,
|
||||
removed: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new `DeltaCookie` that is being removed from a jar. The
|
||||
/// `cookie` should be a "removal" cookie.
|
||||
#[inline]
|
||||
pub fn removed(cookie: Cookie<'static>) -> DeltaCookie {
|
||||
DeltaCookie {
|
||||
cookie,
|
||||
removed: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for DeltaCookie {
|
||||
type Target = Cookie<'static>;
|
||||
|
||||
fn deref(&self) -> &Cookie<'static> {
|
||||
&self.cookie
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for DeltaCookie {
|
||||
fn deref_mut(&mut self) -> &mut Cookie<'static> {
|
||||
&mut self.cookie
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for DeltaCookie {
|
||||
fn eq(&self, other: &DeltaCookie) -> bool {
|
||||
self.name() == other.name()
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for DeltaCookie {}
|
||||
|
||||
impl Hash for DeltaCookie {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.name().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl Borrow<str> for DeltaCookie {
|
||||
fn borrow(&self) -> &str {
|
||||
self.name()
|
||||
}
|
||||
}
|
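The `Hash`, `PartialEq`, and `Borrow<str>` impls above all key on the cookie name alone, which is what lets `CookieJar` call `HashSet::get("name")` and `replace(...)` with name-only semantics. A standalone sketch of the same pattern with a simplified stand-in type:

```rust
use std::borrow::Borrow;
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

// Simplified stand-in for `DeltaCookie`: equality and hashing ignore the value.
#[derive(Debug)]
struct NamedEntry {
    name: String,
    value: String,
}

impl PartialEq for NamedEntry {
    fn eq(&self, other: &NamedEntry) -> bool {
        self.name == other.name
    }
}

impl Eq for NamedEntry {}

impl Hash for NamedEntry {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.name.hash(state);
    }
}

// `Borrow<str>` must hash and compare like the owned key; that holds here
// because both sides use only the name.
impl Borrow<str> for NamedEntry {
    fn borrow(&self) -> &str {
        &self.name
    }
}

fn main() {
    let mut set = HashSet::new();
    set.insert(NamedEntry { name: "session".into(), value: "one".into() });

    // Replacing by name, regardless of value, mirrors `delta_cookies.replace(...)`.
    set.replace(NamedEntry { name: "session".into(), value: "two".into() });

    // Lookup by plain `&str` mirrors `self.delta_cookies.get(name)`.
    assert_eq!(set.get("session").map(|e| e.value.as_str()), Some("two"));
    assert_eq!(set.len(), 1);
}
```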
@ -1,106 +0,0 @@
|
||||
//! This module contains types that represent cookie properties that are not yet
|
||||
//! standardized. That is, _draft_ features.
|
||||
|
||||
use std::fmt;
|
||||
|
||||
/// The `SameSite` cookie attribute.
|
||||
///
|
||||
/// A cookie with a `SameSite` attribute is imposed restrictions on when it is
|
||||
/// sent to the origin server in a cross-site request. If the `SameSite`
|
||||
/// attribute is "Strict", then the cookie is never sent in cross-site requests.
|
||||
/// If the `SameSite` attribute is "Lax", the cookie is only sent in cross-site
|
||||
/// requests with "safe" HTTP methods, i.e, `GET`, `HEAD`, `OPTIONS`, `TRACE`.
|
||||
/// If the `SameSite` attribute is not present then the cookie will be sent as
|
||||
/// normal. In some browsers, this will implicitly handle the cookie as if "Lax"
|
||||
/// and in others, "None". It's best to explicitly set the `SameSite` attribute
|
||||
/// to avoid inconsistent behavior.
|
||||
///
|
||||
/// **Note:** Depending on browser, the `Secure` attribute may be required for
|
||||
/// `SameSite` "None" cookies to be accepted.
|
||||
///
|
||||
/// **Note:** This cookie attribute is an HTTP draft! Its meaning and definition
|
||||
/// are subject to change.
|
||||
///
|
||||
/// More info about these draft changes can be found in the draft spec:
|
||||
/// - https://tools.ietf.org/html/draft-west-cookie-incrementalism-00
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum SameSite {
|
||||
/// The "Strict" `SameSite` attribute.
|
||||
Strict,
|
||||
/// The "Lax" `SameSite` attribute.
|
||||
Lax,
|
||||
/// The "None" `SameSite` attribute.
|
||||
None,
|
||||
}
|
||||
|
||||
impl SameSite {
|
||||
/// Returns `true` if `self` is `SameSite::Strict` and `false` otherwise.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::SameSite;
|
||||
///
|
||||
/// let strict = SameSite::Strict;
|
||||
/// assert!(strict.is_strict());
|
||||
/// assert!(!strict.is_lax());
|
||||
/// assert!(!strict.is_none());
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn is_strict(self) -> bool {
|
||||
match self {
|
||||
SameSite::Strict => true,
|
||||
SameSite::Lax | SameSite::None => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if `self` is `SameSite::Lax` and `false` otherwise.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::SameSite;
|
||||
///
|
||||
/// let lax = SameSite::Lax;
|
||||
/// assert!(lax.is_lax());
|
||||
/// assert!(!lax.is_strict());
|
||||
/// assert!(!lax.is_none());
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn is_lax(self) -> bool {
|
||||
match self {
|
||||
SameSite::Lax => true,
|
||||
SameSite::Strict | SameSite::None => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if `self` is `SameSite::None` and `false` otherwise.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::SameSite;
|
||||
///
|
||||
/// let none = SameSite::None;
|
||||
/// assert!(none.is_none());
|
||||
/// assert!(!none.is_lax());
|
||||
/// assert!(!none.is_strict());
|
||||
/// ```
|
||||
#[inline]
|
||||
pub fn is_none(self) -> bool {
|
||||
match self {
|
||||
SameSite::None => true,
|
||||
SameSite::Lax | SameSite::Strict => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for SameSite {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match *self {
|
||||
SameSite::Strict => write!(f, "Strict"),
|
||||
SameSite::Lax => write!(f, "Lax"),
|
||||
SameSite::None => write!(f, "None"),
|
||||
}
|
||||
}
|
||||
}
|
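The `Display` impl above renders the attribute exactly as it appears in a `Set-Cookie` header. A short check of that behaviour, assuming actix-http 2.x where this cookie module still ships with the crate:

```rust
use actix_http::cookie::SameSite;

fn main() {
    assert_eq!(SameSite::Strict.to_string(), "Strict");
    assert_eq!(SameSite::Lax.to_string(), "Lax");
    assert_eq!(SameSite::None.to_string(), "None");
}
```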
@ -1,651 +0,0 @@
|
||||
use std::collections::HashSet;
|
||||
use std::mem;
|
||||
|
||||
use time::{Duration, OffsetDateTime};
|
||||
|
||||
use super::delta::DeltaCookie;
|
||||
use super::Cookie;
|
||||
|
||||
#[cfg(feature = "secure-cookies")]
|
||||
use super::secure::{Key, PrivateJar, SignedJar};
|
||||
|
||||
/// A collection of cookies that tracks its modifications.
|
||||
///
|
||||
/// A `CookieJar` provides storage for any number of cookies. Any changes made
|
||||
/// to the jar are tracked; the changes can be retrieved via the
|
||||
/// [delta](#method.delta) method which returns an iterator over the changes.
|
||||
///
|
||||
/// # Usage
|
||||
///
|
||||
/// A jar's life begins via [new](#method.new) and calls to
|
||||
/// [`add_original`](#method.add_original):
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{Cookie, CookieJar};
|
||||
///
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// jar.add_original(Cookie::new("name", "value"));
|
||||
/// jar.add_original(Cookie::new("second", "another"));
|
||||
/// ```
|
||||
///
|
||||
/// Cookies can be added via [add](#method.add) and removed via
|
||||
/// [remove](#method.remove). Finally, cookies can be looked up via
|
||||
/// [get](#method.get):
|
||||
///
|
||||
/// ```rust
|
||||
/// # use actix_http::cookie::{Cookie, CookieJar};
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// jar.add(Cookie::new("a", "one"));
|
||||
/// jar.add(Cookie::new("b", "two"));
|
||||
///
|
||||
/// assert_eq!(jar.get("a").map(|c| c.value()), Some("one"));
|
||||
/// assert_eq!(jar.get("b").map(|c| c.value()), Some("two"));
|
||||
///
|
||||
/// jar.remove(Cookie::named("b"));
|
||||
/// assert!(jar.get("b").is_none());
|
||||
/// ```
|
||||
///
|
||||
/// # Deltas
|
||||
///
|
||||
/// A jar keeps track of any modifications made to it over time. The
|
||||
/// modifications are recorded as cookies. The modifications can be retrieved
|
||||
/// via [delta](#method.delta). Any new `Cookie` added to a jar via `add`
|
||||
/// results in the same `Cookie` appearing in the `delta`; cookies added via
|
||||
/// `add_original` do not count towards the delta. Any _original_ cookie that is
|
||||
/// removed from a jar results in a "removal" cookie appearing in the delta. A
|
||||
/// "removal" cookie is a cookie that a server sends so that the cookie is
|
||||
/// removed from the client's machine.
|
||||
///
|
||||
/// Deltas are typically used to create `Set-Cookie` headers corresponding to
|
||||
/// the changes made to a cookie jar over a period of time.
|
||||
///
|
||||
/// ```rust
|
||||
/// # use actix_http::cookie::{Cookie, CookieJar};
|
||||
/// let mut jar = CookieJar::new();
|
||||
///
|
||||
/// // original cookies don't affect the delta
|
||||
/// jar.add_original(Cookie::new("original", "value"));
|
||||
/// assert_eq!(jar.delta().count(), 0);
|
||||
///
|
||||
/// // new cookies result in an equivalent `Cookie` in the delta
|
||||
/// jar.add(Cookie::new("a", "one"));
|
||||
/// jar.add(Cookie::new("b", "two"));
|
||||
/// assert_eq!(jar.delta().count(), 2);
|
||||
///
|
||||
/// // removing an original cookie adds a "removal" cookie to the delta
|
||||
/// jar.remove(Cookie::named("original"));
|
||||
/// assert_eq!(jar.delta().count(), 3);
|
||||
///
|
||||
/// // removing a new cookie that was added removes that `Cookie` from the delta
|
||||
/// jar.remove(Cookie::named("a"));
|
||||
/// assert_eq!(jar.delta().count(), 2);
|
||||
/// ```
|
||||
#[derive(Default, Debug, Clone)]
|
||||
pub struct CookieJar {
|
||||
original_cookies: HashSet<DeltaCookie>,
|
||||
delta_cookies: HashSet<DeltaCookie>,
|
||||
}
|
||||
|
||||
impl CookieJar {
|
||||
/// Creates an empty cookie jar.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::CookieJar;
|
||||
///
|
||||
/// let jar = CookieJar::new();
|
||||
/// assert_eq!(jar.iter().count(), 0);
|
||||
/// ```
|
||||
pub fn new() -> CookieJar {
|
||||
CookieJar::default()
|
||||
}
|
||||
|
||||
/// Returns a reference to the `Cookie` inside this jar with the name
|
||||
/// `name`. If no such cookie exists, returns `None`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie};
|
||||
///
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// assert!(jar.get("name").is_none());
|
||||
///
|
||||
/// jar.add(Cookie::new("name", "value"));
|
||||
/// assert_eq!(jar.get("name").map(|c| c.value()), Some("value"));
|
||||
/// ```
|
||||
pub fn get(&self, name: &str) -> Option<&Cookie<'static>> {
|
||||
self.delta_cookies
|
||||
.get(name)
|
||||
.or_else(|| self.original_cookies.get(name))
|
||||
.and_then(|c| if !c.removed { Some(&c.cookie) } else { None })
|
||||
}
|
||||
|
||||
/// Adds an "original" `cookie` to this jar. If an original cookie with the
|
||||
/// same name already exists, it is replaced with `cookie`. Cookies added
|
||||
/// with `add` take precedence and are not replaced by this method.
|
||||
///
|
||||
/// Adding an original cookie does not affect the [delta](#method.delta)
|
||||
/// computation. This method is intended to be used to seed the cookie jar
|
||||
/// with cookies received from a client's HTTP message.
|
||||
///
|
||||
/// For accurate `delta` computations, this method should not be called
|
||||
/// after calling `remove`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie};
|
||||
///
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// jar.add_original(Cookie::new("name", "value"));
|
||||
/// jar.add_original(Cookie::new("second", "two"));
|
||||
///
|
||||
/// assert_eq!(jar.get("name").map(|c| c.value()), Some("value"));
|
||||
/// assert_eq!(jar.get("second").map(|c| c.value()), Some("two"));
|
||||
/// assert_eq!(jar.iter().count(), 2);
|
||||
/// assert_eq!(jar.delta().count(), 0);
|
||||
/// ```
|
||||
pub fn add_original(&mut self, cookie: Cookie<'static>) {
|
||||
self.original_cookies.replace(DeltaCookie::added(cookie));
|
||||
}
|
||||
|
||||
/// Adds `cookie` to this jar. If a cookie with the same name already
|
||||
/// exists, it is replaced with `cookie`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie};
|
||||
///
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// jar.add(Cookie::new("name", "value"));
|
||||
/// jar.add(Cookie::new("second", "two"));
|
||||
///
|
||||
/// assert_eq!(jar.get("name").map(|c| c.value()), Some("value"));
|
||||
/// assert_eq!(jar.get("second").map(|c| c.value()), Some("two"));
|
||||
/// assert_eq!(jar.iter().count(), 2);
|
||||
/// assert_eq!(jar.delta().count(), 2);
|
||||
/// ```
|
||||
pub fn add(&mut self, cookie: Cookie<'static>) {
|
||||
self.delta_cookies.replace(DeltaCookie::added(cookie));
|
||||
}
|
||||
|
||||
/// Removes `cookie` from this jar. If an _original_ cookie with the same
|
||||
/// name as `cookie` is present in the jar, a _removal_ cookie will be
|
||||
/// present in the `delta` computation. To properly generate the removal
|
||||
/// cookie, `cookie` must contain the same `path` and `domain` as the cookie
|
||||
/// that was initially set.
|
||||
///
|
||||
/// A "removal" cookie is a cookie that has the same name as the original
|
||||
/// cookie but has an empty value, a max-age of 0, and an expiration date
|
||||
/// far in the past.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// Removing an _original_ cookie results in a _removal_ cookie:
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie};
|
||||
/// use time::Duration;
|
||||
///
|
||||
/// let mut jar = CookieJar::new();
|
||||
///
|
||||
/// // Assume this cookie originally had a path of "/" and domain of "a.b".
|
||||
/// jar.add_original(Cookie::new("name", "value"));
|
||||
///
|
||||
/// // If the path and domain were set, they must be provided to `remove`.
|
||||
/// jar.remove(Cookie::build("name", "").path("/").domain("a.b").finish());
|
||||
///
|
||||
/// // The delta will contain the removal cookie.
|
||||
/// let delta: Vec<_> = jar.delta().collect();
|
||||
/// assert_eq!(delta.len(), 1);
|
||||
/// assert_eq!(delta[0].name(), "name");
|
||||
/// assert_eq!(delta[0].max_age(), Some(Duration::zero()));
|
||||
/// ```
|
||||
///
|
||||
/// Removing a new cookie does not result in a _removal_ cookie:
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie};
|
||||
///
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// jar.add(Cookie::new("name", "value"));
|
||||
/// assert_eq!(jar.delta().count(), 1);
|
||||
///
|
||||
/// jar.remove(Cookie::named("name"));
|
||||
/// assert_eq!(jar.delta().count(), 0);
|
||||
/// ```
|
||||
pub fn remove(&mut self, mut cookie: Cookie<'static>) {
|
||||
if self.original_cookies.contains(cookie.name()) {
|
||||
cookie.set_value("");
|
||||
cookie.set_max_age(Duration::zero());
|
||||
cookie.set_expires(OffsetDateTime::now_utc() - Duration::days(365));
|
||||
self.delta_cookies.replace(DeltaCookie::removed(cookie));
|
||||
} else {
|
||||
self.delta_cookies.remove(cookie.name());
|
||||
}
|
||||
}
|
||||
|
||||
/// Removes `cookie` from this jar completely. This method differs from
|
||||
/// `remove` in that no delta cookie is created under any condition. Neither
|
||||
/// the `delta` nor `iter` methods will return a cookie that is removed
|
||||
/// using this method.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// Removing an _original_ cookie; no _removal_ cookie is generated:
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie};
|
||||
/// use time::Duration;
|
||||
///
|
||||
/// let mut jar = CookieJar::new();
|
||||
///
|
||||
/// // Add an original cookie and a new cookie.
|
||||
/// jar.add_original(Cookie::new("name", "value"));
|
||||
/// jar.add(Cookie::new("key", "value"));
|
||||
/// assert_eq!(jar.delta().count(), 1);
|
||||
/// assert_eq!(jar.iter().count(), 2);
|
||||
///
|
||||
/// // Now force remove the original cookie.
|
||||
/// jar.force_remove(Cookie::new("name", "value"));
|
||||
/// assert_eq!(jar.delta().count(), 1);
|
||||
/// assert_eq!(jar.iter().count(), 1);
|
||||
///
|
||||
/// // Now force remove the new cookie.
|
||||
/// jar.force_remove(Cookie::new("key", "value"));
|
||||
/// assert_eq!(jar.delta().count(), 0);
|
||||
/// assert_eq!(jar.iter().count(), 0);
|
||||
/// ```
|
||||
pub fn force_remove<'a>(&mut self, cookie: Cookie<'a>) {
|
||||
self.original_cookies.remove(cookie.name());
|
||||
self.delta_cookies.remove(cookie.name());
|
||||
}
|
||||
|
||||
/// Removes all cookies from this cookie jar.
|
||||
#[deprecated(
|
||||
since = "0.7.0",
|
||||
note = "calling this method may not remove \
|
||||
all cookies since the path and domain are not specified; use \
|
||||
`remove` instead"
|
||||
)]
|
||||
pub fn clear(&mut self) {
|
||||
self.delta_cookies.clear();
|
||||
for delta in mem::take(&mut self.original_cookies) {
|
||||
self.remove(delta.cookie);
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns an iterator over cookies that represent the changes to this jar
|
||||
/// over time. These cookies can be rendered directly as `Set-Cookie` header
|
||||
/// values to affect the changes made to this jar on the client.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie};
|
||||
///
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// jar.add_original(Cookie::new("name", "value"));
|
||||
/// jar.add_original(Cookie::new("second", "two"));
|
||||
///
|
||||
/// // Add new cookies.
|
||||
/// jar.add(Cookie::new("new", "third"));
|
||||
/// jar.add(Cookie::new("another", "fourth"));
|
||||
/// jar.add(Cookie::new("yac", "fifth"));
|
||||
///
|
||||
/// // Remove some cookies.
|
||||
/// jar.remove(Cookie::named("name"));
|
||||
/// jar.remove(Cookie::named("another"));
|
||||
///
|
||||
/// // Delta contains two new cookies ("new", "yac") and a removal ("name").
|
||||
/// assert_eq!(jar.delta().count(), 3);
|
||||
/// ```
|
||||
pub fn delta(&self) -> Delta<'_> {
|
||||
Delta {
|
||||
iter: self.delta_cookies.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns an iterator over all of the cookies present in this jar.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie};
|
||||
///
|
||||
/// let mut jar = CookieJar::new();
|
||||
///
|
||||
/// jar.add_original(Cookie::new("name", "value"));
|
||||
/// jar.add_original(Cookie::new("second", "two"));
|
||||
///
|
||||
/// jar.add(Cookie::new("new", "third"));
|
||||
/// jar.add(Cookie::new("another", "fourth"));
|
||||
/// jar.add(Cookie::new("yac", "fifth"));
|
||||
///
|
||||
/// jar.remove(Cookie::named("name"));
|
||||
/// jar.remove(Cookie::named("another"));
|
||||
///
|
||||
/// // There are three cookies in the jar: "second", "new", and "yac".
|
||||
/// # assert_eq!(jar.iter().count(), 3);
|
||||
/// for cookie in jar.iter() {
|
||||
/// match cookie.name() {
|
||||
/// "second" => assert_eq!(cookie.value(), "two"),
|
||||
/// "new" => assert_eq!(cookie.value(), "third"),
|
||||
/// "yac" => assert_eq!(cookie.value(), "fifth"),
|
||||
/// _ => unreachable!("there are only three cookies in the jar")
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
pub fn iter(&self) -> Iter<'_> {
|
||||
Iter {
|
||||
delta_cookies: self
|
||||
.delta_cookies
|
||||
.iter()
|
||||
.chain(self.original_cookies.difference(&self.delta_cookies)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a `PrivateJar` with `self` as its parent jar using the key `key`
|
||||
/// to sign/encrypt and verify/decrypt cookies added/retrieved from the
|
||||
/// child jar.
|
||||
///
|
||||
/// Any modifications to the child jar will be reflected on the parent jar,
|
||||
/// and any retrievals from the child jar will be made from the parent jar.
|
||||
///
|
||||
/// This method is only available when the `secure` feature is enabled.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{Cookie, CookieJar, Key};
|
||||
///
|
||||
/// // Generate a secure key.
|
||||
/// let key = Key::generate();
|
||||
///
|
||||
/// // Add a private (signed + encrypted) cookie.
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// jar.private(&key).add(Cookie::new("private", "text"));
|
||||
///
|
||||
/// // The cookie's contents are encrypted.
|
||||
/// assert_ne!(jar.get("private").unwrap().value(), "text");
|
||||
///
|
||||
/// // They can be decrypted and verified through the child jar.
|
||||
/// assert_eq!(jar.private(&key).get("private").unwrap().value(), "text");
|
||||
///
|
||||
/// // A tampered with cookie does not validate but still exists.
|
||||
/// let mut cookie = jar.get("private").unwrap().clone();
|
||||
/// jar.add(Cookie::new("private", cookie.value().to_string() + "!"));
|
||||
/// assert!(jar.private(&key).get("private").is_none());
|
||||
/// assert!(jar.get("private").is_some());
|
||||
/// ```
|
||||
#[cfg(feature = "secure-cookies")]
|
||||
pub fn private(&mut self, key: &Key) -> PrivateJar<'_> {
|
||||
PrivateJar::new(self, key)
|
||||
}
|
||||
|
||||
/// Returns a `SignedJar` with `self` as its parent jar using the key `key`
|
||||
/// to sign/verify cookies added/retrieved from the child jar.
|
||||
///
|
||||
/// Any modifications to the child jar will be reflected on the parent jar,
|
||||
/// and any retrievals from the child jar will be made from the parent jar.
|
||||
///
|
||||
/// This method is only available when the `secure` feature is enabled.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{Cookie, CookieJar, Key};
|
||||
///
|
||||
/// // Generate a secure key.
|
||||
/// let key = Key::generate();
|
||||
///
|
||||
/// // Add a signed cookie.
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// jar.signed(&key).add(Cookie::new("signed", "text"));
|
||||
///
|
||||
/// // The cookie's contents are signed but still in plaintext.
|
||||
/// assert_ne!(jar.get("signed").unwrap().value(), "text");
|
||||
/// assert!(jar.get("signed").unwrap().value().contains("text"));
|
||||
///
|
||||
/// // They can be verified through the child jar.
|
||||
/// assert_eq!(jar.signed(&key).get("signed").unwrap().value(), "text");
|
||||
///
|
||||
/// // A tampered with cookie does not validate but still exists.
|
||||
/// let mut cookie = jar.get("signed").unwrap().clone();
|
||||
/// jar.add(Cookie::new("signed", cookie.value().to_string() + "!"));
|
||||
/// assert!(jar.signed(&key).get("signed").is_none());
|
||||
/// assert!(jar.get("signed").is_some());
|
||||
/// ```
|
||||
#[cfg(feature = "secure-cookies")]
|
||||
pub fn signed(&mut self, key: &Key) -> SignedJar<'_> {
|
||||
SignedJar::new(self, key)
|
||||
}
|
||||
}
|
||||
|
||||
use std::collections::hash_set::Iter as HashSetIter;
|
||||
|
||||
/// Iterator over the changes to a cookie jar.
|
||||
pub struct Delta<'a> {
|
||||
iter: HashSetIter<'a, DeltaCookie>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for Delta<'a> {
|
||||
type Item = &'a Cookie<'static>;
|
||||
|
||||
fn next(&mut self) -> Option<&'a Cookie<'static>> {
|
||||
self.iter.next().map(|c| &c.cookie)
|
||||
}
|
||||
}
|
||||
|
||||
use std::collections::hash_map::RandomState;
|
||||
use std::collections::hash_set::Difference;
|
||||
use std::iter::Chain;
|
||||
|
||||
/// Iterator over all of the cookies in a jar.
|
||||
pub struct Iter<'a> {
|
||||
delta_cookies:
|
||||
Chain<HashSetIter<'a, DeltaCookie>, Difference<'a, DeltaCookie, RandomState>>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for Iter<'a> {
|
||||
type Item = &'a Cookie<'static>;
|
||||
|
||||
fn next(&mut self) -> Option<&'a Cookie<'static>> {
|
||||
for cookie in self.delta_cookies.by_ref() {
|
||||
if !cookie.removed {
|
||||
return Some(&*cookie);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
#[cfg(feature = "secure-cookies")]
|
||||
use super::Key;
|
||||
use super::{Cookie, CookieJar};
|
||||
|
||||
#[test]
|
||||
#[allow(deprecated)]
|
||||
fn simple() {
|
||||
let mut c = CookieJar::new();
|
||||
|
||||
c.add(Cookie::new("test", ""));
|
||||
c.add(Cookie::new("test2", ""));
|
||||
c.remove(Cookie::named("test"));
|
||||
|
||||
assert!(c.get("test").is_none());
|
||||
assert!(c.get("test2").is_some());
|
||||
|
||||
c.add(Cookie::new("test3", ""));
|
||||
c.clear();
|
||||
|
||||
assert!(c.get("test").is_none());
|
||||
assert!(c.get("test2").is_none());
|
||||
assert!(c.get("test3").is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn jar_is_send() {
|
||||
fn is_send<T: Send>(_: T) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
assert!(is_send(CookieJar::new()))
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(feature = "secure-cookies")]
|
||||
fn iter() {
|
||||
let key = Key::generate();
|
||||
let mut c = CookieJar::new();
|
||||
|
||||
c.add_original(Cookie::new("original", "original"));
|
||||
|
||||
c.add(Cookie::new("test", "test"));
|
||||
c.add(Cookie::new("test2", "test2"));
|
||||
c.add(Cookie::new("test3", "test3"));
|
||||
assert_eq!(c.iter().count(), 4);
|
||||
|
||||
c.signed(&key).add(Cookie::new("signed", "signed"));
|
||||
c.private(&key).add(Cookie::new("encrypted", "encrypted"));
|
||||
assert_eq!(c.iter().count(), 6);
|
||||
|
||||
c.remove(Cookie::named("test"));
|
||||
assert_eq!(c.iter().count(), 5);
|
||||
|
||||
c.remove(Cookie::named("signed"));
|
||||
c.remove(Cookie::named("test2"));
|
||||
assert_eq!(c.iter().count(), 3);
|
||||
|
||||
c.add(Cookie::new("test2", "test2"));
|
||||
assert_eq!(c.iter().count(), 4);
|
||||
|
||||
c.remove(Cookie::named("test2"));
|
||||
assert_eq!(c.iter().count(), 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(feature = "secure-cookies")]
|
||||
fn delta() {
|
||||
use std::collections::HashMap;
|
||||
use time::Duration;
|
||||
|
||||
let mut c = CookieJar::new();
|
||||
|
||||
c.add_original(Cookie::new("original", "original"));
|
||||
c.add_original(Cookie::new("original1", "original1"));
|
||||
|
||||
c.add(Cookie::new("test", "test"));
|
||||
c.add(Cookie::new("test2", "test2"));
|
||||
c.add(Cookie::new("test3", "test3"));
|
||||
c.add(Cookie::new("test4", "test4"));
|
||||
|
||||
c.remove(Cookie::named("test"));
|
||||
c.remove(Cookie::named("original"));
|
||||
|
||||
assert_eq!(c.delta().count(), 4);
|
||||
|
||||
let names: HashMap<_, _> = c.delta().map(|c| (c.name(), c.max_age())).collect();
|
||||
|
||||
assert!(names.get("test2").unwrap().is_none());
|
||||
assert!(names.get("test3").unwrap().is_none());
|
||||
assert!(names.get("test4").unwrap().is_none());
|
||||
assert_eq!(names.get("original").unwrap(), &Some(Duration::zero()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replace_original() {
|
||||
let mut jar = CookieJar::new();
|
||||
jar.add_original(Cookie::new("original_a", "a"));
|
||||
jar.add_original(Cookie::new("original_b", "b"));
|
||||
assert_eq!(jar.get("original_a").unwrap().value(), "a");
|
||||
|
||||
jar.add(Cookie::new("original_a", "av2"));
|
||||
assert_eq!(jar.get("original_a").unwrap().value(), "av2");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty_delta() {
|
||||
let mut jar = CookieJar::new();
|
||||
jar.add(Cookie::new("name", "val"));
|
||||
assert_eq!(jar.delta().count(), 1);
|
||||
|
||||
jar.remove(Cookie::named("name"));
|
||||
assert_eq!(jar.delta().count(), 0);
|
||||
|
||||
jar.add_original(Cookie::new("name", "val"));
|
||||
assert_eq!(jar.delta().count(), 0);
|
||||
|
||||
jar.remove(Cookie::named("name"));
|
||||
assert_eq!(jar.delta().count(), 1);
|
||||
|
||||
jar.add(Cookie::new("name", "val"));
|
||||
assert_eq!(jar.delta().count(), 1);
|
||||
|
||||
jar.remove(Cookie::named("name"));
|
||||
assert_eq!(jar.delta().count(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_remove_add() {
|
||||
let mut jar = CookieJar::new();
|
||||
jar.add_original(Cookie::new("name", "val"));
|
||||
assert_eq!(jar.delta().count(), 0);
|
||||
|
||||
jar.remove(Cookie::named("name"));
|
||||
assert_eq!(jar.delta().filter(|c| c.value().is_empty()).count(), 1);
|
||||
assert_eq!(jar.delta().count(), 1);
|
||||
|
||||
// The cookie's been deleted. Another original doesn't change that.
|
||||
jar.add_original(Cookie::new("name", "val"));
|
||||
assert_eq!(jar.delta().filter(|c| c.value().is_empty()).count(), 1);
|
||||
assert_eq!(jar.delta().count(), 1);
|
||||
|
||||
jar.remove(Cookie::named("name"));
|
||||
assert_eq!(jar.delta().filter(|c| c.value().is_empty()).count(), 1);
|
||||
assert_eq!(jar.delta().count(), 1);
|
||||
|
||||
jar.add(Cookie::new("name", "val"));
|
||||
assert_eq!(jar.delta().filter(|c| !c.value().is_empty()).count(), 1);
|
||||
assert_eq!(jar.delta().count(), 1);
|
||||
|
||||
jar.remove(Cookie::named("name"));
|
||||
assert_eq!(jar.delta().filter(|c| c.value().is_empty()).count(), 1);
|
||||
assert_eq!(jar.delta().count(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replace_remove() {
|
||||
let mut jar = CookieJar::new();
|
||||
jar.add_original(Cookie::new("name", "val"));
|
||||
assert_eq!(jar.delta().count(), 0);
|
||||
|
||||
jar.add(Cookie::new("name", "val"));
|
||||
assert_eq!(jar.delta().count(), 1);
|
||||
assert_eq!(jar.delta().filter(|c| !c.value().is_empty()).count(), 1);
|
||||
|
||||
jar.remove(Cookie::named("name"));
|
||||
assert_eq!(jar.delta().filter(|c| c.value().is_empty()).count(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn remove_with_path() {
|
||||
let mut jar = CookieJar::new();
|
||||
jar.add_original(Cookie::build("name", "val").finish());
|
||||
assert_eq!(jar.iter().count(), 1);
|
||||
assert_eq!(jar.delta().count(), 0);
|
||||
assert_eq!(jar.iter().filter(|c| c.path().is_none()).count(), 1);
|
||||
|
||||
jar.remove(Cookie::build("name", "").path("/").finish());
|
||||
assert_eq!(jar.iter().count(), 0);
|
||||
assert_eq!(jar.delta().count(), 1);
|
||||
assert_eq!(jar.delta().filter(|c| c.value().is_empty()).count(), 1);
|
||||
assert_eq!(jar.delta().filter(|c| c.path() == Some("/")).count(), 1);
|
||||
}
|
||||
}
|
File diff suppressed because it is too large.
@ -1,467 +0,0 @@
|
||||
use std::borrow::Cow;
|
||||
use std::cmp;
|
||||
use std::convert::From;
|
||||
use std::error::Error;
|
||||
use std::fmt;
|
||||
use std::str::Utf8Error;
|
||||
|
||||
use percent_encoding::percent_decode;
|
||||
use time::Duration;
|
||||
|
||||
use super::{Cookie, CookieStr, SameSite};
|
||||
|
||||
use crate::time_parser;
|
||||
|
||||
/// Enum corresponding to a parsing error.
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub enum ParseError {
|
||||
/// The cookie did not contain a name/value pair.
|
||||
MissingPair,
|
||||
/// The cookie's name was empty.
|
||||
EmptyName,
|
||||
/// Decoding the cookie's name or value resulted in invalid UTF-8.
|
||||
Utf8Error(Utf8Error),
|
||||
/// It is discouraged to exhaustively match on this enum as its variants may
|
||||
/// grow without a breaking-change bump in version numbers.
|
||||
#[doc(hidden)]
|
||||
__Nonexhasutive,
|
||||
}
|
||||
|
||||
impl ParseError {
|
||||
/// Returns a description of this error as a string
|
||||
pub fn as_str(&self) -> &'static str {
|
||||
match *self {
|
||||
ParseError::MissingPair => "the cookie is missing a name/value pair",
|
||||
ParseError::EmptyName => "the cookie's name is empty",
|
||||
ParseError::Utf8Error(_) => {
|
||||
"decoding the cookie's name or value resulted in invalid UTF-8"
|
||||
}
|
||||
ParseError::__Nonexhasutive => unreachable!("__Nonexhasutive ParseError"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for ParseError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Utf8Error> for ParseError {
|
||||
fn from(error: Utf8Error) -> ParseError {
|
||||
ParseError::Utf8Error(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for ParseError {}
|
||||
|
||||
fn indexes_of(needle: &str, haystack: &str) -> Option<(usize, usize)> {
|
||||
let haystack_start = haystack.as_ptr() as usize;
|
||||
let needle_start = needle.as_ptr() as usize;
|
||||
|
||||
if needle_start < haystack_start {
|
||||
return None;
|
||||
}
|
||||
|
||||
if (needle_start + needle.len()) > (haystack_start + haystack.len()) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let start = needle_start - haystack_start;
|
||||
let end = start + needle.len();
|
||||
Some((start, end))
|
||||
}
|
||||
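`indexes_of` above recovers the byte range of a borrowed substring inside its parent string through pointer arithmetic, returning `None` when the slice does not actually point into the haystack. The function is private to this module, so the sketch below restates it to illustrate the contract:

```rust
fn indexes_of(needle: &str, haystack: &str) -> Option<(usize, usize)> {
    let haystack_start = haystack.as_ptr() as usize;
    let needle_start = needle.as_ptr() as usize;

    if needle_start < haystack_start {
        return None;
    }
    if (needle_start + needle.len()) > (haystack_start + haystack.len()) {
        return None;
    }

    let start = needle_start - haystack_start;
    let end = start + needle.len();
    Some((start, end))
}

fn main() {
    let raw = "name=value; Path=/";
    let value = &raw[5..10]; // borrowed directly out of `raw`
    assert_eq!(indexes_of(value, raw), Some((5, 10)));

    // A slice from a different allocation is rejected even if the text matches.
    let detached = String::from("value");
    assert_eq!(indexes_of(&detached, raw), None);
}
```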
|
||||
fn name_val_decoded(
|
||||
name: &str,
|
||||
val: &str,
|
||||
) -> Result<(CookieStr, CookieStr), ParseError> {
|
||||
let decoded_name = percent_decode(name.as_bytes()).decode_utf8()?;
|
||||
let decoded_value = percent_decode(val.as_bytes()).decode_utf8()?;
|
||||
let name = CookieStr::Concrete(Cow::Owned(decoded_name.into_owned()));
|
||||
let val = CookieStr::Concrete(Cow::Owned(decoded_value.into_owned()));
|
||||
|
||||
Ok((name, val))
|
||||
}
|
||||
|
||||
// This function does the real parsing but _does not_ set the `cookie_string` in
|
||||
// the returned cookie object. This only exists so that the borrow to `s` is
|
||||
// returned at the end of the call, allowing the `cookie_string` field to be
|
||||
// set in the outer `parse` function.
|
||||
fn parse_inner<'c>(s: &str, decode: bool) -> Result<Cookie<'c>, ParseError> {
|
||||
let mut attributes = s.split(';');
|
||||
let key_value = match attributes.next() {
|
||||
Some(s) => s,
|
||||
_ => panic!(),
|
||||
};
|
||||
|
||||
// Determine the name = val.
|
||||
let (name, value) = match key_value.find('=') {
|
||||
Some(i) => (key_value[..i].trim(), key_value[(i + 1)..].trim()),
|
||||
None => return Err(ParseError::MissingPair),
|
||||
};
|
||||
|
||||
if name.is_empty() {
|
||||
return Err(ParseError::EmptyName);
|
||||
}
|
||||
|
||||
// Create a cookie with all of the defaults. We'll fill things in while we
|
||||
// iterate through the parameters below.
|
||||
let (name, value) = if decode {
|
||||
name_val_decoded(name, value)?
|
||||
} else {
|
||||
let name_indexes = indexes_of(name, s).expect("name sub");
|
||||
let value_indexes = indexes_of(value, s).expect("value sub");
|
||||
let name = CookieStr::Indexed(name_indexes.0, name_indexes.1);
|
||||
let value = CookieStr::Indexed(value_indexes.0, value_indexes.1);
|
||||
|
||||
(name, value)
|
||||
};
|
||||
|
||||
let mut cookie = Cookie {
|
||||
name,
|
||||
value,
|
||||
cookie_string: None,
|
||||
expires: None,
|
||||
max_age: None,
|
||||
domain: None,
|
||||
path: None,
|
||||
secure: None,
|
||||
http_only: None,
|
||||
same_site: None,
|
||||
};
|
||||
|
||||
for attr in attributes {
|
||||
let (key, value) = match attr.find('=') {
|
||||
Some(i) => (attr[..i].trim(), Some(attr[(i + 1)..].trim())),
|
||||
None => (attr.trim(), None),
|
||||
};
|
||||
|
||||
match (&*key.to_ascii_lowercase(), value) {
|
||||
("secure", _) => cookie.secure = Some(true),
|
||||
("httponly", _) => cookie.http_only = Some(true),
|
||||
("max-age", Some(v)) => {
|
||||
// See RFC 6265 Section 5.2.2, negative values indicate that the
|
||||
// earliest possible expiration time should be used, so set the
|
||||
// max age as 0 seconds.
|
||||
cookie.max_age = match v.parse() {
|
||||
Ok(val) if val <= 0 => Some(Duration::zero()),
|
||||
Ok(val) => {
|
||||
// Don't panic if the max age seconds is greater than what's supported by
|
||||
// `Duration`.
|
||||
let val = cmp::min(val, Duration::max_value().whole_seconds());
|
||||
Some(Duration::seconds(val))
|
||||
}
|
||||
Err(_) => continue,
|
||||
};
|
||||
}
|
||||
("domain", Some(mut domain)) if !domain.is_empty() => {
|
||||
if domain.starts_with('.') {
|
||||
domain = &domain[1..];
|
||||
}
|
||||
|
||||
let (i, j) = indexes_of(domain, s).expect("domain sub");
|
||||
cookie.domain = Some(CookieStr::Indexed(i, j));
|
||||
}
|
||||
("path", Some(v)) => {
|
||||
let (i, j) = indexes_of(v, s).expect("path sub");
|
||||
cookie.path = Some(CookieStr::Indexed(i, j));
|
||||
}
|
||||
("samesite", Some(v)) => {
|
||||
if v.eq_ignore_ascii_case("strict") {
|
||||
cookie.same_site = Some(SameSite::Strict);
|
||||
} else if v.eq_ignore_ascii_case("lax") {
|
||||
cookie.same_site = Some(SameSite::Lax);
|
||||
} else if v.eq_ignore_ascii_case("none") {
|
||||
cookie.same_site = Some(SameSite::None);
|
||||
} else {
|
||||
// We do nothing here, for now. When/if the `SameSite`
|
||||
// attribute becomes standard, the spec says that we should
|
||||
// ignore this cookie, i.e, fail to parse it, when an
|
||||
// invalid value is passed in. The draft is at
|
||||
// http://httpwg.org/http-extensions/draft-ietf-httpbis-cookie-same-site.html.
|
||||
}
|
||||
}
|
||||
("expires", Some(v)) => {
|
||||
// Try parsing with three date formats according to
|
||||
// http://tools.ietf.org/html/rfc2616#section-3.3.1. Try
|
||||
// additional ones as encountered in the real world.
|
||||
let tm = time_parser::parse_http_date(v)
|
||||
.or_else(|| time::parse(v, "%a, %d-%b-%Y %H:%M:%S").ok());
|
||||
|
||||
if let Some(time) = tm {
|
||||
cookie.expires = Some(time.assume_utc())
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
// We're going to be permissive here. If we have no idea what
|
||||
// this is, then it's something nonstandard. We're not going to
|
||||
// store it (because it's not compliant), but we're also not
|
||||
// going to emit an error.
|
||||
}
|
||||
}
|
||||
}
Ok(cookie)
|
||||
}
pub fn parse_cookie<'c, S>(cow: S, decode: bool) -> Result<Cookie<'c>, ParseError>
|
||||
where
|
||||
S: Into<Cow<'c, str>>,
|
||||
{
|
||||
let s = cow.into();
|
||||
let mut cookie = parse_inner(&s, decode)?;
|
||||
cookie.cookie_string = Some(s);
|
||||
Ok(cookie)
|
||||
}
|
||||
|
||||
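// Illustrative usage sketch (not part of the original file): exercising the
// public `Cookie::parse` entry point, which routes through `parse_cookie`
// above. Attribute names are matched case-insensitively and unknown
// attributes are ignored, as implemented in `parse_inner`.
#[cfg(test)]
mod parse_sketch {
    use super::Cookie;

    #[test]
    fn basic_parse() {
        let cookie = Cookie::parse("session=abc123; HttpOnly; Path=/").unwrap();
        assert_eq!(cookie.name(), "session");
        assert_eq!(cookie.value(), "abc123");

        // A pair with an empty name is rejected (ParseError::EmptyName).
        assert!(Cookie::parse("=oops").is_err());
    }
}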
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{Cookie, SameSite};
|
||||
use time::{Duration, PrimitiveDateTime};
macro_rules! assert_eq_parse {
|
||||
($string:expr, $expected:expr) => {
|
||||
let cookie = match Cookie::parse($string) {
|
||||
Ok(cookie) => cookie,
|
||||
Err(e) => panic!("Failed to parse {:?}: {:?}", $string, e),
|
||||
};
|
||||
|
||||
assert_eq!(cookie, $expected);
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! assert_ne_parse {
|
||||
($string:expr, $expected:expr) => {
|
||||
let cookie = match Cookie::parse($string) {
|
||||
Ok(cookie) => cookie,
|
||||
Err(e) => panic!("Failed to parse {:?}: {:?}", $string, e),
|
||||
};
|
||||
|
||||
assert_ne!(cookie, $expected);
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_same_site() {
|
||||
let expected = Cookie::build("foo", "bar")
|
||||
.same_site(SameSite::Lax)
|
||||
.finish();
|
||||
|
||||
assert_eq_parse!("foo=bar; SameSite=Lax", expected);
|
||||
assert_eq_parse!("foo=bar; SameSite=lax", expected);
|
||||
assert_eq_parse!("foo=bar; SameSite=LAX", expected);
|
||||
assert_eq_parse!("foo=bar; samesite=Lax", expected);
|
||||
assert_eq_parse!("foo=bar; SAMESITE=Lax", expected);
|
||||
|
||||
let expected = Cookie::build("foo", "bar")
|
||||
.same_site(SameSite::Strict)
|
||||
.finish();
|
||||
|
||||
assert_eq_parse!("foo=bar; SameSite=Strict", expected);
|
||||
assert_eq_parse!("foo=bar; SameSITE=Strict", expected);
|
||||
assert_eq_parse!("foo=bar; SameSite=strict", expected);
|
||||
assert_eq_parse!("foo=bar; SameSite=STrICT", expected);
|
||||
assert_eq_parse!("foo=bar; SameSite=STRICT", expected);
|
||||
|
||||
let expected = Cookie::build("foo", "bar")
|
||||
.same_site(SameSite::None)
|
||||
.finish();
|
||||
|
||||
assert_eq_parse!("foo=bar; SameSite=None", expected);
|
||||
assert_eq_parse!("foo=bar; SameSITE=None", expected);
|
||||
assert_eq_parse!("foo=bar; SameSite=nOne", expected);
|
||||
assert_eq_parse!("foo=bar; SameSite=NoNE", expected);
|
||||
assert_eq_parse!("foo=bar; SameSite=NONE", expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse() {
|
||||
assert!(Cookie::parse("bar").is_err());
|
||||
assert!(Cookie::parse("=bar").is_err());
|
||||
assert!(Cookie::parse(" =bar").is_err());
|
||||
assert!(Cookie::parse("foo=").is_ok());
|
||||
|
||||
let expected = Cookie::build("foo", "bar=baz").finish();
|
||||
assert_eq_parse!("foo=bar=baz", expected);
|
||||
|
||||
let mut expected = Cookie::build("foo", "bar").finish();
|
||||
assert_eq_parse!("foo=bar", expected);
|
||||
assert_eq_parse!("foo = bar", expected);
|
||||
assert_eq_parse!(" foo=bar ", expected);
|
||||
assert_eq_parse!(" foo=bar ;Domain=", expected);
|
||||
assert_eq_parse!(" foo=bar ;Domain= ", expected);
|
||||
assert_eq_parse!(" foo=bar ;Ignored", expected);
|
||||
|
||||
let mut unexpected = Cookie::build("foo", "bar").http_only(false).finish();
|
||||
assert_ne_parse!(" foo=bar ;HttpOnly", unexpected);
|
||||
assert_ne_parse!(" foo=bar; httponly", unexpected);
|
||||
|
||||
expected.set_http_only(true);
|
||||
assert_eq_parse!(" foo=bar ;HttpOnly", expected);
|
||||
assert_eq_parse!(" foo=bar ;httponly", expected);
|
||||
assert_eq_parse!(" foo=bar ;HTTPONLY=whatever", expected);
|
||||
assert_eq_parse!(" foo=bar ; sekure; HTTPONLY", expected);
|
||||
|
||||
expected.set_secure(true);
|
||||
assert_eq_parse!(" foo=bar ;HttpOnly; Secure", expected);
|
||||
assert_eq_parse!(" foo=bar ;HttpOnly; Secure=aaaa", expected);
|
||||
|
||||
unexpected.set_http_only(true);
|
||||
unexpected.set_secure(true);
|
||||
assert_ne_parse!(" foo=bar ;HttpOnly; skeure", unexpected);
|
||||
assert_ne_parse!(" foo=bar ;HttpOnly; =secure", unexpected);
|
||||
assert_ne_parse!(" foo=bar ;HttpOnly;", unexpected);
|
||||
|
||||
unexpected.set_secure(false);
|
||||
assert_ne_parse!(" foo=bar ;HttpOnly; secure", unexpected);
|
||||
assert_ne_parse!(" foo=bar ;HttpOnly; secure", unexpected);
|
||||
assert_ne_parse!(" foo=bar ;HttpOnly; secure", unexpected);
|
||||
|
||||
expected.set_max_age(Duration::zero());
|
||||
assert_eq_parse!(" foo=bar ;HttpOnly; Secure; Max-Age=0", expected);
|
||||
assert_eq_parse!(" foo=bar ;HttpOnly; Secure; Max-Age = 0 ", expected);
|
||||
assert_eq_parse!(" foo=bar ;HttpOnly; Secure; Max-Age=-1", expected);
|
||||
assert_eq_parse!(" foo=bar ;HttpOnly; Secure; Max-Age = -1 ", expected);
|
||||
|
||||
expected.set_max_age(Duration::minutes(1));
|
||||
assert_eq_parse!(" foo=bar ;HttpOnly; Secure; Max-Age=60", expected);
|
||||
assert_eq_parse!(" foo=bar ;HttpOnly; Secure; Max-Age = 60 ", expected);
|
||||
|
||||
expected.set_max_age(Duration::seconds(4));
|
||||
assert_eq_parse!(" foo=bar ;HttpOnly; Secure; Max-Age=4", expected);
|
||||
assert_eq_parse!(" foo=bar ;HttpOnly; Secure; Max-Age = 4 ", expected);
|
||||
|
||||
unexpected.set_secure(true);
|
||||
unexpected.set_max_age(Duration::minutes(1));
|
||||
assert_ne_parse!(" foo=bar ;HttpOnly; Secure; Max-Age=122", unexpected);
|
||||
assert_ne_parse!(" foo=bar ;HttpOnly; Secure; Max-Age = 38 ", unexpected);
|
||||
assert_ne_parse!(" foo=bar ;HttpOnly; Secure; Max-Age=51", unexpected);
|
||||
assert_ne_parse!(" foo=bar ;HttpOnly; Secure; Max-Age = -1 ", unexpected);
|
||||
assert_ne_parse!(" foo=bar ;HttpOnly; Secure; Max-Age = 0", unexpected);
|
||||
|
||||
expected.set_path("/");
|
||||
assert_eq_parse!("foo=bar;HttpOnly; Secure; Max-Age=4; Path=/", expected);
|
||||
assert_eq_parse!("foo=bar;HttpOnly; Secure; Max-Age=4;Path=/", expected);
|
||||
|
||||
expected.set_path("/foo");
|
||||
assert_eq_parse!("foo=bar;HttpOnly; Secure; Max-Age=4; Path=/foo", expected);
|
||||
assert_eq_parse!("foo=bar;HttpOnly; Secure; Max-Age=4;Path=/foo", expected);
|
||||
assert_eq_parse!("foo=bar;HttpOnly; Secure; Max-Age=4;path=/foo", expected);
|
||||
assert_eq_parse!("foo=bar;HttpOnly; Secure; Max-Age=4;path = /foo", expected);
|
||||
|
||||
unexpected.set_max_age(Duration::seconds(4));
|
||||
unexpected.set_path("/bar");
|
||||
assert_ne_parse!("foo=bar;HttpOnly; Secure; Max-Age=4; Path=/foo", unexpected);
|
||||
assert_ne_parse!("foo=bar;HttpOnly; Secure; Max-Age=4;Path=/baz", unexpected);
|
||||
|
||||
expected.set_domain("www.foo.com");
|
||||
assert_eq_parse!(
|
||||
" foo=bar ;HttpOnly; Secure; Max-Age=4; Path=/foo; \
|
||||
Domain=www.foo.com",
|
||||
expected
|
||||
);
|
||||
|
||||
expected.set_domain("foo.com");
|
||||
assert_eq_parse!(
|
||||
" foo=bar ;HttpOnly; Secure; Max-Age=4; Path=/foo; \
|
||||
Domain=foo.com",
|
||||
expected
|
||||
);
|
||||
assert_eq_parse!(
|
||||
" foo=bar ;HttpOnly; Secure; Max-Age=4; Path=/foo; \
|
||||
Domain=FOO.COM",
|
||||
expected
|
||||
);
|
||||
|
||||
unexpected.set_path("/foo");
|
||||
unexpected.set_domain("bar.com");
|
||||
assert_ne_parse!(
|
||||
" foo=bar ;HttpOnly; Secure; Max-Age=4; Path=/foo; \
|
||||
Domain=foo.com",
|
||||
unexpected
|
||||
);
|
||||
assert_ne_parse!(
|
||||
" foo=bar ;HttpOnly; Secure; Max-Age=4; Path=/foo; \
|
||||
Domain=FOO.COM",
|
||||
unexpected
|
||||
);
|
||||
|
||||
let time_str = "Wed, 21 Oct 2015 07:28:00 GMT";
|
||||
let expires = PrimitiveDateTime::parse(time_str, "%a, %d %b %Y %H:%M:%S")
|
||||
.unwrap()
|
||||
.assume_utc();
|
||||
expected.set_expires(expires);
|
||||
assert_eq_parse!(
|
||||
" foo=bar ;HttpOnly; Secure; Max-Age=4; Path=/foo; \
|
||||
Domain=foo.com; Expires=Wed, 21 Oct 2015 07:28:00 GMT",
|
||||
expected
|
||||
);
|
||||
|
||||
unexpected.set_domain("foo.com");
|
||||
let bad_expires = PrimitiveDateTime::parse(time_str, "%a, %d %b %Y %H:%S:%M")
|
||||
.unwrap()
|
||||
.assume_utc();
|
||||
expected.set_expires(bad_expires);
|
||||
assert_ne_parse!(
|
||||
" foo=bar ;HttpOnly; Secure; Max-Age=4; Path=/foo; \
|
||||
Domain=foo.com; Expires=Wed, 21 Oct 2015 07:28:00 GMT",
|
||||
unexpected
|
||||
);
|
||||
|
||||
expected.set_expires(expires);
|
||||
expected.set_same_site(SameSite::Lax);
|
||||
assert_eq_parse!(
|
||||
" foo=bar ;HttpOnly; Secure; Max-Age=4; Path=/foo; \
|
||||
Domain=foo.com; Expires=Wed, 21 Oct 2015 07:28:00 GMT; \
|
||||
SameSite=Lax",
|
||||
expected
|
||||
);
|
||||
expected.set_same_site(SameSite::Strict);
|
||||
assert_eq_parse!(
|
||||
" foo=bar ;HttpOnly; Secure; Max-Age=4; Path=/foo; \
|
||||
Domain=foo.com; Expires=Wed, 21 Oct 2015 07:28:00 GMT; \
|
||||
SameSite=Strict",
|
||||
expected
|
||||
);
|
||||
expected.set_same_site(SameSite::None);
|
||||
assert_eq_parse!(
|
||||
" foo=bar ;HttpOnly; Secure; Max-Age=4; Path=/foo; \
|
||||
Domain=foo.com; Expires=Wed, 21 Oct 2015 07:28:00 GMT; \
|
||||
SameSite=None",
|
||||
expected
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn odd_characters() {
|
||||
let expected = Cookie::new("foo", "b%2Fr");
|
||||
assert_eq_parse!("foo=b%2Fr", expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn odd_characters_encoded() {
|
||||
let expected = Cookie::new("foo", "b/r");
|
||||
let cookie = match Cookie::parse_encoded("foo=b%2Fr") {
|
||||
Ok(cookie) => cookie,
|
||||
Err(e) => panic!("Failed to parse: {:?}", e),
|
||||
};
|
||||
|
||||
assert_eq!(cookie, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn do_not_panic_on_large_max_ages() {
|
||||
let max_duration = Duration::max_value();
|
||||
let expected = Cookie::build("foo", "bar")
|
||||
.max_age_time(max_duration)
|
||||
.finish();
|
||||
let overflow_duration = max_duration
|
||||
.checked_add(Duration::nanoseconds(1))
|
||||
.unwrap_or(max_duration);
|
||||
assert_eq_parse!(
|
||||
format!(" foo=bar; Max-Age={:?}", overflow_duration.whole_seconds()),
|
||||
expected
|
||||
);
|
||||
}
|
||||
}
|
@ -1,190 +0,0 @@
|
||||
use ring::hkdf::{Algorithm, KeyType, Prk, HKDF_SHA256};
|
||||
use ring::rand::{SecureRandom, SystemRandom};
|
||||
|
||||
use super::private::KEY_LEN as PRIVATE_KEY_LEN;
|
||||
use super::signed::KEY_LEN as SIGNED_KEY_LEN;
|
||||
|
||||
static HKDF_DIGEST: Algorithm = HKDF_SHA256;
|
||||
const KEYS_INFO: &[&[u8]] = &[b"COOKIE;SIGNED:HMAC-SHA256;PRIVATE:AEAD-AES-256-GCM"];
|
||||
|
||||
/// A cryptographic master key for use with `Signed` and/or `Private` jars.
|
||||
///
|
||||
/// This structure encapsulates secure, cryptographic keys for use with both
|
||||
/// [PrivateJar](struct.PrivateJar.html) and [SignedJar](struct.SignedJar.html).
|
||||
/// It can be derived from a single master key via
|
||||
/// [from_master](#method.from_master) or generated from a secure random source
|
||||
/// via [generate](#method.generate). A single instance of `Key` can be used for
|
||||
/// both a `PrivateJar` and a `SignedJar`.
|
||||
///
|
||||
/// This type is only available when the `secure` feature is enabled.
|
||||
#[derive(Clone)]
|
||||
pub struct Key {
|
||||
signing_key: [u8; SIGNED_KEY_LEN],
|
||||
encryption_key: [u8; PRIVATE_KEY_LEN],
|
||||
}
|
||||
|
||||
impl KeyType for &Key {
|
||||
#[inline]
|
||||
fn len(&self) -> usize {
|
||||
SIGNED_KEY_LEN + PRIVATE_KEY_LEN
|
||||
}
|
||||
}
|
||||
|
||||
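// Illustrative sketch (added for context, relying on the imports at the top of
// this file): implementing ring's `KeyType` is what tells `Prk::expand` how
// many bytes of output keying material to derive -- here, one signing key
// followed by one encryption key, exactly as `from_master` below does for
// `Key`. The struct, function, and info string are made-up placeholders.
struct BothKeysLen;

impl KeyType for BothKeysLen {
    fn len(&self) -> usize {
        SIGNED_KEY_LEN + PRIVATE_KEY_LEN
    }
}

fn derive_both(master: &[u8]) -> [u8; SIGNED_KEY_LEN + PRIVATE_KEY_LEN] {
    let info: &[&[u8]] = &[b"example-info"];
    let prk = Prk::new_less_safe(HKDF_DIGEST, master);
    let okm = prk.expand(info, BothKeysLen).expect("okm expand");

    let mut out = [0u8; SIGNED_KEY_LEN + PRIVATE_KEY_LEN];
    okm.fill(&mut out).expect("fill");
    out
}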
impl Key {
|
||||
/// Derives new signing/encryption keys from a master key.
|
||||
///
|
||||
/// The master key must be at least 256-bits (32 bytes). For security, the
|
||||
/// master key _must_ be cryptographically random. The keys are derived
|
||||
/// deterministically from the master key.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if `key` is less than 32 bytes in length.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Key;
|
||||
///
|
||||
/// # /*
|
||||
/// let master_key = { /* a cryptographically random key >= 32 bytes */ };
|
||||
/// # */
|
||||
/// # let master_key: &Vec<u8> = &(0..32).collect();
|
||||
///
|
||||
/// let key = Key::from_master(master_key);
|
||||
/// ```
|
||||
pub fn from_master(key: &[u8]) -> Key {
|
||||
if key.len() < 32 {
|
||||
panic!(
|
||||
"bad master key length: expected at least 32 bytes, found {}",
|
||||
key.len()
|
||||
);
|
||||
}
// An empty `Key` structure; will be filled in with HKDF derived keys.
|
||||
let mut output_key = Key {
|
||||
signing_key: [0; SIGNED_KEY_LEN],
|
||||
encryption_key: [0; PRIVATE_KEY_LEN],
|
||||
};
|
||||
|
||||
// Expand the master key into two HKDF generated keys.
|
||||
let mut both_keys = [0; SIGNED_KEY_LEN + PRIVATE_KEY_LEN];
|
||||
let prk = Prk::new_less_safe(HKDF_DIGEST, key);
|
||||
let okm = prk.expand(KEYS_INFO, &output_key).expect("okm expand");
|
||||
okm.fill(&mut both_keys).expect("fill keys");
// Copy the key parts into their respective fields.
|
||||
output_key
|
||||
.signing_key
|
||||
.copy_from_slice(&both_keys[..SIGNED_KEY_LEN]);
|
||||
output_key
|
||||
.encryption_key
|
||||
.copy_from_slice(&both_keys[SIGNED_KEY_LEN..]);
|
||||
output_key
|
||||
}
|
||||
|
||||
/// Generates signing/encryption keys from a secure, random source. Keys are
|
||||
/// generated non-deterministically.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if randomness cannot be retrieved from the operating system. See
|
||||
/// [try_generate](#method.try_generate) for a non-panicking version.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Key;
|
||||
///
|
||||
/// let key = Key::generate();
|
||||
/// ```
|
||||
pub fn generate() -> Key {
|
||||
Self::try_generate().expect("failed to generate `Key` from randomness")
|
||||
}
|
||||
|
||||
/// Attempts to generate signing/encryption keys from a secure, random
|
||||
/// source. Keys are generated non-deterministically. If randomness cannot be
|
||||
/// retrieved from the underlying operating system, returns `None`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Key;
|
||||
///
|
||||
/// let key = Key::try_generate();
|
||||
/// ```
|
||||
pub fn try_generate() -> Option<Key> {
|
||||
let mut sign_key = [0; SIGNED_KEY_LEN];
|
||||
let mut enc_key = [0; PRIVATE_KEY_LEN];
|
||||
|
||||
let rng = SystemRandom::new();
|
||||
if rng.fill(&mut sign_key).is_err() || rng.fill(&mut enc_key).is_err() {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(Key {
|
||||
signing_key: sign_key,
|
||||
encryption_key: enc_key,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the raw bytes of a key suitable for signing cookies.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Key;
|
||||
///
|
||||
/// let key = Key::generate();
|
||||
/// let signing_key = key.signing();
|
||||
/// ```
|
||||
pub fn signing(&self) -> &[u8] {
|
||||
&self.signing_key[..]
|
||||
}
|
||||
|
||||
/// Returns the raw bytes of a key suitable for encrypting cookies.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::Key;
|
||||
///
|
||||
/// let key = Key::generate();
|
||||
/// let encryption_key = key.encryption();
|
||||
/// ```
|
||||
pub fn encryption(&self) -> &[u8] {
|
||||
&self.encryption_key[..]
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::Key;
|
||||
|
||||
#[test]
|
||||
fn deterministic_from_master() {
|
||||
let master_key: Vec<u8> = (0..32).collect();
|
||||
|
||||
let key_a = Key::from_master(&master_key);
|
||||
let key_b = Key::from_master(&master_key);
|
||||
|
||||
assert_eq!(key_a.signing(), key_b.signing());
|
||||
assert_eq!(key_a.encryption(), key_b.encryption());
|
||||
assert_ne!(key_a.encryption(), key_a.signing());
|
||||
|
||||
let master_key_2: Vec<u8> = (32..64).collect();
|
||||
let key_2 = Key::from_master(&master_key_2);
|
||||
|
||||
assert_ne!(key_2.signing(), key_a.signing());
|
||||
assert_ne!(key_2.encryption(), key_a.encryption());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn non_deterministic_generate() {
|
||||
let key_a = Key::generate();
|
||||
let key_b = Key::generate();
|
||||
|
||||
assert_ne!(key_a.signing(), key_b.signing());
|
||||
assert_ne!(key_a.encryption(), key_b.encryption());
|
||||
}
|
||||
}
|
@ -1,40 +0,0 @@
|
||||
#[cfg(test)]
|
||||
macro_rules! assert_simple_behaviour {
|
||||
($clear:expr, $secure:expr) => {{
|
||||
assert_eq!($clear.iter().count(), 0);
|
||||
|
||||
$secure.add(Cookie::new("name", "val"));
|
||||
assert_eq!($clear.iter().count(), 1);
|
||||
assert_eq!($secure.get("name").unwrap().value(), "val");
|
||||
assert_ne!($clear.get("name").unwrap().value(), "val");
|
||||
|
||||
$secure.add(Cookie::new("another", "two"));
|
||||
assert_eq!($clear.iter().count(), 2);
|
||||
|
||||
$clear.remove(Cookie::named("another"));
|
||||
assert_eq!($clear.iter().count(), 1);
|
||||
|
||||
$secure.remove(Cookie::named("name"));
|
||||
assert_eq!($clear.iter().count(), 0);
|
||||
}};
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
macro_rules! assert_secure_behaviour {
|
||||
($clear:expr, $secure:expr) => {{
|
||||
$secure.add(Cookie::new("secure", "secure"));
|
||||
assert!($clear.get("secure").unwrap().value() != "secure");
|
||||
assert!($secure.get("secure").unwrap().value() == "secure");
|
||||
|
||||
let mut cookie = $clear.get("secure").unwrap().clone();
|
||||
let new_val = format!("{}l", cookie.value());
|
||||
cookie.set_value(new_val);
|
||||
$clear.add(cookie);
|
||||
assert!($secure.get("secure").is_none());
|
||||
|
||||
let mut cookie = $clear.get("secure").unwrap().clone();
|
||||
cookie.set_value("foobar");
|
||||
$clear.add(cookie);
|
||||
assert!($secure.get("secure").is_none());
|
||||
}};
|
||||
}
|
@ -1,10 +0,0 @@
|
||||
//! Fork of https://github.com/alexcrichton/cookie-rs
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
mod key;
|
||||
mod private;
|
||||
mod signed;
|
||||
|
||||
pub use self::key::*;
|
||||
pub use self::private::*;
|
||||
pub use self::signed::*;
|
@ -1,275 +0,0 @@
|
||||
use std::str;
|
||||
|
||||
use log::warn;
|
||||
use ring::aead::{Aad, Algorithm, Nonce, AES_256_GCM};
|
||||
use ring::aead::{LessSafeKey, UnboundKey};
|
||||
use ring::rand::{SecureRandom, SystemRandom};
|
||||
|
||||
use super::Key;
|
||||
use crate::cookie::{Cookie, CookieJar};
|
||||
|
||||
// Keep these in sync, and keep the key len synced with the `private` docs as
|
||||
// well as the `KEYS_INFO` const in secure::Key.
|
||||
static ALGO: &Algorithm = &AES_256_GCM;
|
||||
const NONCE_LEN: usize = 12;
|
||||
pub const KEY_LEN: usize = 32;
|
||||
|
||||
/// A child cookie jar that provides authenticated encryption for its cookies.
|
||||
///
|
||||
/// A _private_ child jar signs and encrypts all the cookies added to it and
|
||||
/// verifies and decrypts cookies retrieved from it. Any cookies stored in a
|
||||
/// `PrivateJar` are simultaneously assured confidentiality, integrity, and
|
||||
/// authenticity. In other words, clients cannot discover nor tamper with the
|
||||
/// contents of a cookie, nor can they fabricate cookie data.
|
||||
///
|
||||
/// This type is only available when the `secure` feature is enabled.
|
||||
pub struct PrivateJar<'a> {
|
||||
parent: &'a mut CookieJar,
|
||||
key: [u8; KEY_LEN],
|
||||
}
|
||||
|
||||
impl<'a> PrivateJar<'a> {
|
||||
/// Creates a new child `PrivateJar` with parent `parent` and key `key`.
|
||||
/// This method is typically called indirectly via the `signed` method of
|
||||
/// `CookieJar`.
|
||||
#[doc(hidden)]
|
||||
pub fn new(parent: &'a mut CookieJar, key: &Key) -> PrivateJar<'a> {
|
||||
let mut key_array = [0u8; KEY_LEN];
|
||||
key_array.copy_from_slice(key.encryption());
|
||||
PrivateJar {
|
||||
parent,
|
||||
key: key_array,
|
||||
}
|
||||
}
|
||||
|
||||
/// Given a sealed value `value` and the cookie name `name`, where the nonce is
/// prepended to the original value and both are then Base64 encoded,
/// verifies and decrypts the sealed value and returns it. If there's a
/// problem, returns an `Err` with a string describing the issue.
|
||||
fn unseal(&self, name: &str, value: &str) -> Result<String, &'static str> {
|
||||
let mut data = base64::decode(value).map_err(|_| "bad base64 value")?;
|
||||
if data.len() <= NONCE_LEN {
|
||||
return Err("length of decoded data is <= NONCE_LEN");
|
||||
}
|
||||
|
||||
let ad = Aad::from(name.as_bytes());
|
||||
let key = LessSafeKey::new(
|
||||
UnboundKey::new(&ALGO, &self.key).expect("matching key length"),
|
||||
);
|
||||
let (nonce, mut sealed) = data.split_at_mut(NONCE_LEN);
|
||||
let nonce =
|
||||
Nonce::try_assume_unique_for_key(nonce).expect("invalid length of `nonce`");
|
||||
let unsealed = key
|
||||
.open_in_place(nonce, ad, &mut sealed)
|
||||
.map_err(|_| "invalid key/nonce/value: bad seal")?;
|
||||
|
||||
if let Ok(unsealed_utf8) = str::from_utf8(unsealed) {
|
||||
Ok(unsealed_utf8.to_string())
|
||||
} else {
|
||||
warn!(
|
||||
"Private cookie does not have utf8 content!
|
||||
It is likely the secret key used to encrypt them has been leaked.
|
||||
Please change it as soon as possible."
|
||||
);
|
||||
Err("bad unsealed utf8")
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a reference to the `Cookie` inside this jar with the name `name`
|
||||
/// and authenticates and decrypts the cookie's value, returning a `Cookie`
|
||||
/// with the decrypted value. If the cookie cannot be found, or the cookie
|
||||
/// fails to authenticate or decrypt, `None` is returned.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie, Key};
|
||||
///
|
||||
/// let key = Key::generate();
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// let mut private_jar = jar.private(&key);
|
||||
/// assert!(private_jar.get("name").is_none());
|
||||
///
|
||||
/// private_jar.add(Cookie::new("name", "value"));
|
||||
/// assert_eq!(private_jar.get("name").unwrap().value(), "value");
|
||||
/// ```
|
||||
pub fn get(&self, name: &str) -> Option<Cookie<'static>> {
|
||||
if let Some(cookie_ref) = self.parent.get(name) {
|
||||
let mut cookie = cookie_ref.clone();
|
||||
if let Ok(value) = self.unseal(name, cookie.value()) {
|
||||
cookie.set_value(value);
|
||||
return Some(cookie);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Adds `cookie` to the parent jar. The cookie's value is encrypted with
|
||||
/// authenticated encryption assuring confidentiality, integrity, and
|
||||
/// authenticity.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie, Key};
|
||||
///
|
||||
/// let key = Key::generate();
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// jar.private(&key).add(Cookie::new("name", "value"));
|
||||
///
|
||||
/// assert_ne!(jar.get("name").unwrap().value(), "value");
|
||||
/// assert_eq!(jar.private(&key).get("name").unwrap().value(), "value");
|
||||
/// ```
|
||||
pub fn add(&mut self, mut cookie: Cookie<'static>) {
|
||||
self.encrypt_cookie(&mut cookie);
|
||||
|
||||
// Add the sealed cookie to the parent.
|
||||
self.parent.add(cookie);
|
||||
}
|
||||
|
||||
/// Adds an "original" `cookie` to parent jar. The cookie's value is
|
||||
/// encrypted with authenticated encryption assuring confidentiality,
|
||||
/// integrity, and authenticity. Adding an original cookie does not affect
|
||||
/// the [`CookieJar::delta()`](struct.CookieJar.html#method.delta)
|
||||
/// computation. This method is intended to be used to seed the cookie jar
|
||||
/// with cookies received from a client's HTTP message.
|
||||
///
|
||||
/// For accurate `delta` computations, this method should not be called
|
||||
/// after calling `remove`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie, Key};
|
||||
///
|
||||
/// let key = Key::generate();
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// jar.private(&key).add_original(Cookie::new("name", "value"));
|
||||
///
|
||||
/// assert_eq!(jar.iter().count(), 1);
|
||||
/// assert_eq!(jar.delta().count(), 0);
|
||||
/// ```
|
||||
pub fn add_original(&mut self, mut cookie: Cookie<'static>) {
|
||||
self.encrypt_cookie(&mut cookie);
|
||||
|
||||
// Add the sealed cookie to the parent.
|
||||
self.parent.add_original(cookie);
|
||||
}
|
||||
|
||||
/// Encrypts the cookie's value with
|
||||
/// authenticated encryption assuring confidentiality, integrity, and authenticity.
|
||||
fn encrypt_cookie(&self, cookie: &mut Cookie<'_>) {
|
||||
let name = cookie.name().as_bytes();
|
||||
let value = cookie.value().as_bytes();
|
||||
let data = encrypt_name_value(name, value, &self.key);
|
||||
|
||||
// Base64 encode the nonce and encrypted value.
|
||||
let sealed_value = base64::encode(&data);
|
||||
cookie.set_value(sealed_value);
|
||||
}
|
||||
|
||||
/// Removes `cookie` from the parent jar.
|
||||
///
|
||||
/// For correct removal, the passed in `cookie` must contain the same `path`
|
||||
/// and `domain` as the cookie that was initially set.
|
||||
///
|
||||
/// See [CookieJar::remove](struct.CookieJar.html#method.remove) for more
|
||||
/// details.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie, Key};
|
||||
///
|
||||
/// let key = Key::generate();
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// let mut private_jar = jar.private(&key);
|
||||
///
|
||||
/// private_jar.add(Cookie::new("name", "value"));
|
||||
/// assert!(private_jar.get("name").is_some());
|
||||
///
|
||||
/// private_jar.remove(Cookie::named("name"));
|
||||
/// assert!(private_jar.get("name").is_none());
|
||||
/// ```
|
||||
pub fn remove(&mut self, cookie: Cookie<'static>) {
|
||||
self.parent.remove(cookie);
|
||||
}
|
||||
}
|
||||
|
||||
fn encrypt_name_value(name: &[u8], value: &[u8], key: &[u8]) -> Vec<u8> {
|
||||
// Create the `SealingKey` structure.
|
||||
let unbound = UnboundKey::new(&ALGO, key).expect("matching key length");
|
||||
let key = LessSafeKey::new(unbound);
|
||||
|
||||
// Create a vec to hold the [nonce | cookie value | overhead].
|
||||
let mut data = vec![0; NONCE_LEN + value.len() + ALGO.tag_len()];
|
||||
|
||||
// Randomly generate the nonce, then copy the cookie value as input.
|
||||
let (nonce, in_out) = data.split_at_mut(NONCE_LEN);
|
||||
let (in_out, tag) = in_out.split_at_mut(value.len());
|
||||
in_out.copy_from_slice(value);
|
||||
|
||||
// Randomly generate the nonce into the nonce piece.
|
||||
SystemRandom::new()
|
||||
.fill(nonce)
|
||||
.expect("couldn't random fill nonce");
|
||||
let nonce = Nonce::try_assume_unique_for_key(nonce).expect("invalid `nonce` length");
|
||||
|
||||
// Use cookie's name as associated data to prevent value swapping.
|
||||
let ad = Aad::from(name);
|
||||
let ad_tag = key
|
||||
.seal_in_place_separate_tag(nonce, ad, in_out)
|
||||
.expect("in-place seal");
|
||||
|
||||
// Copy the tag into the tag piece.
|
||||
tag.copy_from_slice(ad_tag.as_ref());
|
||||
|
||||
// Remove the overhead and return the sealed content.
|
||||
data
|
||||
}
|
||||
|
||||
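// Layout sketch (added for illustration): the sealed buffer produced by
// `encrypt_name_value` is [ nonce | ciphertext | tag ], which is why `unseal`
// rejects anything not longer than NONCE_LEN and splits the nonce off first.
#[cfg(test)]
mod layout_sketch {
    use super::{encrypt_name_value, Key, ALGO, NONCE_LEN};

    #[test]
    fn sealed_value_layout() {
        let key = Key::generate();
        let sealed = encrypt_name_value(b"name", b"value", key.encryption());

        // 12-byte nonce + 5 plaintext bytes + 16-byte AES-256-GCM tag.
        assert_eq!(sealed.len(), NONCE_LEN + 5 + ALGO.tag_len());
    }
}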
#[cfg(test)]
|
||||
mod test {
|
||||
use super::{encrypt_name_value, Cookie, CookieJar, Key};
#[test]
|
||||
fn simple() {
|
||||
let key = Key::generate();
|
||||
let mut jar = CookieJar::new();
|
||||
assert_simple_behaviour!(jar, jar.private(&key));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn private() {
|
||||
let key = Key::generate();
|
||||
let mut jar = CookieJar::new();
|
||||
assert_secure_behaviour!(jar, jar.private(&key));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn non_utf8() {
|
||||
let key = Key::generate();
|
||||
let mut jar = CookieJar::new();
|
||||
|
||||
let name = "malicious";
|
||||
let mut assert_non_utf8 = |value: &[u8]| {
|
||||
let sealed = encrypt_name_value(name.as_bytes(), value, &key.encryption());
|
||||
let encoded = base64::encode(&sealed);
|
||||
assert_eq!(
|
||||
jar.private(&key).unseal(name, &encoded),
|
||||
Err("bad unsealed utf8")
|
||||
);
|
||||
jar.add(Cookie::new(name, encoded));
|
||||
assert_eq!(jar.private(&key).get(name), None);
|
||||
};
|
||||
|
||||
assert_non_utf8(&[0x72, 0xfb, 0xdf, 0x74]); // rûst in ISO/IEC 8859-1
|
||||
|
||||
let mut malicious =
|
||||
String::from(r#"{"id":"abc123??%X","admin":true}"#).into_bytes();
|
||||
malicious[8] |= 0b1100_0000;
|
||||
malicious[9] |= 0b1100_0000;
|
||||
assert_non_utf8(&malicious);
|
||||
}
|
||||
}
|
@ -1,184 +0,0 @@
|
||||
use ring::hmac::{self, sign, verify};
|
||||
|
||||
use super::Key;
|
||||
use crate::cookie::{Cookie, CookieJar};
|
||||
|
||||
// Keep these in sync, and keep the key len synced with the `signed` docs as
|
||||
// well as the `KEYS_INFO` const in secure::Key.
|
||||
static HMAC_DIGEST: hmac::Algorithm = hmac::HMAC_SHA256;
|
||||
const BASE64_DIGEST_LEN: usize = 44;
|
||||
pub const KEY_LEN: usize = 32;
|
||||
|
||||
/// A child cookie jar that authenticates its cookies.
|
||||
///
|
||||
/// A _signed_ child jar signs all the cookies added to it and verifies cookies
|
||||
/// retrieved from it. Any cookies stored in a `SignedJar` are assured integrity
|
||||
/// and authenticity. In other words, clients cannot tamper with the contents of
|
||||
/// a cookie nor can they fabricate cookie values, but the data is visible in
|
||||
/// plaintext.
|
||||
///
|
||||
/// This type is only available when the `secure` feature is enabled.
|
||||
pub struct SignedJar<'a> {
|
||||
parent: &'a mut CookieJar,
|
||||
key: hmac::Key,
|
||||
}
|
||||
|
||||
impl<'a> SignedJar<'a> {
|
||||
/// Creates a new child `SignedJar` with parent `parent` and key `key`. This
|
||||
/// method is typically called indirectly via the `signed` method of
|
||||
/// `CookieJar`.
|
||||
#[doc(hidden)]
|
||||
pub fn new(parent: &'a mut CookieJar, key: &Key) -> SignedJar<'a> {
|
||||
SignedJar {
|
||||
parent,
|
||||
key: hmac::Key::new(HMAC_DIGEST, key.signing()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Given a signed value `cookie_value` where the signature is prepended to the
/// plaintext value, verifies the signed value and returns it. If there's a
/// problem, returns an `Err` with a string describing the issue.
|
||||
fn verify(&self, cookie_value: &str) -> Result<String, &'static str> {
|
||||
if cookie_value.len() < BASE64_DIGEST_LEN {
|
||||
return Err("length of value is <= BASE64_DIGEST_LEN");
|
||||
}
|
||||
|
||||
let (digest_str, value) = cookie_value.split_at(BASE64_DIGEST_LEN);
|
||||
let sig = base64::decode(digest_str).map_err(|_| "bad base64 digest")?;
|
||||
|
||||
verify(&self.key, value.as_bytes(), &sig)
|
||||
.map(|_| value.to_string())
|
||||
.map_err(|_| "value did not verify")
|
||||
}
|
||||
|
||||
/// Returns a reference to the `Cookie` inside this jar with the name `name`
|
||||
/// and verifies the authenticity and integrity of the cookie's value,
|
||||
/// returning a `Cookie` with the authenticated value. If the cookie cannot
|
||||
/// be found, or the cookie fails to verify, `None` is returned.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie, Key};
|
||||
///
|
||||
/// let key = Key::generate();
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// let mut signed_jar = jar.signed(&key);
|
||||
/// assert!(signed_jar.get("name").is_none());
|
||||
///
|
||||
/// signed_jar.add(Cookie::new("name", "value"));
|
||||
/// assert_eq!(signed_jar.get("name").unwrap().value(), "value");
|
||||
/// ```
|
||||
pub fn get(&self, name: &str) -> Option<Cookie<'static>> {
|
||||
if let Some(cookie_ref) = self.parent.get(name) {
|
||||
let mut cookie = cookie_ref.clone();
|
||||
if let Ok(value) = self.verify(cookie.value()) {
|
||||
cookie.set_value(value);
|
||||
return Some(cookie);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Adds `cookie` to the parent jar. The cookie's value is signed assuring
|
||||
/// integrity and authenticity.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie, Key};
|
||||
///
|
||||
/// let key = Key::generate();
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// jar.signed(&key).add(Cookie::new("name", "value"));
|
||||
///
|
||||
/// assert_ne!(jar.get("name").unwrap().value(), "value");
|
||||
/// assert!(jar.get("name").unwrap().value().contains("value"));
|
||||
/// assert_eq!(jar.signed(&key).get("name").unwrap().value(), "value");
|
||||
/// ```
|
||||
pub fn add(&mut self, mut cookie: Cookie<'static>) {
|
||||
self.sign_cookie(&mut cookie);
|
||||
self.parent.add(cookie);
|
||||
}
|
||||
|
||||
/// Adds an "original" `cookie` to this jar. The cookie's value is signed
|
||||
/// assuring integrity and authenticity. Adding an original cookie does not
|
||||
/// affect the [`CookieJar::delta()`](struct.CookieJar.html#method.delta)
|
||||
/// computation. This method is intended to be used to seed the cookie jar
|
||||
/// with cookies received from a client's HTTP message.
|
||||
///
|
||||
/// For accurate `delta` computations, this method should not be called
|
||||
/// after calling `remove`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie, Key};
|
||||
///
|
||||
/// let key = Key::generate();
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// jar.signed(&key).add_original(Cookie::new("name", "value"));
|
||||
///
|
||||
/// assert_eq!(jar.iter().count(), 1);
|
||||
/// assert_eq!(jar.delta().count(), 0);
|
||||
/// ```
|
||||
pub fn add_original(&mut self, mut cookie: Cookie<'static>) {
|
||||
self.sign_cookie(&mut cookie);
|
||||
self.parent.add_original(cookie);
|
||||
}
|
||||
|
||||
/// Signs the cookie's value assuring integrity and authenticity.
|
||||
fn sign_cookie(&self, cookie: &mut Cookie<'_>) {
|
||||
let digest = sign(&self.key, cookie.value().as_bytes());
|
||||
let mut new_value = base64::encode(digest.as_ref());
|
||||
new_value.push_str(cookie.value());
|
||||
cookie.set_value(new_value);
|
||||
}
|
||||
|
||||
/// Removes `cookie` from the parent jar.
|
||||
///
|
||||
/// For correct removal, the passed in `cookie` must contain the same `path`
|
||||
/// and `domain` as the cookie that was initially set.
|
||||
///
|
||||
/// See [CookieJar::remove](struct.CookieJar.html#method.remove) for more
|
||||
/// details.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use actix_http::cookie::{CookieJar, Cookie, Key};
|
||||
///
|
||||
/// let key = Key::generate();
|
||||
/// let mut jar = CookieJar::new();
|
||||
/// let mut signed_jar = jar.signed(&key);
|
||||
///
|
||||
/// signed_jar.add(Cookie::new("name", "value"));
|
||||
/// assert!(signed_jar.get("name").is_some());
|
||||
///
|
||||
/// signed_jar.remove(Cookie::named("name"));
|
||||
/// assert!(signed_jar.get("name").is_none());
|
||||
/// ```
|
||||
pub fn remove(&mut self, cookie: Cookie<'static>) {
|
||||
self.parent.remove(cookie);
|
||||
}
|
||||
}
|
||||
|
||||
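// Layout sketch (added for illustration): a signed value is the Base64 HMAC
// digest (always 44 characters for HMAC-SHA256) immediately followed by the
// plaintext value, matching the split performed in `verify`.
#[cfg(test)]
mod layout_sketch {
    use super::{Cookie, CookieJar, Key, BASE64_DIGEST_LEN};

    #[test]
    fn digest_prefix_then_value() {
        let key = Key::generate();
        let mut jar = CookieJar::new();
        jar.signed(&key).add(Cookie::new("name", "value"));

        let raw = jar.get("name").unwrap().value().to_string();
        assert_eq!(&raw[BASE64_DIGEST_LEN..], "value");
    }
}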
#[cfg(test)]
|
||||
mod test {
|
||||
use super::{Cookie, CookieJar, Key};
|
||||
|
||||
#[test]
|
||||
fn simple() {
|
||||
let key = Key::generate();
|
||||
let mut jar = CookieJar::new();
|
||||
assert_simple_behaviour!(jar, jar.signed(&key));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn private() {
|
||||
let key = Key::generate();
|
||||
let mut jar = CookieJar::new();
|
||||
assert_secure_behaviour!(jar, jar.signed(&key));
|
||||
}
|
||||
}
|
@ -9,7 +9,7 @@ use brotli2::write::BrotliEncoder;
|
||||
use bytes::Bytes;
|
||||
use flate2::write::{GzEncoder, ZlibEncoder};
|
||||
use futures_core::ready;
|
||||
use pin_project::{pin_project, project};
|
||||
use pin_project::pin_project;
|
||||
|
||||
use crate::body::{Body, BodySize, MessageBody, ResponseBody};
|
||||
use crate::http::header::{ContentEncoding, CONTENT_ENCODING};
|
||||
@ -79,7 +79,7 @@ impl<B: MessageBody> Encoder<B> {
|
||||
}
|
||||
}
|
||||
|
||||
#[pin_project]
|
||||
#[pin_project(project = EncoderBodyProj)]
|
||||
enum EncoderBody<B> {
|
||||
Bytes(Bytes),
|
||||
Stream(#[pin] B),
|
||||
@ -95,22 +95,22 @@ impl<B: MessageBody> MessageBody for EncoderBody<B> {
|
||||
}
|
||||
}
|
||||
|
||||
#[project]
|
||||
fn poll_next(
|
||||
self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<Option<Result<Bytes, Error>>> {
|
||||
#[project]
|
||||
match self.project() {
|
||||
EncoderBody::Bytes(b) => {
|
||||
EncoderBodyProj::Bytes(b) => {
|
||||
if b.is_empty() {
|
||||
Poll::Ready(None)
|
||||
} else {
|
||||
Poll::Ready(Some(Ok(std::mem::take(b))))
|
||||
}
|
||||
}
|
||||
EncoderBody::Stream(b) => b.poll_next(cx),
|
||||
EncoderBody::BoxedStream(ref mut b) => Pin::new(b.as_mut()).poll_next(cx),
|
||||
EncoderBodyProj::Stream(b) => b.poll_next(cx),
|
||||
EncoderBodyProj::BoxedStream(ref mut b) => {
|
||||
Pin::new(b.as_mut()).poll_next(cx)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
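// Illustrative sketch (not part of the diff): the encoder changes above move
// from the removed `#[project]` attribute to pin-project's named-projection
// form, where the generated projection enum has an explicit name usable in a
// plain `match`. A minimal standalone example of that style, assuming the
// `pin-project` crate:
use pin_project::pin_project;
use std::pin::Pin;

#[pin_project(project = ExampleProj)]
enum Example<S> {
    Plain(u32),
    Stream(#[pin] S),
}

impl<S> Example<S> {
    fn is_stream(self: Pin<&mut Self>) -> bool {
        match self.project() {
            ExampleProj::Plain(_) => false,
            ExampleProj::Stream(_) => true,
        }
    }
}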
@ -1,4 +1,5 @@
|
||||
//! Error and Result module
|
||||
|
||||
use std::cell::RefCell;
|
||||
use std::io::Write;
|
||||
use std::str::Utf8Error;
|
||||
@ -7,7 +8,7 @@ use std::{fmt, io, result};
|
||||
|
||||
use actix_codec::{Decoder, Encoder};
|
||||
pub use actix_threadpool::BlockingError;
|
||||
use actix_utils::framed::DispatcherError as FramedDispatcherError;
|
||||
use actix_utils::dispatcher::DispatcherError as FramedDispatcherError;
|
||||
use actix_utils::timeout::TimeoutError;
|
||||
use bytes::BytesMut;
|
||||
use derive_more::{Display, From};
|
||||
@ -452,10 +453,10 @@ impl ResponseError for ContentTypeError {
|
||||
}
|
||||
}
|
||||
|
||||
impl<E, U: Encoder + Decoder> ResponseError for FramedDispatcherError<E, U>
|
||||
impl<E, U: Encoder<I> + Decoder, I> ResponseError for FramedDispatcherError<E, U, I>
|
||||
where
|
||||
E: fmt::Debug + fmt::Display,
|
||||
<U as Encoder>::Error: fmt::Debug,
|
||||
<U as Encoder<I>>::Error: fmt::Debug,
|
||||
<U as Decoder>::Error: fmt::Debug,
|
||||
{
|
||||
}
|
||||
@ -964,7 +965,6 @@ impl ResponseError for actix::actors::resolver::ResolverError {}
|
||||
mod tests {
|
||||
use super::*;
|
||||
use http::{Error as HttpError, StatusCode};
|
||||
use httparse;
|
||||
use std::io;
|
||||
|
||||
#[test]
|
||||
|
@ -72,7 +72,7 @@ impl fmt::Debug for Extensions {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_remove() {
|
||||
let mut map = Extensions::new();
|
||||
|
@ -173,13 +173,12 @@ impl Decoder for ClientPayloadCodec {
|
||||
}
|
||||
}
|
||||
|
||||
impl Encoder for ClientCodec {
|
||||
type Item = Message<(RequestHeadType, BodySize)>;
|
||||
impl Encoder<Message<(RequestHeadType, BodySize)>> for ClientCodec {
|
||||
type Error = io::Error;
|
||||
|
||||
fn encode(
|
||||
&mut self,
|
||||
item: Self::Item,
|
||||
item: Message<(RequestHeadType, BodySize)>,
|
||||
dst: &mut BytesMut,
|
||||
) -> Result<(), Self::Error> {
|
||||
match item {
|
||||
|
@ -144,13 +144,12 @@ impl Decoder for Codec {
|
||||
}
|
||||
}
|
||||
|
||||
impl Encoder for Codec {
|
||||
type Item = Message<(Response<()>, BodySize)>;
|
||||
impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
|
||||
type Error = io::Error;
|
||||
|
||||
fn encode(
|
||||
&mut self,
|
||||
item: Self::Item,
|
||||
item: Message<(Response<()>, BodySize)>,
|
||||
dst: &mut BytesMut,
|
||||
) -> Result<(), Self::Error> {
|
||||
match item {
|
||||
|
@ -1,7 +1,6 @@
|
||||
use std::convert::TryFrom;
|
||||
use std::io;
|
||||
use std::marker::PhantomData;
|
||||
use std::mem::MaybeUninit;
|
||||
use std::task::Poll;
|
||||
|
||||
use actix_codec::Decoder;
|
||||
@ -46,7 +45,7 @@ impl<T: MessageType> Decoder for MessageDecoder<T> {
|
||||
|
||||
pub(crate) enum PayloadLength {
|
||||
Payload(PayloadType),
|
||||
Upgrade,
|
||||
UpgradeWebSocket,
|
||||
None,
|
||||
}
|
||||
|
||||
@ -65,7 +64,7 @@ pub(crate) trait MessageType: Sized {
|
||||
raw_headers: &[HeaderIndex],
|
||||
) -> Result<PayloadLength, ParseError> {
|
||||
let mut ka = None;
|
||||
let mut has_upgrade = false;
|
||||
let mut has_upgrade_websocket = false;
|
||||
let mut expect = false;
|
||||
let mut chunked = false;
|
||||
let mut content_length = None;
|
||||
@ -77,12 +76,14 @@ pub(crate) trait MessageType: Sized {
|
||||
let name =
|
||||
HeaderName::from_bytes(&slice[idx.name.0..idx.name.1]).unwrap();
|
||||
|
||||
// Unsafe: httparse check header value for valid utf-8
|
||||
// SAFETY: httparse already checks header value is only visible ASCII bytes
|
||||
// from_maybe_shared_unchecked contains debug assertions so they are omitted here
|
||||
let value = unsafe {
|
||||
HeaderValue::from_maybe_shared_unchecked(
|
||||
slice.slice(idx.value.0..idx.value.1),
|
||||
)
|
||||
};
|
||||
|
||||
match name {
|
||||
header::CONTENT_LENGTH => {
|
||||
if let Ok(s) = value.to_str() {
|
||||
@ -124,12 +125,9 @@ pub(crate) trait MessageType: Sized {
|
||||
};
|
||||
}
|
||||
header::UPGRADE => {
|
||||
has_upgrade = true;
|
||||
// check content-length, some clients (dart)
|
||||
// sends "content-length: 0" with websocket upgrade
|
||||
if let Ok(val) = value.to_str().map(|val| val.trim()) {
|
||||
if val.eq_ignore_ascii_case("websocket") {
|
||||
content_length = None;
|
||||
has_upgrade_websocket = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -156,13 +154,13 @@ pub(crate) trait MessageType: Sized {
|
||||
Ok(PayloadLength::Payload(PayloadType::Payload(
|
||||
PayloadDecoder::chunked(),
|
||||
)))
|
||||
} else if has_upgrade_websocket {
|
||||
Ok(PayloadLength::UpgradeWebSocket)
|
||||
} else if let Some(len) = content_length {
|
||||
// Content-Length
|
||||
Ok(PayloadLength::Payload(PayloadType::Payload(
|
||||
PayloadDecoder::length(len),
|
||||
)))
|
||||
} else if has_upgrade {
|
||||
Ok(PayloadLength::Upgrade)
|
||||
} else {
|
||||
Ok(PayloadLength::None)
|
||||
}
|
||||
@ -184,16 +182,11 @@ impl MessageType for Request {
|
||||
&mut self.head_mut().headers
|
||||
}
|
||||
|
||||
#[allow(clippy::uninit_assumed_init)]
|
||||
fn decode(src: &mut BytesMut) -> Result<Option<(Self, PayloadType)>, ParseError> {
|
||||
// Unsafe: we read only this data only after httparse parses headers into.
|
||||
// performance bump for pipeline benchmarks.
|
||||
let mut headers: [HeaderIndex; MAX_HEADERS] =
|
||||
unsafe { MaybeUninit::uninit().assume_init() };
|
||||
let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;
|
||||
|
||||
let (len, method, uri, ver, h_len) = {
|
||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] =
|
||||
unsafe { MaybeUninit::uninit().assume_init() };
|
||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;
|
||||
|
||||
let mut req = httparse::Request::new(&mut parsed);
|
||||
match req.parse(src)? {
|
||||
@ -222,7 +215,7 @@ impl MessageType for Request {
|
||||
// payload decoder
|
||||
let decoder = match length {
|
||||
PayloadLength::Payload(pl) => pl,
|
||||
PayloadLength::Upgrade => {
|
||||
PayloadLength::UpgradeWebSocket => {
|
||||
// upgrade(websocket)
|
||||
PayloadType::Stream(PayloadDecoder::eof())
|
||||
}
|
||||
@ -260,16 +253,11 @@ impl MessageType for ResponseHead {
|
||||
&mut self.headers
|
||||
}
|
||||
|
||||
#[allow(clippy::uninit_assumed_init)]
|
||||
fn decode(src: &mut BytesMut) -> Result<Option<(Self, PayloadType)>, ParseError> {
|
||||
// Unsafe: we read only this data only after httparse parses headers into.
|
||||
// performance bump for pipeline benchmarks.
|
||||
let mut headers: [HeaderIndex; MAX_HEADERS] =
|
||||
unsafe { MaybeUninit::uninit().assume_init() };
|
||||
let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;
|
||||
|
||||
let (len, ver, status, h_len) = {
|
||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] =
|
||||
unsafe { MaybeUninit::uninit().assume_init() };
|
||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;
|
||||
|
||||
let mut res = httparse::Response::new(&mut parsed);
|
||||
match res.parse(src)? {
|
||||
@ -324,6 +312,17 @@ pub(crate) struct HeaderIndex {
|
||||
pub(crate) value: (usize, usize),
|
||||
}
|
||||
|
||||
pub(crate) const EMPTY_HEADER_INDEX: HeaderIndex = HeaderIndex {
|
||||
name: (0, 0),
|
||||
value: (0, 0),
|
||||
};
|
||||
|
||||
pub(crate) const EMPTY_HEADER_INDEX_ARRAY: [HeaderIndex; MAX_HEADERS] =
|
||||
[EMPTY_HEADER_INDEX; MAX_HEADERS];
|
||||
|
||||
pub(crate) const EMPTY_HEADER_ARRAY: [httparse::Header<'static>; MAX_HEADERS] =
|
||||
[httparse::EMPTY_HEADER; MAX_HEADERS];
|
||||
|
||||
impl HeaderIndex {
|
||||
pub(crate) fn record(
|
||||
bytes: &[u8],
|
||||
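// Illustrative sketch (not part of the diff): the constants introduced above
// replace the previous `MaybeUninit::uninit().assume_init()` scratch arrays
// with fully initialized `const` templates, so each decode still only costs a
// stack copy while avoiding undefined behavior. Assuming a MAX_HEADERS of 96
// as used by this module, the same pattern in isolation looks like:
const MAX_HEADERS: usize = 96;

fn header_scratch() -> [httparse::Header<'static>; MAX_HEADERS] {
    // Repeating a `const` template is cheap and always well-defined.
    [httparse::EMPTY_HEADER; MAX_HEADERS]
}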
@ -655,10 +654,7 @@ mod tests {
|
||||
}
|
||||
|
||||
fn is_unhandled(&self) -> bool {
|
||||
match self {
|
||||
PayloadType::Stream(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self, PayloadType::Stream(_))
|
||||
}
|
||||
}
|
||||
|
||||
@ -670,10 +666,7 @@ mod tests {
|
||||
}
|
||||
}
|
||||
fn eof(&self) -> bool {
|
||||
match *self {
|
||||
PayloadItem::Eof => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(*self, PayloadItem::Eof)
|
||||
}
|
||||
}
|
||||
|
||||
@ -979,7 +972,7 @@ mod tests {
|
||||
unreachable!("Error");
|
||||
}
|
||||
|
||||
// type in chunked
|
||||
// intentional typo in "chunked"
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chnked\r\n\r\n",
|
||||
@ -1040,7 +1033,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_upgrade() {
|
||||
fn test_http_request_upgrade_websocket() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
connection: upgrade\r\n\
|
||||
@ -1054,6 +1047,26 @@ mod tests {
|
||||
assert!(pl.is_unhandled());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_upgrade_h2c() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
connection: upgrade, http2-settings\r\n\
|
||||
upgrade: h2c\r\n\
|
||||
http2-settings: dummy\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
// `connection: upgrade, http2-settings` doesn't work properly..
|
||||
// see MessageType::set_headers().
|
||||
//
|
||||
// The line below should be:
|
||||
// assert_eq!(req.head().connection_type(), ConnectionType::Upgrade);
|
||||
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
||||
assert!(req.upgrade());
|
||||
assert!(!pl.is_unhandled());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_parser_utf8() {
|
||||
let mut buf = BytesMut::from(
|
||||
|
@ -58,7 +58,7 @@ where
|
||||
inner: DispatcherState<T, S, B, X, U>,
|
||||
}
|
||||
|
||||
#[pin_project]
|
||||
#[pin_project(project = DispatcherStateProj)]
|
||||
enum DispatcherState<T, S, B, X, U>
|
||||
where
|
||||
S: Service<Request = Request>,
|
||||
@ -73,7 +73,7 @@ where
|
||||
Upgrade(Pin<Box<U::Future>>),
|
||||
}
|
||||
|
||||
#[pin_project]
|
||||
#[pin_project(project = InnerDispatcherProj)]
|
||||
struct InnerDispatcher<T, S, B, X, U>
|
||||
where
|
||||
S: Service<Request = Request>,
|
||||
@ -112,7 +112,7 @@ enum DispatcherMessage {
|
||||
Error(Response<()>),
|
||||
}
|
||||
|
||||
#[pin_project]
|
||||
#[pin_project(project = StateProj)]
|
||||
enum State<S, B, X>
|
||||
where
|
||||
S: Service<Request = Request>,
|
||||
@ -132,19 +132,11 @@ where
|
||||
B: MessageBody,
|
||||
{
|
||||
fn is_empty(&self) -> bool {
|
||||
if let State::None = self {
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
matches!(self, State::None)
|
||||
}
|
||||
|
||||
fn is_call(&self) -> bool {
|
||||
if let State::ServiceCall(_) = self {
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
matches!(self, State::ServiceCall(_))
|
||||
}
|
||||
}
|
||||
enum PollResponse {
|
||||
@ -156,14 +148,8 @@ enum PollResponse {
|
||||
impl PartialEq for PollResponse {
|
||||
fn eq(&self, other: &PollResponse) -> bool {
|
||||
match self {
|
||||
PollResponse::DrainWriteBuf => match other {
|
||||
PollResponse::DrainWriteBuf => true,
|
||||
_ => false,
|
||||
},
|
||||
PollResponse::DoNothing => match other {
|
||||
PollResponse::DoNothing => true,
|
||||
_ => false,
|
||||
},
|
||||
PollResponse::DrainWriteBuf => matches!(other, PollResponse::DrainWriteBuf),
|
||||
PollResponse::DoNothing => matches!(other, PollResponse::DoNothing),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
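// Illustrative sketch (not part of the diff): the refactors above and below
// collapse the "match to bool" pattern into the `matches!` macro, stable since
// Rust 1.42. In isolation:
enum Phase {
    Idle,
    Draining,
}

fn is_idle(phase: &Phase) -> bool {
    // Equivalent to `match phase { Phase::Idle => true, _ => false }`.
    matches!(phase, Phase::Idle)
}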
@ -296,7 +282,6 @@ where
|
||||
///
|
||||
/// true - got WouldBlock
|
||||
/// false - didn't get WouldBlock
|
||||
#[pin_project::project]
|
||||
fn poll_flush(
|
||||
self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
@ -307,8 +292,7 @@ where
|
||||
|
||||
let len = self.write_buf.len();
|
||||
let mut written = 0;
|
||||
#[project]
|
||||
let InnerDispatcher { io, write_buf, .. } = self.project();
|
||||
let InnerDispatcherProj { io, write_buf, .. } = self.project();
|
||||
let mut io = Pin::new(io.as_mut().unwrap());
|
||||
while written < len {
|
||||
match io.as_mut().poll_write(cx, &write_buf[written..]) {
|
||||
@ -330,11 +314,15 @@ where
|
||||
Poll::Ready(Err(err)) => return Err(DispatchError::Io(err)),
|
||||
}
|
||||
}
|
||||
|
||||
if written == write_buf.len() {
|
||||
// SAFETY: setting length to 0 is safe
|
||||
// skips one length check vs truncate
|
||||
unsafe { write_buf.set_len(0) }
|
||||
} else {
|
||||
write_buf.advance(written);
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
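// Illustrative sketch (not part of the diff): the flush path above resets the
// write buffer with `set_len(0)` once everything has been written, and falls
// back to `advance` for partial writes. The same pattern in isolation,
// assuming the `bytes` 0.5 API used by this crate:
use bytes::{Buf, BytesMut};

fn consume_written(write_buf: &mut BytesMut, written: usize) {
    if written == write_buf.len() {
        // SAFETY: shrinking the length to zero never exposes uninitialized
        // bytes; it merely skips the length check `truncate` would perform.
        unsafe { write_buf.set_len(0) }
    } else {
        write_buf.advance(written);
    }
}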
@ -366,16 +354,14 @@ where
|
||||
.extend_from_slice(b"HTTP/1.1 100 Continue\r\n\r\n");
|
||||
}
|
||||
|
||||
#[pin_project::project]
|
||||
fn poll_response(
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
) -> Result<PollResponse, DispatchError> {
|
||||
loop {
|
||||
let mut this = self.as_mut().project();
|
||||
#[project]
|
||||
let state = match this.state.project() {
|
||||
State::None => match this.messages.pop_front() {
|
||||
StateProj::None => match this.messages.pop_front() {
|
||||
Some(DispatcherMessage::Item(req)) => {
|
||||
Some(self.as_mut().handle_request(req, cx)?)
|
||||
}
|
||||
@ -388,7 +374,7 @@ where
|
||||
}
|
||||
None => None,
|
||||
},
|
||||
State::ExpectCall(fut) => match fut.as_mut().poll(cx) {
|
||||
StateProj::ExpectCall(fut) => match fut.as_mut().poll(cx) {
|
||||
Poll::Ready(Ok(req)) => {
|
||||
self.as_mut().send_continue();
|
||||
this = self.as_mut().project();
|
||||
@ -403,7 +389,7 @@ where
|
||||
}
|
||||
Poll::Pending => None,
|
||||
},
|
||||
State::ServiceCall(fut) => match fut.as_mut().poll(cx) {
|
||||
StateProj::ServiceCall(fut) => match fut.as_mut().poll(cx) {
|
||||
Poll::Ready(Ok(res)) => {
|
||||
let (res, body) = res.into().replace_body(());
|
||||
let state = self.as_mut().send_response(res, body)?;
|
||||
@ -418,7 +404,7 @@ where
|
||||
}
|
||||
Poll::Pending => None,
|
||||
},
|
||||
State::SendPayload(mut stream) => {
|
||||
StateProj::SendPayload(mut stream) => {
|
||||
loop {
|
||||
if this.write_buf.len() < HW_BUFFER_SIZE {
|
||||
match stream.as_mut().poll_next(cx) {
|
||||
@ -724,13 +710,11 @@ where
|
||||
{
|
||||
type Output = Result<(), DispatchError>;
|
||||
|
||||
#[pin_project::project]
|
||||
#[inline]
|
||||
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.as_mut().project();
|
||||
#[project]
|
||||
match this.inner.project() {
|
||||
DispatcherState::Normal(mut inner) => {
|
||||
DispatcherStateProj::Normal(mut inner) => {
|
||||
inner.as_mut().poll_keepalive(cx)?;
|
||||
|
||||
if inner.flags.contains(Flags::SHUTDOWN) {
|
||||
@ -850,7 +834,7 @@ where
|
||||
}
|
||||
}
|
||||
}
|
||||
DispatcherState::Upgrade(fut) => fut.as_mut().poll(cx).map_err(|e| {
|
||||
DispatcherStateProj::Upgrade(fut) => fut.as_mut().poll(cx).map_err(|e| {
|
||||
error!("Upgrade handler error: {}", e);
|
||||
DispatchError::Upgrade
|
||||
}),
|
||||
@ -867,7 +851,14 @@ where
|
||||
T: AsyncRead + Unpin,
|
||||
{
|
||||
let mut read_some = false;
|
||||
|
||||
loop {
|
||||
// If buf is full return but do not disconnect since
|
||||
// there is more reading to be done
|
||||
if buf.len() >= HW_BUFFER_SIZE {
|
||||
return Ok(Some(false));
|
||||
}
|
||||
|
||||
let remaining = buf.capacity() - buf.len();
|
||||
if remaining < LW_BUFFER_SIZE {
|
||||
buf.reserve(HW_BUFFER_SIZE - remaining);
|
||||
|
@ -129,89 +129,133 @@ pub(crate) trait MessageType: Sized {
|
||||
.chain(extra_headers.inner.iter());
|
||||
|
||||
// write headers
|
||||
let mut pos = 0;
|
||||
|
||||
let mut has_date = false;
|
||||
let mut remaining = dst.capacity() - dst.len();
|
||||
|
||||
let mut buf = dst.bytes_mut().as_mut_ptr() as *mut u8;
|
||||
let mut remaining = dst.capacity() - dst.len();
|
||||
|
||||
// tracks bytes written since last buffer resize
|
||||
// since buf is a raw pointer to a bytes container storage but is written to without the
|
||||
// container's knowledge, this is used to sync the containers cursor after data is written
|
||||
let mut pos = 0;
|
||||
|
||||
for (key, value) in headers {
|
||||
match *key {
|
||||
CONNECTION => continue,
|
||||
TRANSFER_ENCODING | CONTENT_LENGTH if skip_len => continue,
|
||||
DATE => {
|
||||
has_date = true;
|
||||
}
|
||||
DATE => has_date = true,
|
||||
_ => (),
|
||||
}
|
||||
|
||||
let k = key.as_str().as_bytes();
|
||||
let k_len = k.len();
|
||||
|
||||
match value {
|
||||
map::Value::One(ref val) => {
|
||||
let v = val.as_ref();
|
||||
let v_len = v.len();
|
||||
let k_len = k.len();
|
||||
|
||||
// key length + value length + colon + space + \r\n
|
||||
let len = k_len + v_len + 4;
|
||||
|
||||
if len > remaining {
|
||||
// not enough room in buffer for this header; reserve more space
|
||||
|
||||
// SAFETY: all the bytes written up to position "pos" are initialized
|
||||
// the written byte count and pointer advancement are kept in sync
|
||||
unsafe {
|
||||
dst.advance_mut(pos);
|
||||
}
|
||||
|
||||
pos = 0;
|
||||
dst.reserve(len * 2);
|
||||
remaining = dst.capacity() - dst.len();
|
||||
|
||||
// re-assign buf raw pointer since it's possible that the buffer was
|
||||
// reallocated and/or resized
|
||||
buf = dst.bytes_mut().as_mut_ptr() as *mut u8;
|
||||
}
|
||||
// use upper Camel-Case
|
||||
|
||||
// SAFETY: on each write, it is enough to ensure that the advancement of the
|
||||
// cursor matches the number of bytes written
|
||||
unsafe {
|
||||
// use upper Camel-Case
|
||||
if camel_case {
|
||||
write_camel_case(k, from_raw_parts_mut(buf, k_len))
|
||||
} else {
|
||||
write_data(k, buf, k_len)
|
||||
}
|
||||
|
||||
buf = buf.add(k_len);
|
||||
|
||||
write_data(b": ", buf, 2);
|
||||
buf = buf.add(2);
|
||||
|
||||
write_data(v, buf, v_len);
|
||||
buf = buf.add(v_len);
|
||||
|
||||
write_data(b"\r\n", buf, 2);
|
||||
buf = buf.add(2);
|
||||
pos += len;
|
||||
remaining -= len;
|
||||
}
|
||||
|
||||
pos += len;
|
||||
remaining -= len;
|
||||
}
|
||||
|
||||
map::Value::Multi(ref vec) => {
|
||||
for val in vec {
|
||||
let v = val.as_ref();
|
||||
let v_len = v.len();
|
||||
let k_len = k.len();
|
||||
let len = k_len + v_len + 4;
|
||||
|
||||
if len > remaining {
|
||||
// SAFETY: all the bytes written up to position "pos" are initialized
|
||||
// the written byte count and pointer advancement are kept in sync
|
||||
unsafe {
|
||||
dst.advance_mut(pos);
|
||||
}
|
||||
pos = 0;
|
||||
dst.reserve(len * 2);
|
||||
remaining = dst.capacity() - dst.len();
|
||||
|
||||
// re-assign buf raw pointer since it's possible that the buffer was
|
||||
// reallocated and/or resized
|
||||
buf = dst.bytes_mut().as_mut_ptr() as *mut u8;
|
||||
}
|
||||
// use upper Camel-Case
|
||||
|
||||
// SAFETY: on each write, it is enough to ensure that the advancement of
|
||||
// the cursor matches the number of bytes written
|
||||
unsafe {
|
||||
if camel_case {
|
||||
write_camel_case(k, from_raw_parts_mut(buf, k_len));
|
||||
} else {
|
||||
write_data(k, buf, k_len);
|
||||
}
|
||||
|
||||
buf = buf.add(k_len);
|
||||
|
||||
write_data(b": ", buf, 2);
|
||||
buf = buf.add(2);
|
||||
|
||||
write_data(v, buf, v_len);
|
||||
buf = buf.add(v_len);
|
||||
|
||||
write_data(b"\r\n", buf, 2);
|
||||
buf = buf.add(2);
|
||||
};
|
||||
|
||||
pos += len;
|
||||
remaining -= len;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// final cursor synchronization with the bytes container
|
||||
//
|
||||
// SAFETY: all the bytes written up to position "pos" are initialized
|
||||
// the written byte count and pointer advancement are kept in sync
|
||||
unsafe {
|
||||
dst.advance_mut(pos);
|
||||
}
|
||||
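For orientation, here is a safe, minimal sketch (not the code above) of what one iteration of that loop produces: a single `name: value\r\n` line appended to a BytesMut. The unsafe path in the diff avoids the per-call checks of extend_from_slice by writing through a raw pointer and syncing the cursor once with advance_mut; the helper name below is hypothetical.

use bytes::BytesMut;

// Hypothetical helper for illustration only.
fn push_header_line(dst: &mut BytesMut, name: &[u8], value: &[u8]) {
    // name + ": " + value + "\r\n"
    dst.reserve(name.len() + value.len() + 4);
    dst.extend_from_slice(name);
    dst.extend_from_slice(b": ");
    dst.extend_from_slice(value);
    dst.extend_from_slice(b"\r\n");
}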
@@ -477,7 +521,10 @@ impl<'a> io::Write for Writer<'a> {
}
}

/// # Safety
/// Callers must ensure that the given length matches given value length.
unsafe fn write_data(value: &[u8], buf: *mut u8, len: usize) {
debug_assert_eq!(value.len(), len);
copy_nonoverlapping(value.as_ptr(), buf, len);
}
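A hedged usage sketch of the invariant the new `# Safety` note documents: the caller passes a pointer to at least `len` writable bytes and a `len` equal to `value.len()`. The buffer name and size below are illustrative, not from the diff.

// Hypothetical caller upholding the documented contract (illustrative only).
fn demo() {
    let value = b"close";
    let mut storage = [0u8; 16];
    // SAFETY (assumed for this sketch): `storage` provides at least `value.len()`
    // writable bytes and the passed length equals `value.len()`.
    unsafe { write_data(value, storage.as_mut_ptr(), value.len()) };
}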
@@ -98,7 +98,7 @@ mod openssl {
use super::*;

use actix_tls::openssl::{Acceptor, SslAcceptor, SslStream};
use actix_tls::{openssl::HandshakeError, SslError};
use actix_tls::{openssl::HandshakeError, TlsError};

impl<S, B, X, U> H1Service<SslStream<TcpStream>, S, B, X, U>
where
@@ -126,19 +126,19 @@ mod openssl {
Config = (),
Request = TcpStream,
Response = (),
Error = SslError<HandshakeError<TcpStream>, DispatchError>,
Error = TlsError<HandshakeError<TcpStream>, DispatchError>,
InitError = (),
> {
pipeline_factory(
Acceptor::new(acceptor)
.map_err(SslError::Ssl)
.map_err(TlsError::Tls)
.map_init_err(|_| panic!()),
)
.and_then(|io: SslStream<TcpStream>| {
let peer_addr = io.get_ref().peer_addr().ok();
ok((io, peer_addr))
})
.and_then(self.map_err(SslError::Service))
.and_then(self.map_err(TlsError::Service))
}
}
}
@@ -147,7 +147,7 @@ mod openssl {
mod rustls {
use super::*;
use actix_tls::rustls::{Acceptor, ServerConfig, TlsStream};
use actix_tls::SslError;
use actix_tls::TlsError;
use std::{fmt, io};

impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
@@ -176,19 +176,19 @@ mod rustls {
Config = (),
Request = TcpStream,
Response = (),
Error = SslError<io::Error, DispatchError>,
Error = TlsError<io::Error, DispatchError>,
InitError = (),
> {
pipeline_factory(
Acceptor::new(config)
.map_err(SslError::Ssl)
.map_err(TlsError::Tls)
.map_init_err(|_| panic!()),
)
.and_then(|io: TlsStream<TcpStream>| {
let peer_addr = io.get_ref().0.peer_addr().ok();
ok((io, peer_addr))
})
.and_then(self.map_err(SslError::Service))
.and_then(self.map_err(TlsError::Service))
}
}
}
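The repeated SslError-to-TlsError substitutions above are a rename of the combined acceptor/service error type in actix-tls. A self-contained sketch of the shape that type expresses, using a stand-in definition rather than the actix-tls one:

// Stand-in enum for illustration; the real type lives in actix-tls.
#[derive(Debug)]
enum TlsError<TlsErr, SvcErr> {
    Tls(TlsErr),     // the TLS accept/handshake step failed
    Service(SvcErr), // the wrapped HTTP service failed
}

// Folding the two failure sources into one error type, as the pipelines above do
// with `.map_err(TlsError::Tls)` and `.map_err(TlsError::Service)`.
fn combine<A, S>(accept: Result<(), A>, service: Result<(), S>) -> Result<(), TlsError<A, S>> {
    accept.map_err(TlsError::Tls)?;
    service.map_err(TlsError::Service)
}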
@@ -548,10 +548,12 @@ where
}

#[doc(hidden)]
#[pin_project::pin_project]
pub struct OneRequestServiceResponse<T>
where
T: AsyncRead + AsyncWrite + Unpin,
{
#[pin]
framed: Option<Framed<T, Codec>>,
}

@@ -562,16 +564,18 @@ where
type Output = Result<(Request, Framed<T, Codec>), ParseError>;

fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
match self.framed.as_mut().unwrap().next_item(cx) {
Poll::Ready(Some(Ok(req))) => match req {
let this = self.as_mut().project();

match ready!(this.framed.as_pin_mut().unwrap().next_item(cx)) {
Some(Ok(req)) => match req {
Message::Item(req) => {
Poll::Ready(Ok((req, self.framed.take().unwrap())))
let mut this = self.as_mut().project();
Poll::Ready(Ok((req, this.framed.take().unwrap())))
}
Message::Chunk(_) => unreachable!("Something is wrong"),
},
Poll::Ready(Some(Err(err))) => Poll::Ready(Err(err)),
Poll::Ready(None) => Poll::Ready(Err(ParseError::Incomplete)),
Poll::Pending => Poll::Pending,
Some(Err(err)) => Poll::Ready(Err(err)),
None => Poll::Ready(Err(ParseError::Incomplete)),
}
}
}
@@ -9,12 +9,13 @@ use crate::error::Error;
use crate::h1::{Codec, Message};
use crate::response::Response;

/// Send http/1 response
/// Send HTTP/1 response
#[pin_project::pin_project]
pub struct SendResponse<T, B> {
res: Option<Message<(Response<()>, BodySize)>>,
#[pin]
body: Option<ResponseBody<B>>,
#[pin]
framed: Option<Framed<T, Codec>>,
}

@@ -35,23 +36,30 @@ where

impl<T, B> Future for SendResponse<T, B>
where
T: AsyncRead + AsyncWrite,
T: AsyncRead + AsyncWrite + Unpin,
B: MessageBody + Unpin,
{
type Output = Result<Framed<T, Codec>, Error>;

// TODO: rethink if we need loops in polls
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let mut this = self.project();
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let mut this = self.as_mut().project();

let mut body_done = this.body.is_none();
loop {
let mut body_ready = !body_done;
let framed = this.framed.as_mut().unwrap();

// send body
if this.res.is_none() && body_ready {
while body_ready && !body_done && !framed.is_write_buf_full() {
while body_ready
&& !body_done
&& !this
.framed
.as_ref()
.as_pin_ref()
.unwrap()
.is_write_buf_full()
{
match this.body.as_mut().as_pin_mut().unwrap().poll_next(cx)? {
Poll::Ready(item) => {
// body is done when item is None
@@ -59,6 +67,7 @@ where
if body_done {
let _ = this.body.take();
}
let framed = this.framed.as_mut().as_pin_mut().unwrap();
framed.write(Message::Chunk(item))?;
}
Poll::Pending => body_ready = false,
@@ -66,6 +75,8 @@ where
}
}

let framed = this.framed.as_mut().as_pin_mut().unwrap();

// flush write buffer
if !framed.is_write_buf_empty() {
match framed.flush(cx)? {
@@ -96,6 +107,9 @@ where
break;
}
}
Poll::Ready(Ok(this.framed.take().unwrap()))

let framed = this.framed.take().unwrap();

Poll::Ready(Ok(framed))
}
}
@@ -165,7 +165,7 @@ struct ServiceResponse<F, I, E, B> {
_t: PhantomData<(I, E)>,
}

#[pin_project::pin_project]
#[pin_project::pin_project(project = ServiceResponseStateProj)]
enum ServiceResponseState<F, B> {
ServiceCall(#[pin] F, Option<SendResponse<Bytes>>),
SendPayload(SendStream<Bytes>, #[pin] ResponseBody<B>),
@@ -227,9 +227,11 @@ where
if !has_date {
let mut bytes = BytesMut::with_capacity(29);
self.config.set_date_header(&mut bytes);
res.headers_mut().insert(DATE, unsafe {
HeaderValue::from_maybe_shared_unchecked(bytes.freeze())
});
res.headers_mut().insert(
DATE,
// SAFETY: serialized date-times are known ASCII strings
unsafe { HeaderValue::from_maybe_shared_unchecked(bytes.freeze()) },
);
}

res
@@ -245,13 +247,11 @@ where
{
type Output = ();

#[pin_project::project]
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let mut this = self.as_mut().project();

#[project]
match this.state.project() {
ServiceResponseState::ServiceCall(call, send) => match call.poll(cx) {
ServiceResponseStateProj::ServiceCall(call, send) => match call.poll(cx) {
Poll::Ready(Ok(res)) => {
let (res, body) = res.into().replace_body(());

@@ -305,55 +305,59 @@ where
}
}
},
ServiceResponseState::SendPayload(ref mut stream, ref mut body) => loop {
ServiceResponseStateProj::SendPayload(ref mut stream, ref mut body) => {
loop {
if let Some(ref mut buffer) = this.buffer {
match stream.poll_capacity(cx) {
Poll::Pending => return Poll::Pending,
Poll::Ready(None) => return Poll::Ready(()),
Poll::Ready(Some(Ok(cap))) => {
let len = buffer.len();
let bytes = buffer.split_to(std::cmp::min(cap, len));
loop {
if let Some(ref mut buffer) = this.buffer {
match stream.poll_capacity(cx) {
Poll::Pending => return Poll::Pending,
Poll::Ready(None) => return Poll::Ready(()),
Poll::Ready(Some(Ok(cap))) => {
let len = buffer.len();
let bytes = buffer.split_to(std::cmp::min(cap, len));

if let Err(e) = stream.send_data(bytes, false) {
if let Err(e) = stream.send_data(bytes, false) {
warn!("{:?}", e);
return Poll::Ready(());
} else if !buffer.is_empty() {
let cap =
std::cmp::min(buffer.len(), CHUNK_SIZE);
stream.reserve_capacity(cap);
} else {
this.buffer.take();
}
}
Poll::Ready(Some(Err(e))) => {
warn!("{:?}", e);
return Poll::Ready(());
} else if !buffer.is_empty() {
let cap = std::cmp::min(buffer.len(), CHUNK_SIZE);
stream.reserve_capacity(cap);
} else {
this.buffer.take();
}
}
Poll::Ready(Some(Err(e))) => {
warn!("{:?}", e);
return Poll::Ready(());
}
}
} else {
match body.as_mut().poll_next(cx) {
Poll::Pending => return Poll::Pending,
Poll::Ready(None) => {
if let Err(e) = stream.send_data(Bytes::new(), true) {
warn!("{:?}", e);
} else {
match body.as_mut().poll_next(cx) {
Poll::Pending => return Poll::Pending,
Poll::Ready(None) => {
if let Err(e) = stream.send_data(Bytes::new(), true)
{
warn!("{:?}", e);
}
return Poll::Ready(());
}
Poll::Ready(Some(Ok(chunk))) => {
stream.reserve_capacity(std::cmp::min(
chunk.len(),
CHUNK_SIZE,
));
*this.buffer = Some(chunk);
}
Poll::Ready(Some(Err(e))) => {
error!("Response payload stream error: {:?}", e);
return Poll::Ready(());
}
return Poll::Ready(());
}
Poll::Ready(Some(Ok(chunk))) => {
stream.reserve_capacity(std::cmp::min(
chunk.len(),
CHUNK_SIZE,
));
*this.buffer = Some(chunk);
}
Poll::Ready(Some(Err(e))) => {
error!("Response payload stream error: {:?}", e);
return Poll::Ready(());
}
}
}
}
},
}
}
}
}
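The `#[pin_project::pin_project(project = ServiceResponseStateProj)]` change above, together with the removal of the `#[project]` attributes, is the move to pin-project's named-projection API. A self-contained sketch of that pattern with hypothetical type names:

use std::pin::Pin;

// Hypothetical enum for illustration; only the attribute/projection pattern matters.
#[pin_project::pin_project(project = StateProj)]
enum State<F> {
    Running(#[pin] F),
    Done,
}

fn describe<F>(state: Pin<&mut State<F>>) -> &'static str {
    // The generated `StateProj` type replaces the old `#[project]` attribute syntax.
    match state.project() {
        StateProj::Running(_fut) => "still polling the pinned inner future",
        StateProj::Done => "finished",
    }
}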
|
@@ -97,7 +97,7 @@
mod openssl {
use actix_service::{fn_factory, fn_service};
use actix_tls::openssl::{Acceptor, SslAcceptor, SslStream};
use actix_tls::{openssl::HandshakeError, SslError};
use actix_tls::{openssl::HandshakeError, TlsError};

use super::*;

@@ -117,12 +117,12 @@ mod openssl {
Config = (),
Request = TcpStream,
Response = (),
Error = SslError<HandshakeError<TcpStream>, DispatchError>,
Error = TlsError<HandshakeError<TcpStream>, DispatchError>,
InitError = S::InitError,
> {
pipeline_factory(
Acceptor::new(acceptor)
.map_err(SslError::Ssl)
.map_err(TlsError::Tls)
.map_init_err(|_| panic!()),
)
.and_then(fn_factory(|| {
@@ -131,7 +131,7 @@ mod openssl {
ok((io, peer_addr))
}))
}))
.and_then(self.map_err(SslError::Service))
.and_then(self.map_err(TlsError::Service))
}
}
}
@@ -140,7 +140,7 @@ mod openssl {
mod rustls {
use super::*;
use actix_tls::rustls::{Acceptor, ServerConfig, TlsStream};
use actix_tls::SslError;
use actix_tls::TlsError;
use std::io;

impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
@@ -159,7 +159,7 @@ mod rustls {
Config = (),
Request = TcpStream,
Response = (),
Error = SslError<io::Error, DispatchError>,
Error = TlsError<io::Error, DispatchError>,
InitError = S::InitError,
> {
let protos = vec!["h2".to_string().into()];
@@ -167,7 +167,7 @@ mod rustls {

pipeline_factory(
Acceptor::new(config)
.map_err(SslError::Ssl)
.map_err(TlsError::Tls)
.map_init_err(|_| panic!()),
)
.and_then(fn_factory(|| {
@@ -176,7 +176,7 @@ mod rustls {
ok((io, peer_addr))
}))
}))
.and_then(self.map_err(SslError::Service))
.and_then(self.map_err(TlsError::Service))
}
}
}
@@ -1,5 +1,5 @@
use http::Method;
use http::header;
use http::Method;

header! {
/// `Allow` header, defined in [RFC7231](http://tools.ietf.org/html/rfc7231#section-7.4.1)

@@ -283,11 +283,11 @@ impl DispositionParam {
/// Some("\u{1f600}.svg".as_bytes()));
/// ```
///
/// # WARN
/// # Security Note
///
/// If "filename" parameter is supplied, do not use the file name blindly, check and possibly
/// change to match local file system conventions if applicable, and do not use directory path
/// information that may be present. See [RFC2183](https://tools.ietf.org/html/rfc2183#section-2.3)
/// .
/// information that may be present. See [RFC2183](https://tools.ietf.org/html/rfc2183#section-2.3).
#[derive(Clone, Debug, PartialEq)]
pub struct ContentDisposition {
/// The disposition type
@@ -387,26 +387,17 @@ impl ContentDisposition {

/// Returns `true` if it is [`Inline`](DispositionType::Inline).
pub fn is_inline(&self) -> bool {
match self.disposition {
DispositionType::Inline => true,
_ => false,
}
matches!(self.disposition, DispositionType::Inline)
}

/// Returns `true` if it is [`Attachment`](DispositionType::Attachment).
pub fn is_attachment(&self) -> bool {
match self.disposition {
DispositionType::Attachment => true,
_ => false,
}
matches!(self.disposition, DispositionType::Attachment)
}

/// Returns `true` if it is [`FormData`](DispositionType::FormData).
pub fn is_form_data(&self) -> bool {
match self.disposition {
DispositionType::FormData => true,
_ => false,
}
matches!(self.disposition, DispositionType::FormData)
}

/// Returns `true` if it is [`Ext`](DispositionType::Ext) and the `disp_type` matches.
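The matches! rewrites above are purely mechanical. A standalone sketch with a stand-in enum showing that the two forms are equivalent:

// Stand-in enum; not the crate's DispositionType.
enum Disposition { Inline, Attachment }

fn is_inline_old(d: &Disposition) -> bool {
    match d {
        Disposition::Inline => true,
        _ => false,
    }
}

fn is_inline_new(d: &Disposition) -> bool {
    // matches! expands to the same match-with-bool shape as above.
    matches!(d, Disposition::Inline)
}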
@@ -9,11 +9,13 @@

pub use self::accept_charset::AcceptCharset;
//pub use self::accept_encoding::AcceptEncoding;
pub use self::accept_language::AcceptLanguage;
pub use self::accept::Accept;
pub use self::accept_language::AcceptLanguage;
pub use self::allow::Allow;
pub use self::cache_control::{CacheControl, CacheDirective};
pub use self::content_disposition::{ContentDisposition, DispositionType, DispositionParam};
pub use self::content_disposition::{
ContentDisposition, DispositionParam, DispositionType,
};
pub use self::content_language::ContentLanguage;
pub use self::content_range::{ContentRange, ContentRangeSpec};
pub use self::content_type::ContentType;
@@ -47,7 +49,7 @@ macro_rules! __hyper__deref {
&mut self.0
}
}
}
};
}

#[doc(hidden)]
@@ -74,8 +76,8 @@ macro_rules! test_header {
($id:ident, $raw:expr) => {
#[test]
fn $id() {
use $crate::test;
use super::*;
use $crate::test;

let raw = $raw;
let a: Vec<Vec<u8>> = raw.iter().map(|x| x.to_vec()).collect();
@@ -118,7 +120,7 @@ macro_rules! test_header {
// Test formatting
if typed.is_some() {
let raw = &($raw)[..];
let mut iter = raw.iter().map(|b|str::from_utf8(&b[..]).unwrap());
let mut iter = raw.iter().map(|b| str::from_utf8(&b[..]).unwrap());
let mut joined = String::new();
joined.push_str(iter.next().unwrap());
for s in iter {
@@ -128,7 +130,7 @@ macro_rules! test_header {
assert_eq!(format!("{}", typed.unwrap()), joined);
}
}
}
};
}

#[macro_export]
@@ -330,11 +332,10 @@ macro_rules! header {
};
}


mod accept_charset;
//mod accept_encoding;
mod accept_language;
mod accept;
mod accept_language;
mod allow;
mod cache_control;
mod content_disposition;

@@ -148,10 +148,7 @@ impl ContentEncoding {
#[inline]
/// Is the content compressed?
pub fn is_compression(self) -> bool {
match self {
ContentEncoding::Identity | ContentEncoding::Auto => false,
_ => true,
}
matches!(self, ContentEncoding::Identity | ContentEncoding::Auto)
}

#[inline]

@@ -167,7 +167,6 @@ where
mod tests {
use bytes::Bytes;
use encoding_rs::ISO_8859_2;
use mime;

use super::*;
use crate::test::TestRequest;
|
@@ -1,5 +1,6 @@
//! Basic http primitives for actix-net framework.
#![warn(rust_2018_idioms, warnings)]

#![deny(rust_2018_idioms)]
#![allow(
clippy::type_complexity,
clippy::too_many_arguments,
@@ -32,7 +33,7 @@ mod response;
mod service;
mod time_parser;

pub mod cookie;
pub use cookie;
pub mod error;
pub mod h1;
pub mod h2;

@@ -38,7 +38,7 @@ macro_rules! downcast {
/// Downcasts generic body to a specific type.
pub fn downcast_ref<T: $name + 'static>(&self) -> Option<&T> {
if self.__private_get_type_id__().0 == std::any::TypeId::of::<T>() {
// Safety: external crates cannot override the default
// SAFETY: external crates cannot override the default
// implementation of `__private_get_type_id__`, since
// it requires returning a private type. We can therefore
// rely on the returned `TypeId`, which ensures that this
@@ -48,10 +48,11 @@ macro_rules! downcast {
None
}
}

/// Downcasts a generic body to a mutable specific type.
pub fn downcast_mut<T: $name + 'static>(&mut self) -> Option<&mut T> {
if self.__private_get_type_id__().0 == std::any::TypeId::of::<T>() {
// Safety: external crates cannot override the default
// SAFETY: external crates cannot override the default
// implementation of `__private_get_type_id__`, since
// it requires returning a private type. We can therefore
// rely on the returned `TypeId`, which ensures that this
@@ -86,7 +87,7 @@ mod tests {
let body = resp_body.downcast_ref::<String>().unwrap();
assert_eq!(body, "hello cast");
let body = &mut resp_body.downcast_mut::<String>().unwrap();
body.push_str("!");
body.push('!');
let body = resp_body.downcast_ref::<String>().unwrap();
assert_eq!(body, "hello cast!");
let not_body = resp_body.downcast_ref::<()>();
|
||||
.domain("www.rust-lang.org")
|
||||
.path("/test")
|
||||
.http_only(true)
|
||||
.max_age_time(time::Duration::days(1))
|
||||
.max_age(time::Duration::days(1))
|
||||
.finish(),
|
||||
)
|
||||
.del_cookie(&cookies[1])
|
||||
|
@ -10,7 +10,7 @@ use bytes::Bytes;
|
||||
use futures_core::{ready, Future};
|
||||
use futures_util::future::ok;
|
||||
use h2::server::{self, Handshake};
|
||||
use pin_project::{pin_project, project};
|
||||
use pin_project::pin_project;
|
||||
|
||||
use crate::body::MessageBody;
|
||||
use crate::builder::HttpServiceBuilder;
|
||||
@ -195,7 +195,7 @@ where
|
||||
mod openssl {
|
||||
use super::*;
|
||||
use actix_tls::openssl::{Acceptor, SslAcceptor, SslStream};
|
||||
use actix_tls::{openssl::HandshakeError, SslError};
|
||||
use actix_tls::{openssl::HandshakeError, TlsError};
|
||||
|
||||
impl<S, B, X, U> HttpService<SslStream<TcpStream>, S, B, X, U>
|
||||
where
|
||||
@ -226,12 +226,12 @@ mod openssl {
|
||||
Config = (),
|
||||
Request = TcpStream,
|
||||
Response = (),
|
||||
Error = SslError<HandshakeError<TcpStream>, DispatchError>,
|
||||
Error = TlsError<HandshakeError<TcpStream>, DispatchError>,
|
||||
InitError = (),
|
||||
> {
|
||||
pipeline_factory(
|
||||
Acceptor::new(acceptor)
|
||||
.map_err(SslError::Ssl)
|
||||
.map_err(TlsError::Tls)
|
||||
.map_init_err(|_| panic!()),
|
||||
)
|
||||
.and_then(|io: SslStream<TcpStream>| {
|
||||
@ -247,7 +247,7 @@ mod openssl {
|
||||
let peer_addr = io.get_ref().peer_addr().ok();
|
||||
ok((io, proto, peer_addr))
|
||||
})
|
||||
.and_then(self.map_err(SslError::Service))
|
||||
.and_then(self.map_err(TlsError::Service))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -256,7 +256,7 @@ mod openssl {
|
||||
mod rustls {
|
||||
use super::*;
|
||||
use actix_tls::rustls::{Acceptor, ServerConfig, Session, TlsStream};
|
||||
use actix_tls::SslError;
|
||||
use actix_tls::TlsError;
|
||||
use std::io;
|
||||
|
||||
impl<S, B, X, U> HttpService<TlsStream<TcpStream>, S, B, X, U>
|
||||
@ -288,7 +288,7 @@ mod rustls {
|
||||
Config = (),
|
||||
Request = TcpStream,
|
||||
Response = (),
|
||||
Error = SslError<io::Error, DispatchError>,
|
||||
Error = TlsError<io::Error, DispatchError>,
|
||||
InitError = (),
|
||||
> {
|
||||
let protos = vec!["h2".to_string().into(), "http/1.1".to_string().into()];
|
||||
@ -296,7 +296,7 @@ mod rustls {
|
||||
|
||||
pipeline_factory(
|
||||
Acceptor::new(config)
|
||||
.map_err(SslError::Ssl)
|
||||
.map_err(TlsError::Tls)
|
||||
.map_init_err(|_| panic!()),
|
||||
)
|
||||
.and_then(|io: TlsStream<TcpStream>| {
|
||||
@ -312,7 +312,7 @@ mod rustls {
|
||||
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||
ok((io, proto, peer_addr))
|
||||
})
|
||||
.and_then(self.map_err(SslError::Service))
|
||||
.and_then(self.map_err(TlsError::Service))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -574,7 +574,7 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
#[pin_project]
|
||||
#[pin_project(project = StateProj)]
|
||||
enum State<T, S, B, X, U>
|
||||
where
|
||||
S: Service<Request = Request>,
|
||||
@ -650,16 +650,14 @@ where
|
||||
U: Service<Request = (Request, Framed<T, h1::Codec>), Response = ()>,
|
||||
U::Error: fmt::Display,
|
||||
{
|
||||
#[project]
|
||||
fn poll(
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
) -> Poll<Result<(), DispatchError>> {
|
||||
#[project]
|
||||
match self.as_mut().project() {
|
||||
State::H1(disp) => disp.poll(cx),
|
||||
State::H2(disp) => disp.poll(cx),
|
||||
State::H2Handshake(ref mut data) => {
|
||||
StateProj::H1(disp) => disp.poll(cx),
|
||||
StateProj::H2(disp) => disp.poll(cx),
|
||||
StateProj::H2Handshake(ref mut data) => {
|
||||
let conn = if let Some(ref mut item) = data {
|
||||
match Pin::new(&mut item.0).poll(cx) {
|
||||
Poll::Ready(Ok(conn)) => conn,
|
||||
|
@@ -1,6 +1,5 @@
//! Test Various helpers for Actix applications to use during testing.
use std::convert::TryFrom;
use std::fmt::Write as FmtWrite;
use std::io::{self, Read, Write};
use std::pin::Pin;
use std::str::FromStr;
@@ -10,9 +9,8 @@ use actix_codec::{AsyncRead, AsyncWrite};
use bytes::{Bytes, BytesMut};
use http::header::{self, HeaderName, HeaderValue};
use http::{Error as HttpError, Method, Uri, Version};
use percent_encoding::percent_encode;

use crate::cookie::{Cookie, CookieJar, USERINFO};
use crate::cookie::{Cookie, CookieJar};
use crate::header::HeaderMap;
use crate::header::{Header, IntoHeaderValue};
use crate::payload::Payload;
@@ -163,17 +161,17 @@ impl TestRequest {
head.version = inner.version;
head.headers = inner.headers;

let mut cookie = String::new();
for c in inner.cookies.delta() {
let name = percent_encode(c.name().as_bytes(), USERINFO);
let value = percent_encode(c.value().as_bytes(), USERINFO);
let _ = write!(&mut cookie, "; {}={}", name, value);
}
let cookie: String = inner
.cookies
.delta()
// ensure only name=value is written to cookie header
.map(|c| Cookie::new(c.name(), c.value()).encoded().to_string())
.collect::<Vec<_>>()
.join("; ");

if !cookie.is_empty() {
head.headers.insert(
header::COOKIE,
HeaderValue::from_str(&cookie.as_str()[2..]).unwrap(),
);
head.headers
.insert(header::COOKIE, HeaderValue::from_str(&cookie).unwrap());
}

req
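A standalone sketch of the join-based Cookie header construction shown above, using plain string pairs instead of the cookie crate's delta iterator:

fn main() {
    // Hypothetical name/value pairs standing in for the jar's delta cookies.
    let pairs = [("lang", "rust"), ("theme", "dark")];
    let cookie: String = pairs
        .iter()
        .map(|(k, v)| format!("{}={}", k, v))
        .collect::<Vec<_>>()
        .join("; ");
    // Joining avoids the old pattern of writing "; " prefixes and slicing them off.
    assert_eq!(cookie, "lang=rust; theme=dark");
}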
@@ -91,8 +91,8 @@ impl Codec {
}
}

impl Encoder for Codec {
type Item = Message;
impl Encoder<Message> for Codec {
type Error = ProtocolError;

fn encode(&mut self, item: Message, dst: &mut BytesMut) -> Result<(), Self::Error> {
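The Encoder change above moves the encoded item from an associated type to a generic parameter (the tokio-util 0.3-style trait that actix-codec re-exports). A small, self-contained sketch of that signature, assuming the tokio-util crate with its codec feature; the LineCodec type is hypothetical:

use bytes::BytesMut;

struct LineCodec;

impl tokio_util::codec::Encoder<String> for LineCodec {
    type Error = std::io::Error;

    fn encode(&mut self, item: String, dst: &mut BytesMut) -> Result<(), Self::Error> {
        // Append the item followed by a newline; the item type is now a trait parameter.
        dst.extend_from_slice(item.as_bytes());
        dst.extend_from_slice(b"\n");
        Ok(())
    }
}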
@@ -4,16 +4,18 @@ use std::task::{Context, Poll};

use actix_codec::{AsyncRead, AsyncWrite, Framed};
use actix_service::{IntoService, Service};
use actix_utils::framed;
use actix_utils::dispatcher::{Dispatcher as InnerDispatcher, DispatcherError};

use super::{Codec, Frame, Message};

#[pin_project::pin_project]
pub struct Dispatcher<S, T>
where
S: Service<Request = Frame, Response = Message> + 'static,
T: AsyncRead + AsyncWrite,
{
inner: framed::Dispatcher<S, T, Codec>,
#[pin]
inner: InnerDispatcher<S, T, Codec, Message>,
}

impl<S, T> Dispatcher<S, T>
@@ -25,13 +27,13 @@ where
{
pub fn new<F: IntoService<S>>(io: T, service: F) -> Self {
Dispatcher {
inner: framed::Dispatcher::new(Framed::new(io, Codec::new()), service),
inner: InnerDispatcher::new(Framed::new(io, Codec::new()), service),
}
}

pub fn with<F: IntoService<S>>(framed: Framed<T, Codec>, service: F) -> Self {
Dispatcher {
inner: framed::Dispatcher::new(framed, service),
inner: InnerDispatcher::new(framed, service),
}
}
}
@@ -43,9 +45,9 @@ where
S::Future: 'static,
S::Error: 'static,
{
type Output = Result<(), framed::DispatcherError<S::Error, Codec>>;
type Output = Result<(), DispatcherError<S::Error, Codec, Message>>;

fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
Pin::new(&mut self.inner).poll(cx)
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
self.project().inner.poll(cx)
}
}

@@ -229,10 +229,7 @@ mod tests {
fn is_none(
frm: &Result<Option<(bool, OpCode, Option<BytesMut>)>, ProtocolError>,
) -> bool {
match *frm {
Ok(None) => true,
_ => false,
}
matches!(*frm, Ok(None))
}

fn extract(
@@ -7,6 +7,8 @@ use std::slice;
struct ShortSlice<'a>(&'a mut [u8]);

impl<'a> ShortSlice<'a> {
/// # Safety
/// Given slice must be shorter than 8 bytes.
unsafe fn new(slice: &'a mut [u8]) -> Self {
// Sanity check for debug builds
debug_assert!(slice.len() < 8);
@@ -46,13 +48,13 @@ pub(crate) fn apply_mask(buf: &mut [u8], mask_u32: u32) {
}
}

#[inline]
// TODO: copy_nonoverlapping here compiles to call memcpy. While it is not so
// inefficient, it could be done better. The compiler does not understand that
// a `ShortSlice` must be smaller than a u64.
#[inline]
#[allow(clippy::needless_pass_by_value)]
fn xor_short(buf: ShortSlice<'_>, mask: u64) {
// Unsafe: we know that a `ShortSlice` fits in a u64
// SAFETY: we know that a `ShortSlice` fits in a u64
unsafe {
let (ptr, len) = (buf.0.as_mut_ptr(), buf.0.len());
let mut b: u64 = 0;
@@ -64,8 +66,9 @@ fn xor_short(buf: ShortSlice<'_>, mask: u64) {
}
}

/// # Safety
/// Caller must ensure the buffer has the correct size and alignment.
#[inline]
// Unsafe: caller must ensure the buffer has the correct size and alignment
unsafe fn cast_slice(buf: &mut [u8]) -> &mut [u64] {
// Assert correct size and alignment in debug builds
debug_assert!(buf.len().trailing_zeros() >= 3);
@@ -74,9 +77,9 @@ unsafe fn cast_slice(buf: &mut [u8]) -> &mut [u64] {
slice::from_raw_parts_mut(buf.as_mut_ptr() as *mut u64, buf.len() >> 3)
}

#[inline]
// Splits a slice into three parts: an unaligned short head and tail, plus an aligned
// u64 mid section.
#[inline]
fn align_buf(buf: &mut [u8]) -> (ShortSlice<'_>, &mut [u64], ShortSlice<'_>) {
let start_ptr = buf.as_ptr() as usize;
let end_ptr = start_ptr + buf.len();
@@ -91,13 +94,13 @@ fn align_buf(buf: &mut [u8]) -> (ShortSlice<'_>, &mut [u64], ShortSlice<'_>) {
let (tmp, tail) = buf.split_at_mut(end_aligned - start_ptr);
let (head, mid) = tmp.split_at_mut(start_aligned - start_ptr);

// Unsafe: we know the middle section is correctly aligned, and the outer
// SAFETY: we know the middle section is correctly aligned, and the outer
// sections are smaller than 8 bytes
unsafe { (ShortSlice::new(head), cast_slice(mid), ShortSlice(tail)) }
} else {
// We didn't cross even one aligned boundary!

// Unsafe: The outer sections are smaller than 8 bytes
// SAFETY: The outer sections are smaller than 8 bytes
unsafe { (ShortSlice::new(buf), &mut [], ShortSlice::new(&mut [])) }
}
}
@@ -139,7 +142,7 @@ mod tests {
let mut masked = unmasked.clone();
apply_mask_fallback(&mut masked[1..], &mask);

let mut masked_fast = unmasked.clone();
let mut masked_fast = unmasked;
apply_mask(&mut masked_fast[1..], mask_u32);

assert_eq!(masked, masked_fast);
|
||||
use sha1::Digest;
|
||||
let mut hasher = sha1::Sha1::new();
|
||||
|
||||
hasher.input(key);
|
||||
hasher.input(WS_GUID.as_bytes());
|
||||
hasher.update(key);
|
||||
hasher.update(WS_GUID.as_bytes());
|
||||
|
||||
base64::encode(hasher.result().as_ref())
|
||||
base64::encode(&hasher.finalize())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
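The hunk above tracks the sha-1 crate's API rename from input/result to update/finalize. A self-contained sketch of the new call shape, assuming the sha-1 0.9 and base64 crates; the function name and guid parameter here are illustrative:

use sha1::{Digest, Sha1};

fn hash_key_sketch(key: &[u8], ws_guid: &[u8]) -> String {
    let mut hasher = Sha1::new();
    // `update` replaces `input`; `finalize` replaces `result`.
    hasher.update(key);
    hasher.update(ws_guid);
    base64::encode(hasher.finalize())
}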
@@ -274,9 +274,7 @@ async fn test_h2_head_empty() {
async fn test_h2_head_binary() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| {
ok::<_, ()>(Response::Ok().body(STR))
})
.h2(|_| ok::<_, ()>(Response::Ok().body(STR)))
.openssl(ssl_acceptor())
.map_err(|_| ())
})

@@ -280,9 +280,7 @@ async fn test_h2_head_empty() {
async fn test_h2_head_binary() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| {
ok::<_, ()>(Response::Ok().body(STR))
})
.h2(|_| ok::<_, ()>(Response::Ok().body(STR)))
.rustls(ssl_acceptor())
})
.await;

@@ -489,9 +489,7 @@ async fn test_h1_head_empty() {
async fn test_h1_head_binary() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| {
ok::<_, ()>(Response::Ok().body(STR))
})
.h1(|_| ok::<_, ()>(Response::Ok().body(STR)))
.tcp()
})
.await;

@@ -8,7 +8,7 @@ use actix_codec::{AsyncRead, AsyncWrite, Framed};
use actix_http::{body, h1, ws, Error, HttpService, Request, Response};
use actix_http_test::test_server;
use actix_service::{fn_factory, Service};
use actix_utils::framed::Dispatcher;
use actix_utils::dispatcher::Dispatcher;
use bytes::Bytes;
use futures_util::future;
use futures_util::task::{Context, Poll};
@@ -59,7 +59,7 @@ where
.await
.unwrap();

Dispatcher::new(framed.into_framed(ws::Codec::new()), service)
Dispatcher::new(framed.replace_codec(ws::Codec::new()), service)
.await
.map_err(|_| panic!())
};
@@ -1,11 +0,0 @@
# Identity service for actix web framework [](https://travis-ci.org/actix/actix-web) [](https://codecov.io/gh/actix/actix-web) [](https://crates.io/crates/actix-identity) [](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)

**This crate moved to https://github.com/actix/actix-extras.**

## Documentation & community resources

* [User Guide](https://actix.rs/docs/)
* [API Documentation](https://docs.rs/actix-identity/)
* [Chat on gitter](https://gitter.im/actix/actix)
* Cargo package: [actix-session](https://crates.io/crates/actix-identity)
* Minimum supported Rust version: 1.34 or later