mirror of https://github.com/fafhrd91/actix-web synced 2025-07-04 01:51:30 +02:00

Compare commits


21 Commits

SHA1 Message Date
d92d53a4f5 Merge pull request #1541 from JohnTitor/backport-files
files: Bump up to 0.2.2
2020-05-27 00:37:29 +09:00
55d79cc1b2 files: Bump up to 0.2.2 2020-05-26 17:22:16 +09:00
a8117183bb files: Minimize futures dependencies 2020-05-26 17:19:12 +09:00
60dcfd1aff Actix-files will always send SizedStream (#1496)
* Fixes #1384

* There is no need to set no_chunking

* Test content-length for static files

* Update the tests

* Add Changelog

* Try to simply fix Windows test issues!

Co-authored-by: Rob Ede <robjtede@icloud.com>
2020-05-26 17:18:47 +09:00
7870165da2 Fix spelling error 2020-05-26 17:14:33 +09:00
6560a2285f Suppress warning 2020-05-26 17:13:35 +09:00
12eccf40e1 Merge pull request #1539 from JohnTitor/framed-0.3.1
framed: Bump up to 0.3.1 with deprecation notice
2020-05-25 18:58:32 +09:00
7a9c0f632d framed: Bump up to 0.3.1 with deprecation notice 2020-05-25 18:20:55 +09:00
7baa35a897 Merge pull request #1526 from actix/backport-gha
2.0: Tweak CI config
2020-05-21 15:04:28 +09:00
ea0d748a5e Remove Travis and AppVeyor config 2020-05-21 12:07:54 +09:00
bc4f98321a 2.0: Tweak GHA config 2020-05-21 12:06:05 +09:00
2ddbe2b15c Merge pull request #1400 from JohnTitor/actions-2.0
[2.0 backport] Tweaks for GitHub Actions
2020-03-08 03:04:31 +09:00
87378a7410 Demote lint level to warn 2020-03-08 02:23:55 +09:00
ad0d809af6 Disable coverage for PRs 2020-03-08 02:03:08 +09:00
7b00c16d44 Add some Actions workflows 2020-03-08 02:02:52 +09:00
155a6db4cd More ignore test causes timeout 2020-03-08 02:02:15 +09:00
228e886986 Don't use cache in Windows CI (#1327) 2020-03-08 02:01:59 +09:00
4aa4b5f928 Tweak caches (#1319)
* Try to use `cargo-cache`

* Tweak issue template
2020-03-08 02:01:45 +09:00
54ff97470e Initial Issue template (#1311)
* Initial Issue template

* First round of changes for the bug report

Co-authored-by: Yuki Okushi <huyuumi.dev@gmail.com>
2020-03-08 02:01:31 +09:00
f02ad2913f Fix vcpkg cache (#1312) 2020-03-08 02:01:17 +09:00
96b0dfeac3 Tweak actions (#1305)
* Add benchmark action

* Fix Windows build
2020-03-08 02:00:58 +09:00
62 changed files with 376 additions and 1237 deletions

View File

@ -1,41 +0,0 @@
environment:
global:
PROJECT_NAME: actix-web
matrix:
# Stable channel
- TARGET: i686-pc-windows-msvc
CHANNEL: stable
- TARGET: x86_64-pc-windows-gnu
CHANNEL: stable
- TARGET: x86_64-pc-windows-msvc
CHANNEL: stable
# Nightly channel
- TARGET: i686-pc-windows-msvc
CHANNEL: nightly
- TARGET: x86_64-pc-windows-gnu
CHANNEL: nightly
- TARGET: x86_64-pc-windows-msvc
CHANNEL: nightly
# Install Rust and Cargo
# (Based on from https://github.com/rust-lang/libc/blob/master/appveyor.yml)
install:
- ps: >-
If ($Env:TARGET -eq 'x86_64-pc-windows-gnu') {
$Env:PATH += ';C:\msys64\mingw64\bin'
} ElseIf ($Env:TARGET -eq 'i686-pc-windows-gnu') {
$Env:PATH += ';C:\MinGW\bin'
}
- curl -sSf -o rustup-init.exe https://win.rustup.rs
- rustup-init.exe --default-host %TARGET% --default-toolchain %CHANNEL% -y
- set PATH=%PATH%;C:\Users\appveyor\.cargo\bin
- rustc -Vv
- cargo -V
# 'cargo test' takes care of building for us, so disable Appveyor's build stage.
build: false
# Equivalent to Travis' `script` phase
test_script:
- cargo clean
- cargo test --no-default-features --features="flate2-rust"

View File

@ -1,47 +0,0 @@
name: Benchmark (Linux)
on: [push, pull_request]
jobs:
check_benchmark:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: nightly
profile: minimal
override: true
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with:
command: generate-lockfile
- name: Cache cargo registry
uses: actions/cache@v1
with:
path: ~/.cargo/registry
key: ${{ matrix.version }}-x86_64-unknown-linux-gnu-registry-trimmed-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo index
uses: actions/cache@v1
with:
path: ~/.cargo/git
key: ${{ matrix.version }}-x86_64-unknown-linux-gnu-cargo-index-trimmed-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo build
uses: actions/cache@v1
with:
path: target
key: ${{ matrix.version }}-x86_64-unknown-linux-gnu-cargo-build-trimmed-${{ hashFiles('**/Cargo.lock') }}
- name: Check benchmark
uses: actions-rs/cargo@v1
with:
command: bench
- name: Clear the cargo caches
run: |
cargo install cargo-cache --no-default-features --features ci-autoclean
cargo-cache

View File

@ -25,26 +25,6 @@ jobs:
profile: minimal
override: true
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with:
command: generate-lockfile
- name: Cache cargo registry
uses: actions/cache@v1
with:
path: ~/.cargo/registry
key: ${{ matrix.version }}-x86_64-unknown-linux-gnu-cargo-registry-trimmed-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo index
uses: actions/cache@v1
with:
path: ~/.cargo/git
key: ${{ matrix.version }}-x86_64-unknown-linux-gnu-cargo-index-trimmed-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo build
uses: actions/cache@v1
with:
path: target
key: ${{ matrix.version }}-x86_64-unknown-linux-gnu-cargo-build-trimmed-${{ hashFiles('**/Cargo.lock') }}
- name: check build
uses: actions-rs/cargo@v1
with:
@ -71,20 +51,3 @@ jobs:
with:
command: test
args: --package=awc --no-default-features --features=rustls -- --nocapture
- name: Generate coverage file
if: matrix.version == 'stable' && github.ref == 'refs/heads/master'
run: |
cargo install cargo-tarpaulin
cargo tarpaulin --out Xml
- name: Upload to Codecov
if: matrix.version == 'stable' && github.ref == 'refs/heads/master'
uses: codecov/codecov-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
file: cobertura.xml
- name: Clear the cargo caches
run: |
cargo install cargo-cache --no-default-features --features ci-autoclean
cargo-cache

View File

@ -24,26 +24,6 @@ jobs:
profile: minimal
override: true
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with:
command: generate-lockfile
- name: Cache cargo registry
uses: actions/cache@v1
with:
path: ~/.cargo/registry
key: ${{ matrix.version }}-x86_64-apple-darwin-cargo-registry-trimmed-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo index
uses: actions/cache@v1
with:
path: ~/.cargo/git
key: ${{ matrix.version }}-x86_64-apple-darwin-cargo-index-trimmed-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo build
uses: actions/cache@v1
with:
path: target
key: ${{ matrix.version }}-x86_64-apple-darwin-cargo-build-trimmed-${{ hashFiles('**/Cargo.lock') }}
- name: check build
uses: actions-rs/cargo@v1
with:
@ -57,8 +37,3 @@ jobs:
args: --all --all-features --no-fail-fast -- --nocapture
--skip=test_h2_content_length
--skip=test_reading_deflate_encoding_large_random_rustls
- name: Clear the cargo caches
run: |
cargo install cargo-cache --no-default-features --features ci-autoclean
cargo-cache

View File

@ -1,35 +0,0 @@
name: Upload documentation
on:
push:
branches:
- master
jobs:
build:
runs-on: ubuntu-latest
if: github.repository == 'actix/actix-web'
steps:
- uses: actions/checkout@master
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable-x86_64-unknown-linux-gnu
profile: minimal
override: true
- name: check build
uses: actions-rs/cargo@v1
with:
command: doc
args: --no-deps --all-features
- name: Tweak HTML
run: echo "<meta http-equiv=refresh content=0;url=os_balloon/index.html>" > target/doc/index.html
- name: Upload documentation
run: |
git clone https://github.com/davisp/ghp-import.git
./ghp-import/ghp_import.py -n -p -f -m "Documentation upload" -r https://${{ secrets.GITHUB_TOKEN }}@github.com/"${{ github.repository }}.git" target/doc

View File

@ -35,7 +35,6 @@ jobs:
Copy-Item C:\vcpkg\installed\x64-windows\bin\libssl-1_1-x64.dll C:\vcpkg\installed\x64-windows\bin\libssl.dll
Get-ChildItem C:\vcpkg\installed\x64-windows\bin
Get-ChildItem C:\vcpkg\installed\x64-windows\lib
- name: check build
uses: actions-rs/cargo@v1
with:

View File

@ -1,15 +1,10 @@
# Changes
## [2.0.NEXT] - 2020-01-xx
### Changed
* Use `sha-1` crate instead of unmaintained `sha1` crate
* Skip empty chunks when returning response from a `Stream` #1308
* Update the `time` dependency to 0.2.7
* Use `sha-1` crate instead of unmaintained `sha1` crate
## [2.0.0] - 2019-12-25

View File

@ -87,7 +87,7 @@ regex = "1.3"
serde = { version = "1.0", features=["derive"] }
serde_json = "1.0"
serde_urlencoded = "0.6.1"
time = { version = "0.2.7", default-features = false, features = ["std"] }
time = "0.1.42"
url = "2.1"
open-ssl = { version="0.10", package = "openssl", optional = true }
rust-tls = { version = "0.16.0", package = "rustls", optional = true }
@ -99,7 +99,6 @@ env_logger = "0.6"
serde_derive = "1.0"
brotli2 = "0.3.2"
flate2 = "1.0.13"
criterion = "0.3"
[profile.release]
lto = true
@ -117,11 +116,3 @@ actix-session = { path = "actix-session" }
actix-files = { path = "actix-files" }
actix-multipart = { path = "actix-multipart" }
awc = { path = "awc" }
[[bench]]
name = "server"
harness = false
[[bench]]
name = "service"
harness = false

View File

@ -1,9 +1,3 @@
## Unreleased
* Setting a cookie's SameSite property, explicitly, to `SameSite::None` will now
result in `SameSite=None` being sent with the response Set-Cookie header.
To create a cookie without a SameSite attribute, remove any calls setting same_site.
## 2.0.0
* `HttpServer::start()` renamed to `HttpServer::run()`. It also possible to
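
For reference, a minimal sketch of the behavior the removed `SameSite` entry above describes, using the `actix_http::cookie` API that appears later in this diff (output comments are illustrative; after this backport, `SameSite::None` is written as no attribute, as the `draft.rs` hunk further down shows):

```rust
use actix_http::cookie::{Cookie, SameSite};

fn main() {
    // Explicit SameSite::None: per the entry above, this serializes as "; SameSite=None".
    let explicit = Cookie::build("id", "1")
        .same_site(SameSite::None)
        .finish();

    // No same_site() call at all: no SameSite attribute is written.
    let unset = Cookie::build("id", "1").finish();

    println!("{}", explicit); // "id=1; SameSite=None" (plain "id=1" on this backported branch)
    println!("{}", unset);    // "id=1"
}
```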

View File

@ -1,5 +1,13 @@
# Changes
## [0.2.2] - 2020-05-26
* Minimize `futures` dependency
* Support sending Content-Length when Content-Range is specified [#1496]
* Update `actix-web` to 2.0.0
[#1496]: https://github.com/actix/actix-web/issues/1496
## [0.2.1] - 2019-12-22
* Use the same format for file URLs regardless of platforms

View File

@ -1,6 +1,6 @@
[package]
name = "actix-files"
version = "0.2.1"
version = "0.2.2"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Static files support for actix web."
readme = "README.md"
@ -18,12 +18,13 @@ name = "actix_files"
path = "src/lib.rs"
[dependencies]
actix-web = { version = "2.0.0-rc", default-features = false }
actix-web = { version = "2.0.0", default-features = false }
actix-http = "1.0.1"
actix-service = "1.0.1"
bitflags = "1"
bytes = "0.5.3"
futures = "0.3.1"
futures-core = { version = "0.3.5", default-features = false }
futures-util = { version = "0.3.5", default-features = false }
derive_more = "0.99.2"
log = "0.4"
mime = "0.3"
@ -33,4 +34,4 @@ v_htmlescape = "0.4"
[dev-dependencies]
actix-rt = "1.0.0"
actix-web = { version = "2.0.0-rc", features=["openssl"] }
actix-web = { version = "2.0.0", features = ["openssl"] }

View File

@ -24,8 +24,8 @@ use actix_web::http::header::{self, DispositionType};
use actix_web::http::Method;
use actix_web::{web, FromRequest, HttpRequest, HttpResponse};
use bytes::Bytes;
use futures::future::{ok, ready, Either, FutureExt, LocalBoxFuture, Ready};
use futures::Stream;
use futures_core::Stream;
use futures_util::future::{ok, ready, Either, FutureExt, LocalBoxFuture, Ready};
use mime;
use mime_guess::from_ext;
use percent_encoding::{utf8_percent_encode, CONTROLS};
@ -521,7 +521,7 @@ impl Service for FilesService {
Err(e) => return Either::Left(ok(req.error_response(e))),
};
// full filepath
// full file path
let path = match self.directory.join(&real_path.0).canonicalize() {
Ok(path) => path,
Err(e) => return self.handle_err(e, req),
@ -952,135 +952,92 @@ mod tests {
#[actix_rt::test]
async fn test_named_file_content_range_headers() {
let mut srv = test::init_service(
App::new().service(Files::new("/test", ".").index_file("tests/test.binary")),
)
.await;
let srv = test::start(|| {
App::new().service(Files::new("/", "."))
});
// Valid range header
let request = TestRequest::get()
.uri("/t%65st/tests/test.binary")
let response = srv
.get("/tests/test.binary")
.header(header::RANGE, "bytes=10-20")
.to_request();
let response = test::call_service(&mut srv, request).await;
let contentrange = response
.headers()
.get(header::CONTENT_RANGE)
.unwrap()
.to_str()
.send()
.await
.unwrap();
assert_eq!(contentrange, "bytes 10-20/100");
let content_range = response.headers().get(header::CONTENT_RANGE).unwrap();
assert_eq!(content_range.to_str().unwrap(), "bytes 10-20/100");
// Invalid range header
let request = TestRequest::get()
.uri("/t%65st/tests/test.binary")
let response = srv
.get("/tests/test.binary")
.header(header::RANGE, "bytes=10-5")
.to_request();
let response = test::call_service(&mut srv, request).await;
let contentrange = response
.headers()
.get(header::CONTENT_RANGE)
.unwrap()
.to_str()
.send()
.await
.unwrap();
assert_eq!(contentrange, "bytes */100");
let content_range = response.headers().get(header::CONTENT_RANGE).unwrap();
assert_eq!(content_range.to_str().unwrap(), "bytes */100");
}
#[actix_rt::test]
async fn test_named_file_content_length_headers() {
// use actix_web::body::{MessageBody, ResponseBody};
let mut srv = test::init_service(
App::new().service(Files::new("test", ".").index_file("tests/test.binary")),
)
.await;
let srv = test::start(|| {
App::new().service(Files::new("/", "."))
});
// Valid range header
let request = TestRequest::get()
.uri("/t%65st/tests/test.binary")
let response = srv
.get("/tests/test.binary")
.header(header::RANGE, "bytes=10-20")
.to_request();
let _response = test::call_service(&mut srv, request).await;
.send()
.await
.unwrap();
let content_length = response.headers().get(header::CONTENT_LENGTH).unwrap();
assert_eq!(content_length.to_str().unwrap(), "11");
// let contentlength = response
// .headers()
// .get(header::CONTENT_LENGTH)
// .unwrap()
// .to_str()
// .unwrap();
// assert_eq!(contentlength, "11");
// Invalid range header
let request = TestRequest::get()
.uri("/t%65st/tests/test.binary")
.header(header::RANGE, "bytes=10-8")
.to_request();
let response = test::call_service(&mut srv, request).await;
assert_eq!(response.status(), StatusCode::RANGE_NOT_SATISFIABLE);
// Valid range header, starting from 0
let response = srv
.get("/tests/test.binary")
.header(header::RANGE, "bytes=0-20")
.send()
.await
.unwrap();
let content_length = response.headers().get(header::CONTENT_LENGTH).unwrap();
assert_eq!(content_length.to_str().unwrap(), "21");
// Without range header
let request = TestRequest::get()
.uri("/t%65st/tests/test.binary")
// .no_default_headers()
.to_request();
let _response = test::call_service(&mut srv, request).await;
let mut response = srv.get("/tests/test.binary").send().await.unwrap();
let content_length = response.headers().get(header::CONTENT_LENGTH).unwrap();
assert_eq!(content_length.to_str().unwrap(), "100");
// let contentlength = response
// .headers()
// .get(header::CONTENT_LENGTH)
// .unwrap()
// .to_str()
// .unwrap();
// assert_eq!(contentlength, "100");
// Should be no transfer-encoding
let transfer_encoding = response.headers().get(header::TRANSFER_ENCODING);
assert!(transfer_encoding.is_none());
// chunked
let request = TestRequest::get()
.uri("/t%65st/tests/test.binary")
.to_request();
let response = test::call_service(&mut srv, request).await;
// with enabled compression
// {
// let te = response
// .headers()
// .get(header::TRANSFER_ENCODING)
// .unwrap()
// .to_str()
// .unwrap();
// assert_eq!(te, "chunked");
// }
let bytes = test::read_body(response).await;
// Check file contents
let bytes = response.body().await.unwrap();
let data = Bytes::from(fs::read("tests/test.binary").unwrap());
assert_eq!(bytes, data);
}
#[actix_rt::test]
async fn test_head_content_length_headers() {
let mut srv = test::init_service(
App::new().service(Files::new("test", ".").index_file("tests/test.binary")),
)
.await;
let srv = test::start(|| {
App::new().service(Files::new("/", "."))
});
// Valid range header
let request = TestRequest::default()
.method(Method::HEAD)
.uri("/t%65st/tests/test.binary")
.to_request();
let _response = test::call_service(&mut srv, request).await;
let response = srv
.head("/tests/test.binary")
.send()
.await
.unwrap();
// TODO: fix check
// let contentlength = response
// .headers()
// .get(header::CONTENT_LENGTH)
// .unwrap()
// .to_str()
// .unwrap();
// assert_eq!(contentlength, "100");
let content_length = response
.headers()
.get(header::CONTENT_LENGTH)
.unwrap()
.to_str()
.unwrap();
assert_eq!(content_length, "100");
}
#[actix_rt::test]

View File

@ -18,7 +18,7 @@ use actix_web::http::header::{
};
use actix_web::http::{ContentEncoding, StatusCode};
use actix_web::{Error, HttpMessage, HttpRequest, HttpResponse, Responder};
use futures::future::{ready, Ready};
use futures_util::future::{ready, Ready};
use crate::range::HttpRange;
use crate::ChunkedReadFile;
@ -388,11 +388,12 @@ impl NamedFile {
fut: None,
counter: 0,
};
if offset != 0 || length != self.md.len() {
Ok(resp.status(StatusCode::PARTIAL_CONTENT).streaming(reader))
} else {
Ok(resp.body(SizedStream::new(length, reader)))
resp.status(StatusCode::PARTIAL_CONTENT);
}
Ok(resp.body(SizedStream::new(length, reader)))
}
}

View File

@ -1,6 +1,6 @@
[package]
name = "actix-framed"
version = "0.3.0"
version = "0.3.1"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Actix framed app server"
readme = "README.md"

View File

@ -1,5 +1,7 @@
# Framed app for actix web [![Build Status](https://travis-ci.org/actix/actix-web.svg?branch=master)](https://travis-ci.org/actix/actix-web) [![codecov](https://codecov.io/gh/actix/actix-web/branch/master/graph/badge.svg)](https://codecov.io/gh/actix/actix-web) [![crates.io](https://meritbadge.herokuapp.com/actix-framed)](https://crates.io/crates/actix-framed) [![Join the chat at https://gitter.im/actix/actix](https://badges.gitter.im/actix/actix.svg)](https://gitter.im/actix/actix?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
**Notice**: `actix-framed` is deprecated since we don't use it anymore.
## Documentation & community resources
* [API Documentation](https://docs.rs/actix-framed/)

View File

@ -1,5 +1,9 @@
# Changes
## [0.3.1] - 2020-05-25
* Deprecate this crate
## [0.3.0] - 2019-12-25
* Migrate to actix-http 1.0

View File

@ -1,15 +1,5 @@
# Changes
# [Unreleased]
### Changed
* Update the `time` dependency to 0.2.7
### Fixed
* Allow `SameSite=None` cookies to be sent in a response.
## [1.0.1] - 2019-12-20
### Fixed

View File

@ -52,6 +52,7 @@ base64 = "0.11"
bitflags = "1.2"
bytes = "0.5.3"
copyless = "0.1.4"
chrono = "0.4.6"
derive_more = "0.99.2"
either = "1.5.3"
encoding_rs = "0.8"
@ -76,7 +77,7 @@ serde_json = "1.0"
sha-1 = "0.8"
slab = "0.4"
serde_urlencoded = "0.6.1"
time = { version = "0.2.7", default-features = false, features = ["std"] }
time = "0.1.42"
# for secure cookie
ring = { version = "0.16.9", optional = true }

View File

@ -5,7 +5,6 @@ use std::{fmt, mem};
use bytes::{Bytes, BytesMut};
use futures_core::Stream;
use futures_util::ready;
use pin_project::{pin_project, project};
use crate::error::Error;
@ -37,12 +36,8 @@ pub trait MessageBody {
fn size(&self) -> BodySize;
fn poll_next(&mut self, cx: &mut Context<'_>) -> Poll<Option<Result<Bytes, Error>>>;
downcast_get_type_id!();
}
downcast!(MessageBody);
impl MessageBody for () {
fn size(&self) -> BodySize {
BodySize::Empty
@ -365,8 +360,10 @@ impl MessageBody for String {
/// Type represent streaming body.
/// Response does not contain `content-length` header and appropriate transfer encoding is used.
#[pin_project]
pub struct BodyStream<S, E> {
stream: Pin<Box<S>>,
#[pin]
stream: S,
_t: PhantomData<E>,
}
@ -377,7 +374,7 @@ where
{
pub fn new(stream: S) -> Self {
BodyStream {
stream: Box::pin(stream),
stream,
_t: PhantomData,
}
}
@ -392,27 +389,22 @@ where
BodySize::Stream
}
/// Attempts to pull out the next value of the underlying [`Stream`].
///
/// Empty values are skipped to prevent [`BodyStream`]'s transmission being
/// ended on a zero-length chunk, but rather proceed until the underlying
/// [`Stream`] ends.
fn poll_next(&mut self, cx: &mut Context<'_>) -> Poll<Option<Result<Bytes, Error>>> {
let mut stream = self.stream.as_mut();
loop {
return Poll::Ready(match ready!(stream.as_mut().poll_next(cx)) {
Some(Ok(ref bytes)) if bytes.is_empty() => continue,
opt => opt.map(|res| res.map_err(Into::into)),
});
}
unsafe { Pin::new_unchecked(self) }
.project()
.stream
.poll_next(cx)
.map(|res| res.map(|res| res.map_err(std::convert::Into::into)))
}
}
/// Type represent streaming body. This body implementation should be used
/// if total size of stream is known. Data get sent as is without using transfer encoding.
#[pin_project]
pub struct SizedStream<S> {
size: u64,
stream: Pin<Box<S>>,
#[pin]
stream: S,
}
impl<S> SizedStream<S>
@ -420,10 +412,7 @@ where
S: Stream<Item = Result<Bytes, Error>>,
{
pub fn new(size: u64, stream: S) -> Self {
SizedStream {
size,
stream: Box::pin(stream),
}
SizedStream { size, stream }
}
}
@ -435,26 +424,17 @@ where
BodySize::Sized64(self.size)
}
/// Attempts to pull out the next value of the underlying [`Stream`].
///
/// Empty values are skipped to prevent [`SizedStream`]'s transmission being
/// ended on a zero-length chunk, but rather proceed until the underlying
/// [`Stream`] ends.
fn poll_next(&mut self, cx: &mut Context<'_>) -> Poll<Option<Result<Bytes, Error>>> {
let mut stream = self.stream.as_mut();
loop {
return Poll::Ready(match ready!(stream.as_mut().poll_next(cx)) {
Some(Ok(ref bytes)) if bytes.is_empty() => continue,
val => val,
});
}
unsafe { Pin::new_unchecked(self) }
.project()
.stream
.poll_next(cx)
}
}
#[cfg(test)]
mod tests {
use super::*;
use futures::stream;
use futures_util::future::poll_fn;
impl Body {
@ -609,59 +589,4 @@ mod tests {
BodySize::Sized(25)
);
}
mod body_stream {
use super::*;
#[actix_rt::test]
async fn skips_empty_chunks() {
let mut body = BodyStream::new(stream::iter(
["1", "", "2"]
.iter()
.map(|&v| Ok(Bytes::from(v)) as Result<Bytes, ()>),
));
assert_eq!(
poll_fn(|cx| body.poll_next(cx)).await.unwrap().ok(),
Some(Bytes::from("1")),
);
assert_eq!(
poll_fn(|cx| body.poll_next(cx)).await.unwrap().ok(),
Some(Bytes::from("2")),
);
}
}
mod sized_stream {
use super::*;
#[actix_rt::test]
async fn skips_empty_chunks() {
let mut body = SizedStream::new(
2,
stream::iter(["1", "", "2"].iter().map(|&v| Ok(Bytes::from(v)))),
);
assert_eq!(
poll_fn(|cx| body.poll_next(cx)).await.unwrap().ok(),
Some(Bytes::from("1")),
);
assert_eq!(
poll_fn(|cx| body.poll_next(cx)).await.unwrap().ok(),
Some(Bytes::from("2")),
);
}
}
#[actix_rt::test]
async fn test_body_casting() {
let mut body = String::from("hello cast");
let resp_body: &mut dyn MessageBody = &mut body;
let body = resp_body.downcast_ref::<String>().unwrap();
assert_eq!(body, "hello cast");
let body = &mut resp_body.downcast_mut::<String>().unwrap();
body.push_str("!");
let body = resp_body.downcast_ref::<String>().unwrap();
assert_eq!(body, "hello cast!");
let not_body = resp_body.downcast_ref::<()>();
assert!(not_body.is_none());
}
}

View File

@ -17,7 +17,6 @@ use h2::client::{handshake, Connection, SendRequest};
use http::uri::Authority;
use indexmap::IndexSet;
use slab::Slab;
use pin_project::pin_project;
use super::connection::{ConnectionType, IoConnection};
use super::error::ConnectError;
@ -423,7 +422,6 @@ where
}
}
#[pin_project]
struct ConnectorPoolSupport<T, Io>
where
Io: AsyncRead + AsyncWrite + Unpin + 'static,
@ -441,7 +439,7 @@ where
type Output = ();
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let this = self.project();
let this = unsafe { self.get_unchecked_mut() };
let mut inner = this.inner.as_ref().borrow_mut();
inner.waker.register(cx.waker());
@ -490,12 +488,10 @@ where
}
}
#[pin_project::pin_project(PinnedDrop)]
struct OpenWaitingConnection<F, Io>
where
Io: AsyncRead + AsyncWrite + Unpin + 'static,
{
#[pin]
fut: F,
key: Key,
h2: Option<
@ -529,13 +525,12 @@ where
}
}
#[pin_project::pinned_drop]
impl<F, Io> PinnedDrop for OpenWaitingConnection<F, Io>
impl<F, Io> Drop for OpenWaitingConnection<F, Io>
where
Io: AsyncRead + AsyncWrite + Unpin + 'static,
{
fn drop(self: Pin<&mut Self>) {
if let Some(inner) = self.project().inner.take() {
fn drop(&mut self) {
if let Some(inner) = self.inner.take() {
let mut inner = inner.as_ref().borrow_mut();
inner.release();
inner.check_availibility();
@ -550,8 +545,8 @@ where
{
type Output = ();
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let this = self.as_mut().project();
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let this = unsafe { self.get_unchecked_mut() };
if let Some(ref mut h2) = this.h2 {
return match Pin::new(h2).poll(cx) {
@ -576,7 +571,7 @@ where
};
}
match this.fut.poll(cx) {
match unsafe { Pin::new_unchecked(&mut this.fut) }.poll(cx) {
Poll::Ready(Err(err)) => {
let _ = this.inner.take();
if let Some(rx) = this.rx.take() {
@ -594,8 +589,8 @@ where
)));
Poll::Ready(())
} else {
*this.h2 = Some(handshake(io).boxed_local());
self.poll(cx)
this.h2 = Some(handshake(io).boxed_local());
unsafe { Pin::new_unchecked(this) }.poll(cx)
}
}
Poll::Pending => Poll::Pending,

View File

@ -1,4 +1,4 @@
use std::cell::RefCell;
use std::cell::UnsafeCell;
use std::rc::Rc;
use std::task::{Context, Poll};
@ -6,15 +6,11 @@ use actix_service::Service;
#[doc(hidden)]
/// Service that allows to turn non-clone service to a service with `Clone` impl
///
/// # Panics
/// CloneableService might panic with some creative use of thread local storage.
/// See https://github.com/actix/actix-web/issues/1295 for example
pub(crate) struct CloneableService<T: Service>(Rc<RefCell<T>>);
pub(crate) struct CloneableService<T: Service>(Rc<UnsafeCell<T>>);
impl<T: Service> CloneableService<T> {
pub(crate) fn new(service: T) -> Self {
Self(Rc::new(RefCell::new(service)))
Self(Rc::new(UnsafeCell::new(service)))
}
}
@ -31,10 +27,10 @@ impl<T: Service> Service for CloneableService<T> {
type Future = T::Future;
fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
self.0.borrow_mut().poll_ready(cx)
unsafe { &mut *self.0.as_ref().get() }.poll_ready(cx)
}
fn call(&mut self, req: T::Request) -> Self::Future {
self.0.borrow_mut().call(req)
unsafe { &mut *self.0.as_ref().get() }.call(req)
}
}
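
As an aside, the doc comment removed above points at actix/actix-web#1295 and the "already borrowed: BorrowMutError" panic; a tiny standalone sketch of that failure mode with a plain `RefCell` (no actix types involved):

```rust
use std::cell::RefCell;

fn main() {
    let cell = RefCell::new(0u32);

    // First mutable borrow is still alive...
    let _outer = cell.borrow_mut();

    // ...so a second one (e.g. re-entering the same service through
    // thread-local storage) panics with "already borrowed: BorrowMutError".
    let reentrant = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
        let _inner = cell.borrow_mut();
    }));
    assert!(reentrant.is_err());
}
```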

View File

@ -1,4 +1,4 @@
use std::cell::Cell;
use std::cell::UnsafeCell;
use std::fmt::Write;
use std::rc::Rc;
use std::time::Duration;
@ -7,7 +7,7 @@ use std::{fmt, net};
use actix_rt::time::{delay_for, delay_until, Delay, Instant};
use bytes::BytesMut;
use futures_util::{future, FutureExt};
use time::OffsetDateTime;
use time;
// "Sun, 06 Nov 1994 08:49:37 GMT".len()
const DATE_VALUE_LENGTH: usize = 29;
@ -211,7 +211,7 @@ impl Date {
}
fn update(&mut self) {
self.pos = 0;
write!(self, "{}", OffsetDateTime::now().format("%a, %d %b %Y %H:%M:%S GMT")).unwrap();
write!(self, "{}", time::at_utc(time::get_time()).rfc822()).unwrap();
}
}
@ -228,24 +228,24 @@ impl fmt::Write for Date {
struct DateService(Rc<DateServiceInner>);
struct DateServiceInner {
current: Cell<Option<(Date, Instant)>>,
current: UnsafeCell<Option<(Date, Instant)>>,
}
impl DateServiceInner {
fn new() -> Self {
DateServiceInner {
current: Cell::new(None),
current: UnsafeCell::new(None),
}
}
fn reset(&self) {
self.current.take();
unsafe { (&mut *self.current.get()).take() };
}
fn update(&self) {
let now = Instant::now();
let date = Date::new();
self.current.set(Some((date, now)));
*(unsafe { &mut *self.current.get() }) = Some((date, now));
}
}
@ -255,7 +255,7 @@ impl DateService {
}
fn check_date(&self) {
if self.0.current.get().is_none() {
if unsafe { (&*self.0.current.get()).is_none() } {
self.0.update();
// periodic date update
@ -269,12 +269,12 @@ impl DateService {
fn now(&self) -> Instant {
self.check_date();
self.0.current.get().unwrap().1
unsafe { (&*self.0.current.get()).as_ref().unwrap().1 }
}
fn set_date<F: FnMut(&Date)>(&self, mut f: F) {
self.check_date();
f(&self.0.current.get().unwrap().0);
f(&unsafe { (&*self.0.current.get()).as_ref().unwrap().0 })
}
}
@ -282,19 +282,6 @@ impl DateService {
mod tests {
use super::*;
// Test modifying the date from within the closure
// passed to `set_date`
#[test]
fn test_evil_date() {
let service = DateService::new();
// Make sure that `check_date` doesn't try to spawn a task
service.0.update();
service.set_date(|_| {
service.0.reset()
});
}
#[test]
fn test_date_len() {
assert_eq!(DATE_VALUE_LENGTH, "Sun, 06 Nov 1994 08:49:37 GMT".len());

View File

@ -1,6 +1,7 @@
use std::borrow::Cow;
use time::{Duration, OffsetDateTime};
use chrono::Duration;
use time::Tm;
use super::{Cookie, SameSite};
@ -63,13 +64,13 @@ impl CookieBuilder {
/// use actix_http::cookie::Cookie;
///
/// let c = Cookie::build("foo", "bar")
/// .expires(time::OffsetDateTime::now())
/// .expires(time::now())
/// .finish();
///
/// assert!(c.expires().is_some());
/// ```
#[inline]
pub fn expires(mut self, when: OffsetDateTime) -> CookieBuilder {
pub fn expires(mut self, when: Tm) -> CookieBuilder {
self.cookie.set_expires(when);
self
}
@ -107,9 +108,7 @@ impl CookieBuilder {
/// ```
#[inline]
pub fn max_age_time(mut self, value: Duration) -> CookieBuilder {
// Truncate any nanoseconds from the Duration, as they aren't represented within `Max-Age`
// and would cause two otherwise identical `Cookie` instances to not be equivalent to one another.
self.cookie.set_max_age(Duration::seconds(value.whole_seconds()));
self.cookie.set_max_age(value);
self
}
@ -213,7 +212,7 @@ impl CookieBuilder {
///
/// ```rust
/// use actix_http::cookie::Cookie;
/// use time::Duration;
/// use chrono::Duration;
///
/// let c = Cookie::build("foo", "bar")
/// .permanent()

View File

@ -10,26 +10,18 @@ use std::fmt;
/// attribute is "Strict", then the cookie is never sent in cross-site requests.
/// If the `SameSite` attribute is "Lax", the cookie is only sent in cross-site
/// requests with "safe" HTTP methods, i.e, `GET`, `HEAD`, `OPTIONS`, `TRACE`.
/// If the `SameSite` attribute is not present then the cookie will be sent as
/// normal. In some browsers, this will implicitly handle the cookie as if "Lax"
/// and in others, "None". It's best to explicitly set the `SameSite` attribute
/// to avoid inconsistent behavior.
///
/// **Note:** Depending on browser, the `Secure` attribute may be required for
/// `SameSite` "None" cookies to be accepted.
/// If the `SameSite` attribute is not present (made explicit via the
/// `SameSite::None` variant), then the cookie will be sent as normal.
///
/// **Note:** This cookie attribute is an HTTP draft! Its meaning and definition
/// are subject to change.
///
/// More info about these draft changes can be found in the draft spec:
/// - https://tools.ietf.org/html/draft-west-cookie-incrementalism-00
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum SameSite {
/// The "Strict" `SameSite` attribute.
Strict,
/// The "Lax" `SameSite` attribute.
Lax,
/// The "None" `SameSite` attribute.
/// No `SameSite` attribute.
None,
}
@ -100,7 +92,7 @@ impl fmt::Display for SameSite {
match *self {
SameSite::Strict => write!(f, "Strict"),
SameSite::Lax => write!(f, "Lax"),
SameSite::None => write!(f, "None"),
SameSite::None => Ok(()),
}
}
}

View File

@ -1,7 +1,7 @@
use std::collections::HashSet;
use std::mem::replace;
use time::{Duration, OffsetDateTime};
use chrono::Duration;
use super::delta::DeltaCookie;
use super::Cookie;
@ -188,7 +188,7 @@ impl CookieJar {
///
/// ```rust
/// use actix_http::cookie::{CookieJar, Cookie};
/// use time::Duration;
/// use chrono::Duration;
///
/// let mut jar = CookieJar::new();
///
@ -202,7 +202,7 @@ impl CookieJar {
/// let delta: Vec<_> = jar.delta().collect();
/// assert_eq!(delta.len(), 1);
/// assert_eq!(delta[0].name(), "name");
/// assert_eq!(delta[0].max_age(), Some(Duration::zero()));
/// assert_eq!(delta[0].max_age(), Some(Duration::seconds(0)));
/// ```
///
/// Removing a new cookie does not result in a _removal_ cookie:
@ -220,8 +220,8 @@ impl CookieJar {
pub fn remove(&mut self, mut cookie: Cookie<'static>) {
if self.original_cookies.contains(cookie.name()) {
cookie.set_value("");
cookie.set_max_age(Duration::zero());
cookie.set_expires(OffsetDateTime::now() - Duration::days(365));
cookie.set_max_age(Duration::seconds(0));
cookie.set_expires(time::now() - Duration::days(365));
self.delta_cookies.replace(DeltaCookie::removed(cookie));
} else {
self.delta_cookies.remove(cookie.name());
@ -239,7 +239,7 @@ impl CookieJar {
///
/// ```rust
/// use actix_http::cookie::{CookieJar, Cookie};
/// use time::Duration;
/// use chrono::Duration;
///
/// let mut jar = CookieJar::new();
///
@ -533,7 +533,7 @@ mod test {
#[test]
#[cfg(feature = "secure-cookies")]
fn delta() {
use time::Duration;
use chrono::Duration;
use std::collections::HashMap;
let mut c = CookieJar::new();
@ -556,7 +556,7 @@ mod test {
assert!(names.get("test2").unwrap().is_none());
assert!(names.get("test3").unwrap().is_none());
assert!(names.get("test4").unwrap().is_none());
assert_eq!(names.get("original").unwrap(), &Some(Duration::zero()));
assert_eq!(names.get("original").unwrap(), &Some(Duration::seconds(0)));
}
#[test]

View File

@ -47,7 +47,7 @@
//! ```
#![doc(html_root_url = "https://docs.rs/cookie/0.11")]
#![deny(missing_docs)]
#![warn(missing_docs)]
mod builder;
mod delta;
@ -65,8 +65,9 @@ use std::borrow::Cow;
use std::fmt;
use std::str::FromStr;
use chrono::Duration;
use percent_encoding::{percent_encode, AsciiSet, CONTROLS};
use time::{Duration, OffsetDateTime};
use time::Tm;
pub use self::builder::CookieBuilder;
pub use self::draft::*;
@ -171,7 +172,7 @@ pub struct Cookie<'c> {
/// The cookie's value.
value: CookieStr,
/// The cookie's expiration, if any.
expires: Option<OffsetDateTime>,
expires: Option<Tm>,
/// The cookie's maximum age, if any.
max_age: Option<Duration>,
/// The cookie's domain, if any.
@ -478,7 +479,7 @@ impl<'c> Cookie<'c> {
/// assert_eq!(c.max_age(), None);
///
/// let c = Cookie::parse("name=value; Max-Age=3600").unwrap();
/// assert_eq!(c.max_age().map(|age| age.whole_hours()), Some(1));
/// assert_eq!(c.max_age().map(|age| age.num_hours()), Some(1));
/// ```
#[inline]
pub fn max_age(&self) -> Option<Duration> {
@ -543,10 +544,10 @@ impl<'c> Cookie<'c> {
/// let expire_time = "Wed, 21 Oct 2017 07:28:00 GMT";
/// let cookie_str = format!("name=value; Expires={}", expire_time);
/// let c = Cookie::parse(cookie_str).unwrap();
/// assert_eq!(c.expires().map(|t| t.year()), Some(2017));
/// assert_eq!(c.expires().map(|t| t.tm_year), Some(117));
/// ```
#[inline]
pub fn expires(&self) -> Option<OffsetDateTime> {
pub fn expires(&self) -> Option<Tm> {
self.expires
}
@ -644,7 +645,7 @@ impl<'c> Cookie<'c> {
///
/// ```rust
/// use actix_http::cookie::Cookie;
/// use time::Duration;
/// use chrono::Duration;
///
/// let mut c = Cookie::new("name", "value");
/// assert_eq!(c.max_age(), None);
@ -697,19 +698,18 @@ impl<'c> Cookie<'c> {
///
/// ```rust
/// use actix_http::cookie::Cookie;
/// use time::{Duration, OffsetDateTime};
///
/// let mut c = Cookie::new("name", "value");
/// assert_eq!(c.expires(), None);
///
/// let mut now = OffsetDateTime::now();
/// now += Duration::week();
/// let mut now = time::now();
/// now.tm_year += 1;
///
/// c.set_expires(now);
/// assert!(c.expires().is_some())
/// ```
#[inline]
pub fn set_expires(&mut self, time: OffsetDateTime) {
pub fn set_expires(&mut self, time: Tm) {
self.expires = Some(time);
}
@ -720,7 +720,7 @@ impl<'c> Cookie<'c> {
///
/// ```rust
/// use actix_http::cookie::Cookie;
/// use time::Duration;
/// use chrono::Duration;
///
/// let mut c = Cookie::new("foo", "bar");
/// assert!(c.expires().is_none());
@ -733,7 +733,7 @@ impl<'c> Cookie<'c> {
pub fn make_permanent(&mut self) {
let twenty_years = Duration::days(365 * 20);
self.set_max_age(twenty_years);
self.set_expires(OffsetDateTime::now() + twenty_years);
self.set_expires(time::now() + twenty_years);
}
fn fmt_parameters(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@ -746,7 +746,9 @@ impl<'c> Cookie<'c> {
}
if let Some(same_site) = self.same_site() {
write!(f, "; SameSite={}", same_site)?;
if !same_site.is_none() {
write!(f, "; SameSite={}", same_site)?;
}
}
if let Some(path) = self.path() {
@ -758,11 +760,11 @@ impl<'c> Cookie<'c> {
}
if let Some(max_age) = self.max_age() {
write!(f, "; Max-Age={}", max_age.whole_seconds())?;
write!(f, "; Max-Age={}", max_age.num_seconds())?;
}
if let Some(time) = self.expires() {
write!(f, "; Expires={}", time.format("%a, %d %b %Y %H:%M:%S GMT"))?;
write!(f, "; Expires={}", time.rfc822())?;
}
Ok(())
@ -990,7 +992,7 @@ impl<'a, 'b> PartialEq<Cookie<'b>> for Cookie<'a> {
#[cfg(test)]
mod tests {
use super::{Cookie, SameSite};
use time::PrimitiveDateTime;
use time::strptime;
#[test]
fn format() {
@ -1015,7 +1017,7 @@ mod tests {
assert_eq!(&cookie.to_string(), "foo=bar; Domain=www.rust-lang.org");
let time_str = "Wed, 21 Oct 2015 07:28:00 GMT";
let expires = PrimitiveDateTime::parse(time_str, "%a, %d %b %Y %H:%M:%S").unwrap().assume_utc();
let expires = strptime(time_str, "%a, %d %b %Y %H:%M:%S %Z").unwrap();
let cookie = Cookie::build("foo", "bar").expires(expires).finish();
assert_eq!(
&cookie.to_string(),
@ -1035,7 +1037,7 @@ mod tests {
let cookie = Cookie::build("foo", "bar")
.same_site(SameSite::None)
.finish();
assert_eq!(&cookie.to_string(), "foo=bar; SameSite=None");
assert_eq!(&cookie.to_string(), "foo=bar");
}
#[test]

View File

@ -5,13 +5,11 @@ use std::error::Error;
use std::fmt;
use std::str::Utf8Error;
use chrono::Duration;
use percent_encoding::percent_decode;
use time::Duration;
use super::{Cookie, CookieStr, SameSite};
use crate::time_parser;
/// Enum corresponding to a parsing error.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum ParseError {
@ -149,7 +147,7 @@ fn parse_inner<'c>(s: &str, decode: bool) -> Result<Cookie<'c>, ParseError> {
Ok(val) => {
// Don't panic if the max age seconds is greater than what's supported by
// `Duration`.
let val = cmp::min(val, Duration::max_value().whole_seconds());
let val = cmp::min(val, Duration::max_value().num_seconds());
Some(Duration::seconds(val))
}
Err(_) => continue,
@ -181,14 +179,16 @@ fn parse_inner<'c>(s: &str, decode: bool) -> Result<Cookie<'c>, ParseError> {
}
}
("expires", Some(v)) => {
// Try parsing with three date formats according to
// Try strptime with three date formats according to
// http://tools.ietf.org/html/rfc2616#section-3.3.1. Try
// additional ones as encountered in the real world.
let tm = time_parser::parse_http_date(v)
.or_else(|| time::parse(v, "%a, %d-%b-%Y %H:%M:%S").ok());
let tm = time::strptime(v, "%a, %d %b %Y %H:%M:%S %Z")
.or_else(|_| time::strptime(v, "%A, %d-%b-%y %H:%M:%S %Z"))
.or_else(|_| time::strptime(v, "%a, %d-%b-%Y %H:%M:%S %Z"))
.or_else(|_| time::strptime(v, "%a %b %d %H:%M:%S %Y"));
if let Some(time) = tm {
cookie.expires = Some(time.assume_utc())
if let Ok(time) = tm {
cookie.expires = Some(time)
}
}
_ => {
@ -216,7 +216,8 @@ where
#[cfg(test)]
mod tests {
use super::{Cookie, SameSite};
use time::{Duration, PrimitiveDateTime};
use chrono::Duration;
use time::strptime;
macro_rules! assert_eq_parse {
($string:expr, $expected:expr) => {
@ -376,7 +377,7 @@ mod tests {
);
let time_str = "Wed, 21 Oct 2015 07:28:00 GMT";
let expires = PrimitiveDateTime::parse(time_str, "%a, %d %b %Y %H:%M:%S").unwrap().assume_utc();
let expires = strptime(time_str, "%a, %d %b %Y %H:%M:%S %Z").unwrap();
expected.set_expires(expires);
assert_eq_parse!(
" foo=bar ;HttpOnly; Secure; Max-Age=4; Path=/foo; \
@ -385,7 +386,7 @@ mod tests {
);
unexpected.set_domain("foo.com");
let bad_expires = PrimitiveDateTime::parse(time_str, "%a, %d %b %Y %H:%S:%M").unwrap().assume_utc();
let bad_expires = strptime(time_str, "%a, %d %b %Y %H:%S:%M %Z").unwrap();
expected.set_expires(bad_expires);
assert_ne_parse!(
" foo=bar ;HttpOnly; Secure; Max-Age=4; Path=/foo; \
@ -413,9 +414,8 @@ mod tests {
#[test]
fn do_not_panic_on_large_max_ages() {
let max_duration = Duration::max_value();
let expected = Cookie::build("foo", "bar").max_age_time(max_duration).finish();
let overflow_duration = max_duration.checked_add(Duration::nanoseconds(1)).unwrap_or(max_duration);
assert_eq_parse!(format!(" foo=bar; Max-Age={:?}", overflow_duration.whole_seconds()), expected);
let max_seconds = Duration::max_value().num_seconds();
let expected = Cookie::build("foo", "bar").max_age(max_seconds).finish();
assert_eq_parse!(format!(" foo=bar; Max-Age={:?}", max_seconds + 1), expected);
}
}

View File

@ -1,4 +1,5 @@
//! Error and Result module
use std::any::TypeId;
use std::cell::RefCell;
use std::io::Write;
use std::str::Utf8Error;
@ -82,10 +83,25 @@ pub trait ResponseError: fmt::Debug + fmt::Display {
resp.set_body(Body::from(buf))
}
downcast_get_type_id!();
#[doc(hidden)]
fn __private_get_type_id__(&self) -> TypeId
where
Self: 'static,
{
TypeId::of::<Self>()
}
}
downcast!(ResponseError);
impl dyn ResponseError + 'static {
/// Downcasts a response error to a specific type.
pub fn downcast_ref<T: ResponseError + 'static>(&self) -> Option<&T> {
if self.__private_get_type_id__() == TypeId::of::<T>() {
unsafe { Some(&*(self as *const dyn ResponseError as *const T)) }
} else {
None
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
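
A small usage sketch for the `downcast_ref` inherent method added on `dyn ResponseError` above; `DbError` is a hypothetical type made up purely for illustration (the crate's existing `derive_more` dependency provides `Display`):

```rust
use actix_http::error::ResponseError;
use derive_more::Display;

// Hypothetical error type; any Debug + Display type can implement ResponseError
// and fall back to the default 500 response.
#[derive(Debug, Display)]
#[display(fmt = "database unavailable")]
struct DbError;

impl ResponseError for DbError {}

fn is_db_error(err: &(dyn ResponseError + 'static)) -> bool {
    // The new downcast_ref compares TypeIds to recover the concrete type
    // behind the trait object.
    err.downcast_ref::<DbError>().is_some()
}

fn main() {
    assert!(is_db_error(&DbError));
}
```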

View File

@ -28,30 +28,33 @@ impl Extensions {
/// Check if container contains entry
pub fn contains<T: 'static>(&self) -> bool {
self.map.contains_key(&TypeId::of::<T>())
self.map.get(&TypeId::of::<T>()).is_some()
}
/// Get a reference to a type previously inserted on this `Extensions`.
pub fn get<T: 'static>(&self) -> Option<&T> {
self.map
.get(&TypeId::of::<T>())
.and_then(|boxed| boxed.downcast_ref())
.and_then(|boxed| (&**boxed as &(dyn Any + 'static)).downcast_ref())
}
/// Get a mutable reference to a type previously inserted on this `Extensions`.
pub fn get_mut<T: 'static>(&mut self) -> Option<&mut T> {
self.map
.get_mut(&TypeId::of::<T>())
.and_then(|boxed| boxed.downcast_mut())
.and_then(|boxed| (&mut **boxed as &mut (dyn Any + 'static)).downcast_mut())
}
/// Remove a type from this `Extensions`.
///
/// If a extension of this type existed, it will be returned.
pub fn remove<T: 'static>(&mut self) -> Option<T> {
self.map
.remove(&TypeId::of::<T>())
.and_then(|boxed| boxed.downcast().ok().map(|boxed| *boxed))
self.map.remove(&TypeId::of::<T>()).and_then(|boxed| {
(boxed as Box<dyn Any + 'static>)
.downcast()
.ok()
.map(|boxed| *boxed)
})
}
/// Clear the `Extensions` of all inserted extensions.
@ -67,92 +70,6 @@ impl fmt::Debug for Extensions {
}
}
#[test]
fn test_remove() {
let mut map = Extensions::new();
map.insert::<i8>(123);
assert!(map.get::<i8>().is_some());
map.remove::<i8>();
assert!(map.get::<i8>().is_none());
}
#[test]
fn test_clear() {
let mut map = Extensions::new();
map.insert::<i8>(8);
map.insert::<i16>(16);
map.insert::<i32>(32);
assert!(map.contains::<i8>());
assert!(map.contains::<i16>());
assert!(map.contains::<i32>());
map.clear();
assert!(!map.contains::<i8>());
assert!(!map.contains::<i16>());
assert!(!map.contains::<i32>());
map.insert::<i8>(10);
assert_eq!(*map.get::<i8>().unwrap(), 10);
}
#[test]
fn test_integers() {
let mut map = Extensions::new();
map.insert::<i8>(8);
map.insert::<i16>(16);
map.insert::<i32>(32);
map.insert::<i64>(64);
map.insert::<i128>(128);
map.insert::<u8>(8);
map.insert::<u16>(16);
map.insert::<u32>(32);
map.insert::<u64>(64);
map.insert::<u128>(128);
assert!(map.get::<i8>().is_some());
assert!(map.get::<i16>().is_some());
assert!(map.get::<i32>().is_some());
assert!(map.get::<i64>().is_some());
assert!(map.get::<i128>().is_some());
assert!(map.get::<u8>().is_some());
assert!(map.get::<u16>().is_some());
assert!(map.get::<u32>().is_some());
assert!(map.get::<u64>().is_some());
assert!(map.get::<u128>().is_some());
}
#[test]
fn test_composition() {
struct Magi<T>(pub T);
struct Madoka {
pub god: bool,
}
struct Homura {
pub attempts: usize,
}
struct Mami {
pub guns: usize,
}
let mut map = Extensions::new();
map.insert(Magi(Madoka { god: false }));
map.insert(Magi(Homura { attempts: 0 }));
map.insert(Magi(Mami { guns: 999 }));
assert!(!map.get::<Magi<Madoka>>().unwrap().0.god);
assert_eq!(0, map.get::<Magi<Homura>>().unwrap().0.attempts);
assert_eq!(999, map.get::<Magi<Mami>>().unwrap().0.guns);
}
#[test]
fn test_extensions() {
#[derive(Debug, PartialEq)]

View File

@ -158,16 +158,14 @@ where
#[pin_project::pin_project]
struct ServiceResponse<F, I, E, B> {
#[pin]
state: ServiceResponseState<F, B>,
config: ServiceConfig,
buffer: Option<Bytes>,
_t: PhantomData<(I, E)>,
}
#[pin_project::pin_project]
enum ServiceResponseState<F, B> {
ServiceCall(#[pin] F, Option<SendResponse<Bytes>>),
ServiceCall(F, Option<SendResponse<Bytes>>),
SendPayload(SendStream<Bytes>, ResponseBody<B>),
}
@ -249,14 +247,12 @@ where
{
type Output = ();
#[pin_project::project]
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let mut this = self.as_mut().project();
#[project]
match this.state.project() {
ServiceResponseState::ServiceCall(call, send) => {
match call.poll(cx) {
match this.state {
ServiceResponseState::ServiceCall(ref mut call, ref mut send) => {
match unsafe { Pin::new_unchecked(call) }.poll(cx) {
Poll::Ready(Ok(res)) => {
let (res, body) = res.into().replace_body(());
@ -277,7 +273,8 @@ where
if size.is_eof() {
Poll::Ready(())
} else {
this.state.set(ServiceResponseState::SendPayload(stream, body));
*this.state =
ServiceResponseState::SendPayload(stream, body);
self.poll(cx)
}
}
@ -303,10 +300,10 @@ where
if size.is_eof() {
Poll::Ready(())
} else {
this.state.set(ServiceResponseState::SendPayload(
*this.state = ServiceResponseState::SendPayload(
stream,
body.into_body(),
));
);
self.poll(cx)
}
}

View File

@ -63,7 +63,7 @@ header! {
(AcceptCharset, ACCEPT_CHARSET) => (QualityItem<Charset>)+
test_accept_charset {
// Test case from RFC
/// Test case from RFC
test_header!(test1, vec![b"iso-8859-5, unicode-1-1;q=0.8"]);
}
}

View File

@ -1,46 +1,59 @@
use std::fmt::{self, Display};
use std::io::Write;
use std::str::FromStr;
use std::time::{SystemTime, UNIX_EPOCH};
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use bytes::{buf::BufMutExt, BytesMut};
use http::header::{HeaderValue, InvalidHeaderValue};
use time::{PrimitiveDateTime, OffsetDateTime, offset};
use crate::error::ParseError;
use crate::header::IntoHeaderValue;
use crate::time_parser;
/// A timestamp with HTTP formatting and parsing
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct HttpDate(OffsetDateTime);
pub struct HttpDate(time::Tm);
impl FromStr for HttpDate {
type Err = ParseError;
fn from_str(s: &str) -> Result<HttpDate, ParseError> {
match time_parser::parse_http_date(s) {
Some(t) => Ok(HttpDate(t.assume_utc())),
None => Err(ParseError::Header)
match time::strptime(s, "%a, %d %b %Y %T %Z")
.or_else(|_| time::strptime(s, "%A, %d-%b-%y %T %Z"))
.or_else(|_| time::strptime(s, "%c"))
{
Ok(t) => Ok(HttpDate(t)),
Err(_) => Err(ParseError::Header),
}
}
}
impl Display for HttpDate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.0.format("%a, %d %b %Y %H:%M:%S GMT"), f)
fmt::Display::fmt(&self.0.to_utc().rfc822(), f)
}
}
impl From<OffsetDateTime> for HttpDate {
fn from(dt: OffsetDateTime) -> HttpDate {
HttpDate(dt)
impl From<time::Tm> for HttpDate {
fn from(tm: time::Tm) -> HttpDate {
HttpDate(tm)
}
}
impl From<SystemTime> for HttpDate {
fn from(sys: SystemTime) -> HttpDate {
HttpDate(PrimitiveDateTime::from(sys).assume_utc())
let tmspec = match sys.duration_since(UNIX_EPOCH) {
Ok(dur) => {
time::Timespec::new(dur.as_secs() as i64, dur.subsec_nanos() as i32)
}
Err(err) => {
let neg = err.duration();
time::Timespec::new(
-(neg.as_secs() as i64),
-(neg.subsec_nanos() as i32),
)
}
};
HttpDate(time::at_utc(tmspec))
}
}
@ -49,45 +62,56 @@ impl IntoHeaderValue for HttpDate {
fn try_into(self) -> Result<HeaderValue, Self::Error> {
let mut wrt = BytesMut::with_capacity(29).writer();
write!(wrt, "{}", self.0.to_offset(offset!(UTC)).format("%a, %d %b %Y %H:%M:%S GMT")).unwrap();
write!(wrt, "{}", self.0.rfc822()).unwrap();
HeaderValue::from_maybe_shared(wrt.get_mut().split().freeze())
}
}
impl From<HttpDate> for SystemTime {
fn from(date: HttpDate) -> SystemTime {
let dt = date.0;
let epoch = OffsetDateTime::unix_epoch();
UNIX_EPOCH + (dt - epoch)
let spec = date.0.to_timespec();
if spec.sec >= 0 {
UNIX_EPOCH + Duration::new(spec.sec as u64, spec.nsec as u32)
} else {
UNIX_EPOCH - Duration::new(spec.sec as u64, spec.nsec as u32)
}
}
}
#[cfg(test)]
mod tests {
use super::HttpDate;
use time::{PrimitiveDateTime, date, time};
use time::Tm;
const NOV_07: HttpDate = HttpDate(Tm {
tm_nsec: 0,
tm_sec: 37,
tm_min: 48,
tm_hour: 8,
tm_mday: 7,
tm_mon: 10,
tm_year: 94,
tm_wday: 0,
tm_isdst: 0,
tm_yday: 0,
tm_utcoff: 0,
});
#[test]
fn test_date() {
let nov_07 = HttpDate(PrimitiveDateTime::new(
date!(1994-11-07),
time!(8:48:37)
).assume_utc());
assert_eq!(
"Sun, 07 Nov 1994 08:48:37 GMT".parse::<HttpDate>().unwrap(),
nov_07
NOV_07
);
assert_eq!(
"Sunday, 07-Nov-94 08:48:37 GMT"
.parse::<HttpDate>()
.unwrap(),
nov_07
NOV_07
);
assert_eq!(
"Sun Nov 7 08:48:37 1994".parse::<HttpDate>().unwrap(),
nov_07
NOV_07
);
assert!("this-is-no-date".parse::<HttpDate>().is_err());
}

View File

@ -1,5 +1,5 @@
//! Basic http primitives for actix-net framework.
#![deny(rust_2018_idioms, warnings)]
#![warn(rust_2018_idioms, warnings)]
#![allow(
clippy::type_complexity,
clippy::too_many_arguments,
@ -10,9 +10,6 @@
#[macro_use]
extern crate log;
#[macro_use]
mod macros;
pub mod body;
mod builder;
pub mod client;
@ -30,7 +27,6 @@ mod payload;
mod request;
mod response;
mod service;
mod time_parser;
pub mod cookie;
pub mod error;

View File

@ -1,95 +0,0 @@
#[macro_export]
macro_rules! downcast_get_type_id {
() => {
/// A helper method to get the type ID of the type
/// this trait is implemented on.
/// This method is unsafe to *implement*, since `downcast_ref` relies
/// on the returned `TypeId` to perform a cast.
///
/// Unfortunately, Rust has no notion of a trait method that is
/// unsafe to implement (marking it as `unsafe` makes it unsafe
/// to *call*). As a workaround, we require this method
/// to return a private type along with the `TypeId`. This
/// private type (`PrivateHelper`) has a private constructor,
/// making it impossible for safe code to construct outside of
/// this module. This ensures that safe code cannot violate
/// type-safety by implementing this method.
#[doc(hidden)]
fn __private_get_type_id__(&self) -> (std::any::TypeId, PrivateHelper)
where
Self: 'static,
{
(std::any::TypeId::of::<Self>(), PrivateHelper(()))
}
}
}
//Generate implementation for dyn $name
#[macro_export]
macro_rules! downcast {
($name:ident) => {
/// A struct with a private constructor, for use with
/// `__private_get_type_id__`. Its single field is private,
/// ensuring that it can only be constructed from this module
#[doc(hidden)]
pub struct PrivateHelper(());
impl dyn $name + 'static {
/// Downcasts generic body to a specific type.
pub fn downcast_ref<T: $name + 'static>(&self) -> Option<&T> {
if self.__private_get_type_id__().0 == std::any::TypeId::of::<T>() {
// Safety: external crates cannot override the default
// implementation of `__private_get_type_id__`, since
// it requires returning a private type. We can therefore
// rely on the returned `TypeId`, which ensures that this
// case is correct.
unsafe { Some(&*(self as *const dyn $name as *const T)) }
} else {
None
}
}
/// Downcasts a generic body to a mutable specific type.
pub fn downcast_mut<T: $name + 'static>(&mut self) -> Option<&mut T> {
if self.__private_get_type_id__().0 == std::any::TypeId::of::<T>() {
// Safety: external crates cannot override the default
// implementation of `__private_get_type_id__`, since
// it requires returning a private type. We can therefore
// rely on the returned `TypeId`, which ensures that this
// case is correct.
unsafe {
Some(&mut *(self as *const dyn $name as *const T as *mut T))
}
} else {
None
}
}
}
};
}
#[cfg(test)]
mod tests {
trait MB {
downcast_get_type_id!();
}
downcast!(MB);
impl MB for String {}
impl MB for () {}
#[actix_rt::test]
async fn test_any_casting() {
let mut body = String::from("hello cast");
let resp_body: &mut dyn MB = &mut body;
let body = resp_body.downcast_ref::<String>().unwrap();
assert_eq!(body, "hello cast");
let body = &mut resp_body.downcast_mut::<String>().unwrap();
body.push_str("!");
let body = resp_body.downcast_ref::<String>().unwrap();
assert_eq!(body, "hello cast!");
let not_body = resp_body.downcast_ref::<()>();
assert!(not_body.is_none());
}
}

View File

@ -1,42 +0,0 @@
use time::{OffsetDateTime, PrimitiveDateTime, Date};
/// Attempt to parse a `time` string as one of either RFC 1123, RFC 850, or asctime.
pub fn parse_http_date(time: &str) -> Option<PrimitiveDateTime> {
try_parse_rfc_1123(time)
.or_else(|| try_parse_rfc_850(time))
.or_else(|| try_parse_asctime(time))
}
/// Attempt to parse a `time` string as a RFC 1123 formatted date time string.
fn try_parse_rfc_1123(time: &str) -> Option<PrimitiveDateTime> {
time::parse(time, "%a, %d %b %Y %H:%M:%S").ok()
}
/// Attempt to parse a `time` string as a RFC 850 formatted date time string.
fn try_parse_rfc_850(time: &str) -> Option<PrimitiveDateTime> {
match PrimitiveDateTime::parse(time, "%A, %d-%b-%y %H:%M:%S") {
Ok(dt) => {
// If the `time` string contains a two-digit year, then as per RFC 2616 § 19.3,
// we consider the year as part of this century if it's within the next 50 years,
// otherwise we consider as part of the previous century.
let now = OffsetDateTime::now();
let century_start_year = (now.year() / 100) * 100;
let mut expanded_year = century_start_year + dt.year();
if expanded_year > now.year() + 50 {
expanded_year -= 100;
}
match Date::try_from_ymd(expanded_year, dt.month(), dt.day()) {
Ok(date) => Some(PrimitiveDateTime::new(date, dt.time())),
Err(_) => None
}
}
Err(_) => None
}
}
/// Attempt to parse a `time` string using ANSI C's `asctime` format.
fn try_parse_asctime(time: &str) -> Option<PrimitiveDateTime> {
time::parse(time, "%a %b %_d %H:%M:%S %Y").ok()
}
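
The RFC 850 branch above carries the only non-obvious logic in this deleted file, the RFC 2616 §19.3 two-digit-year expansion; a standalone sketch of just that arithmetic (no `time` crate, concrete years are illustrative):

```rust
/// Expand a two-digit RFC 850 year the way `try_parse_rfc_850` above does:
/// dates up to 50 years in the future stay in the current century,
/// anything later is pushed back into the previous one.
fn expand_two_digit_year(two_digit_year: i32, current_year: i32) -> i32 {
    let century_start_year = (current_year / 100) * 100;
    let mut expanded_year = century_start_year + two_digit_year;
    if expanded_year > current_year + 50 {
        expanded_year -= 100;
    }
    expanded_year
}

fn main() {
    // "Sunday, 06-Nov-94 ..." seen in 2020: 2000 + 94 = 2094 > 2070, so 1994.
    assert_eq!(expand_two_digit_year(94, 2020), 1994);
    // A "-30" year seen in 2020: 2000 + 30 = 2030 <= 2070, so it stays 2030.
    assert_eq!(expand_two_digit_year(30, 2020), 2030);
}
```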

View File

@ -1,9 +1,5 @@
# Changes
## [Unreleased] - 2020-xx-xx
* Update the `time` dependency to 0.2.5
## [0.2.1] - 2020-01-10
* Fix panic with already borrowed: BorrowMutError #1263

View File

@ -21,9 +21,9 @@ actix-service = "1.0.2"
futures = "0.3.1"
serde = "1.0"
serde_json = "1.0"
time = { version = "0.2.5", default-features = false, features = ["std"] }
time = "0.1.42"
[dev-dependencies]
actix-rt = "1.0.0"
actix-http = "1.0.1"
bytes = "0.5.3"
bytes = "0.5.3"

View File

@ -428,14 +428,14 @@ impl CookieIdentityInner {
let now = SystemTime::now();
if let Some(visit_deadline) = self.visit_deadline {
if now.duration_since(value.visit_timestamp?).ok()?
> visit_deadline
> visit_deadline.to_std().ok()?
{
return None;
}
}
if let Some(login_deadline) = self.login_deadline {
if now.duration_since(value.login_timestamp?).ok()?
> login_deadline
> login_deadline.to_std().ok()?
{
return None;
}
@ -855,7 +855,7 @@ mod tests {
let cv: CookieValue = serde_json::from_str(cookie.value()).unwrap();
assert_eq!(cv.identity, identity);
let now = SystemTime::now();
let t30sec_ago = now - Duration::seconds(30);
let t30sec_ago = now - Duration::seconds(30).to_std().unwrap();
match login_timestamp {
LoginTimestampCheck::NoTimestamp => assert_eq!(cv.login_timestamp, None),
LoginTimestampCheck::NewTimestamp => assert!(
@ -997,7 +997,7 @@ mod tests {
create_identity_server(|c| c.login_deadline(Duration::days(90))).await;
let cookie = login_cookie(
COOKIE_LOGIN,
Some(SystemTime::now() - Duration::days(180)),
Some(SystemTime::now() - Duration::days(180).to_std().unwrap()),
None,
);
let mut resp = test::call_service(
@ -1023,7 +1023,7 @@ mod tests {
let cookie = login_cookie(
COOKIE_LOGIN,
None,
Some(SystemTime::now() - Duration::days(180)),
Some(SystemTime::now() - Duration::days(180).to_std().unwrap()),
);
let mut resp = test::call_service(
&mut srv,
@ -1065,7 +1065,7 @@ mod tests {
.login_deadline(Duration::days(90))
})
.await;
let timestamp = SystemTime::now() - Duration::days(1);
let timestamp = SystemTime::now() - Duration::days(1).to_std().unwrap();
let cookie = login_cookie(COOKIE_LOGIN, Some(timestamp), Some(timestamp));
let mut resp = test::call_service(
&mut srv,

View File

@ -1,9 +1,5 @@
# Changes
## [0.2.1] - 2020-01-xx
* Remove the unused `time` dependency
## [0.2.0] - 2019-12-20
* Release
@ -48,4 +44,4 @@
* Split multipart support to separate crate
* Optimize multipart handling #634, #769
* Optimize multipart handling #634, #769

View File

@ -25,8 +25,9 @@ httparse = "1.3"
futures = "0.3.1"
log = "0.4"
mime = "0.3"
time = "0.1"
twoway = "0.2"
[dev-dependencies]
actix-rt = "1.0.0"
actix-http = "1.0.0"
actix-http = "1.0.0"

View File

@ -1,10 +1,5 @@
# Changes
## [Unreleased] - 2020-01-xx
* Update the `time` dependency to 0.2.5
* [#1292](https://github.com/actix/actix-web/pull/1292) Long lasting auto-prolonged session
## [0.3.0] - 2019-12-20
* Release

View File

@ -29,7 +29,7 @@ derive_more = "0.99.2"
futures = "0.3.1"
serde = "1.0"
serde_json = "1.0"
time = { version = "0.2.5", default-features = false, features = ["std"] }
time = "0.1.42"
[dev-dependencies]
actix-rt = "1.0.0"

View File

@ -27,7 +27,6 @@ use actix_web::{Error, HttpMessage, ResponseError};
use derive_more::{Display, From};
use futures::future::{ok, FutureExt, LocalBoxFuture, Ready};
use serde_json::error::Error as JsonError;
use time::{Duration, OffsetDateTime};
use crate::{Session, SessionStatus};
@ -57,8 +56,7 @@ struct CookieSessionInner {
domain: Option<String>,
secure: bool,
http_only: bool,
max_age: Option<Duration>,
expires_in: Option<Duration>,
max_age: Option<time::Duration>,
same_site: Option<SameSite>,
}
@ -73,7 +71,6 @@ impl CookieSessionInner {
secure: true,
http_only: true,
max_age: None,
expires_in: None,
same_site: None,
}
}
@ -99,10 +96,6 @@ impl CookieSessionInner {
cookie.set_domain(domain.clone());
}
if let Some(expires_in) = self.expires_in {
cookie.set_expires(OffsetDateTime::now() + expires_in);
}
if let Some(max_age) = self.max_age {
cookie.set_max_age(max_age);
}
@ -130,8 +123,8 @@ impl CookieSessionInner {
fn remove_cookie<B>(&self, res: &mut ServiceResponse<B>) -> Result<(), Error> {
let mut cookie = Cookie::named(self.name.clone());
cookie.set_value("");
cookie.set_max_age(Duration::zero());
cookie.set_expires(OffsetDateTime::now() - Duration::days(365));
cookie.set_max_age(time::Duration::seconds(0));
cookie.set_expires(time::now() - time::Duration::days(365));
let val = HeaderValue::from_str(&cookie.to_string())?;
res.headers_mut().append(SET_COOKIE, val);
@ -270,7 +263,7 @@ impl CookieSession {
/// Sets the `max-age` field in the session cookie being built.
pub fn max_age(self, seconds: i64) -> CookieSession {
self.max_age_time(Duration::seconds(seconds))
self.max_age_time(time::Duration::seconds(seconds))
}
/// Sets the `max-age` field in the session cookie being built.
@ -278,17 +271,6 @@ impl CookieSession {
Rc::get_mut(&mut self.0).unwrap().max_age = Some(value);
self
}
/// Sets the `expires` field in the session cookie being built.
pub fn expires_in(self, seconds: i64) -> CookieSession {
self.expires_in_time(Duration::seconds(seconds))
}
/// Sets the `expires` field in the session cookie being built.
pub fn expires_in_time(mut self, value: Duration) -> CookieSession {
Rc::get_mut(&mut self.0).unwrap().expires_in = Some(value);
self
}
}
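For context, a hedged usage sketch of the setter that remains after `expires_in` is removed, assuming the `CookieSession` API shown above (the bind address and the all-zero signing key are placeholders):

```rust
use actix_session::CookieSession;
use actix_web::{web, App, HttpServer};

#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // signed cookie session with a 1-hour Max-Age
            .wrap(CookieSession::signed(&[0; 32]).secure(false).max_age(3600))
            .route("/", web::get().to(|| async { "hello" }))
    })
    .bind("127.0.0.1:8080")?
    .run()
    .await
}
```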
impl<S, B: 'static> Transform<S> for CookieSession
@ -341,7 +323,6 @@ where
fn call(&mut self, mut req: ServiceRequest) -> Self::Future {
let inner = self.inner.clone();
let (is_new, state) = self.inner.load(&req);
let prolong_expiration = self.inner.expires_in.is_some();
Session::set_session(state.into_iter(), &mut req);
let fut = self.service.call(req);
@ -353,9 +334,6 @@ where
| (SessionStatus::Renewed, Some(state)) => {
res.checked_expr(|res| inner.set_cookie(res, state))
}
(SessionStatus::Unchanged, Some(state)) if prolong_expiration => {
res.checked_expr(|res| inner.set_cookie(res, state))
}
(SessionStatus::Unchanged, _) =>
// set a new session cookie upon first request (new client)
{
@ -499,47 +477,4 @@ mod tests {
let body = test::read_response(&mut app, request).await;
assert_eq!(body, Bytes::from_static(b"counter: 100"));
}
#[actix_rt::test]
async fn prolong_expiration() {
let mut app = test::init_service(
App::new()
.wrap(CookieSession::signed(&[0; 32]).secure(false).expires_in(60))
.service(web::resource("/").to(|ses: Session| {
async move {
let _ = ses.set("counter", 100);
"test"
}
}))
.service(
web::resource("/test/")
.to(|| async move { "no-changes-in-session" }),
),
)
.await;
let request = test::TestRequest::get().to_request();
let response = app.call(request).await.unwrap();
let expires_1 = response
.response()
.cookies()
.find(|c| c.name() == "actix-session")
.expect("Cookie is set")
.expires()
.expect("Expiration is set");
actix_rt::time::delay_for(std::time::Duration::from_secs(1)).await;
let request = test::TestRequest::with_uri("/test/").to_request();
let response = app.call(request).await.unwrap();
let expires_2 = response
.response()
.cookies()
.find(|c| c.name() == "actix-session")
.expect("Cookie is set")
.expires()
.expect("Expiration is set");
assert!(expires_2 - expires_1 >= Duration::seconds(1));
}
}

View File

@ -1,13 +1,5 @@
# Changes
## [0.2.1] - 2020-02-25
* Add `#[allow(missing_docs)]` attribute to generated structs [#1368]
* Allow the handler function to be named as `config` [#1290]
[#1368]: https://github.com/actix/actix-web/issues/1368
[#1290]: https://github.com/actix/actix-web/issues/1290
## [0.2.0] - 2019-12-13
* Generate code for actix-web 2.0

View File

@ -1,6 +1,6 @@
[package]
name = "actix-web-codegen"
version = "0.2.1"
version = "0.2.0"
description = "Actix web proc macros"
readme = "README.md"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
@ -17,6 +17,6 @@ syn = { version = "^1", features = ["full", "parsing"] }
proc-macro2 = "^1"
[dev-dependencies]
actix-rt = "1.0.0"
actix-web = "2.0.0"
futures = "0.3.1"
actix-rt = { version = "1.0.0" }
actix-web = { version = "2.0.0-rc" }
futures = { version = "0.3.1" }

View File

@ -191,19 +191,19 @@ impl Route {
let extra_guards = &self.args.guards;
let resource_type = &self.resource_type;
let stream = quote! {
#[allow(non_camel_case_types, missing_docs)]
#[allow(non_camel_case_types)]
pub struct #name;
impl actix_web::dev::HttpServiceFactory for #name {
fn register(self, __config: &mut actix_web::dev::AppService) {
fn register(self, config: &mut actix_web::dev::AppService) {
#ast
let __resource = actix_web::Resource::new(#path)
let resource = actix_web::Resource::new(#path)
.name(#resource_name)
.guard(actix_web::guard::#guard())
#(.guard(actix_web::guard::fn_guard(#extra_guards)))*
.#resource_type(#name);
actix_web::dev::HttpServiceFactory::register(__resource, __config)
actix_web::dev::HttpServiceFactory::register(resource, config)
}
}
};
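The `quote!` template above is what the routing attribute macros expand to. A hedged usage sketch (handler name and path are illustrative): the attribute turns a free function into a unit struct implementing `HttpServiceFactory`, which is then registered via `App::service`.

```rust
use actix_web::{App, HttpResponse, HttpServer, Responder};
use actix_web_codegen::get;

// Expands to `pub struct test_handler;` plus the
// `HttpServiceFactory::register` impl shown in the template above.
#[get("/test")]
async fn test_handler() -> impl Responder {
    HttpResponse::Ok()
}

#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(test_handler))
        .bind("127.0.0.1:8080")?
        .run()
        .await
}
```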

View File

@ -2,12 +2,6 @@ use actix_web::{http, test, web::Path, App, HttpResponse, Responder};
use actix_web_codegen::{connect, delete, get, head, options, patch, post, put, trace};
use futures::{future, Future};
// Make sure that we can name a function 'config'
#[get("/config")]
async fn config() -> impl Responder {
HttpResponse::Ok()
}
#[get("/test")]
async fn test_handler() -> impl Responder {
HttpResponse::Ok()

View File

@ -1,4 +1,4 @@
#![deny(rust_2018_idioms, warnings)]
#![warn(rust_2018_idioms, warnings)]
#![allow(
clippy::type_complexity,
clippy::borrow_interior_mutable_const,

View File

@ -11,7 +11,6 @@ use futures::future::ok;
use open_ssl::ssl::{SslAcceptor, SslFiletype, SslMethod, SslVerifyMode};
use rust_tls::ClientConfig;
#[allow(unused)]
fn ssl_acceptor() -> SslAcceptor {
// load ssl keys
let mut builder = SslAcceptor::mozilla_intermediate(SslMethod::tls()).unwrap();

View File

@ -1,64 +0,0 @@
use actix_web::{test, web, App, HttpResponse};
use awc::Client;
use criterion::{criterion_group, criterion_main, Criterion};
use futures::future::join_all;
const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World";
// benchmark sending all requests at the same time
fn bench_async_burst(c: &mut Criterion) {
let srv = test::start(|| {
App::new()
.service(web::resource("/").route(web::to(|| HttpResponse::Ok().body(STR))))
});
// We are using System here, since Runtime requires a preinitialized tokio runtime
// (might be worth adding to the actix_rt docs)
let url = srv.url("/");
let mut rt = actix_rt::System::new("test");
c.bench_function("get_body_async_burst", move |b| {
b.iter_custom(|iters| {
let client = Client::new().get(url.clone()).freeze().unwrap();
let start = std::time::Instant::now();
// benchmark body
let resps = rt.block_on(async move {
let burst = (0..iters).map(|_| client.send());
join_all(burst).await
});
let elapsed = start.elapsed();
// if there are failed requests that might be an issue
let failed = resps.iter().filter(|r| r.is_err()).count();
if failed > 0 {
eprintln!("failed {} requests (might be bench timeout)", failed);
};
elapsed
})
});
}
criterion_group!(server_benches, bench_async_burst);
criterion_main!(server_benches);

View File

@ -1,108 +0,0 @@
use actix_service::Service;
use actix_web::dev::{ServiceRequest, ServiceResponse};
use actix_web::{web, App, Error, HttpResponse};
use criterion::{criterion_main, Criterion};
use std::cell::RefCell;
use std::rc::Rc;
use actix_web::test::{init_service, ok_service, TestRequest};
/// Criterion Benchmark for async Service
/// Should be used from within criterion group:
/// ```rust,ignore
/// let mut criterion: ::criterion::Criterion<_> =
/// ::criterion::Criterion::default().configure_from_args();
/// bench_async_service(&mut criterion, ok_service(), "async_service_direct");
/// ```
///
/// Usable for benching Service wrappers:
/// Using minimum service code implementation we first measure
/// time to run minimum service, then measure time with wrapper.
///
/// Sample output
/// async_service_direct time: [1.0908 us 1.1656 us 1.2613 us]
pub fn bench_async_service<S>(c: &mut Criterion, srv: S, name: &str)
where
S: Service<Request = ServiceRequest, Response = ServiceResponse, Error = Error>
+ 'static,
{
let mut rt = actix_rt::System::new("test");
let srv = Rc::new(RefCell::new(srv));
let req = TestRequest::default().to_srv_request();
assert!(rt
.block_on(srv.borrow_mut().call(req))
.unwrap()
.status()
.is_success());
// start benchmark loops
c.bench_function(name, move |b| {
b.iter_custom(|iters| {
let srv = srv.clone();
// exclude request generation from the measurement; it takes significant time compared to the call itself (3us vs 1us)
let reqs: Vec<_> = (0..iters)
.map(|_| TestRequest::default().to_srv_request())
.collect();
let start = std::time::Instant::now();
// benchmark body
rt.block_on(async move {
for req in reqs {
srv.borrow_mut().call(req).await.unwrap();
}
});
let elapsed = start.elapsed();
// check that at least first request succeeded
elapsed
})
});
}
async fn index(req: ServiceRequest) -> Result<ServiceResponse, Error> {
Ok(req.into_response(HttpResponse::Ok().finish()))
}
// Benchmark basic WebService directly
// this approach is usable for benching WebService, though it adds some time to the direct service call:
// Sample results on MacBook Pro '14
// time: [2.0724 us 2.1345 us 2.2074 us]
fn async_web_service(c: &mut Criterion) {
let mut rt = actix_rt::System::new("test");
let srv = Rc::new(RefCell::new(rt.block_on(init_service(
App::new().service(web::service("/").finish(index)),
))));
let req = TestRequest::get().uri("/").to_request();
assert!(rt
.block_on(srv.borrow_mut().call(req))
.unwrap()
.status()
.is_success());
// start benchmark loops
c.bench_function("async_web_service_direct", move |b| {
b.iter_custom(|iters| {
let srv = srv.clone();
let reqs = (0..iters).map(|_| TestRequest::get().uri("/").to_request());
let start = std::time::Instant::now();
// benchmark body
rt.block_on(async move {
for req in reqs {
srv.borrow_mut().call(req).await.unwrap();
}
});
let elapsed = start.elapsed();
// check that at least first request succeeded
elapsed
})
});
}
pub fn service_benches() {
let mut criterion: ::criterion::Criterion<_> =
::criterion::Criterion::default().configure_from_args();
bench_async_service(&mut criterion, ok_service(), "async_service_direct");
async_web_service(&mut criterion);
}
criterion_main!(service_benches);

View File

@ -1,5 +0,0 @@
ignore: # ignore codecoverage on following paths
- "**/tests"
- "test-server"
- "**/benches"
- "**/examples"

View File

@ -1,6 +1,6 @@
use actix_web::{get, web, HttpRequest};
#[cfg(unix)]
use actix_web::{middleware, App, Error, HttpResponse, HttpServer};
use actix_web::{
get, middleware, web, App, Error, HttpRequest, HttpResponse, HttpServer,
};
#[get("/resource1/{name}/index.html")]
async fn index(req: HttpRequest, name: web::Path<String>) -> String {
@ -8,7 +8,6 @@ async fn index(req: HttpRequest, name: web::Path<String>) -> String {
format!("Hello: {}!\r\n", name)
}
#[cfg(unix)]
async fn index_async(req: HttpRequest) -> Result<&'static str, Error> {
println!("REQ: {:?}", req);
Ok("Hello world!\r\n")

View File

@ -193,83 +193,57 @@ impl FromRequest for () {
macro_rules! tuple_from_req ({$fut_type:ident, $(($n:tt, $T:ident)),+} => {
// This module is a trick to get around the inability of
// `macro_rules!` macros to make new idents. We want to make
// a new `FutWrapper` struct for each distinct invocation of
// this macro. Ideally, we would name it something like
// `FutWrapper_$fut_type`, but this can't be done in a macro_rules
// macro.
//
// Instead, we put everything in a module named `$fut_type`, thus allowing
// us to use the name `FutWrapper` without worrying about conflicts.
// This macro only exists to generate trait impls for tuples - these
// are inherently global, so users don't have to care about this
// weird trick.
#[allow(non_snake_case)]
mod $fut_type {
/// FromRequest implementation for tuple
#[doc(hidden)]
#[allow(unused_parens)]
impl<$($T: FromRequest + 'static),+> FromRequest for ($($T,)+)
{
type Error = Error;
type Future = $fut_type<$($T),+>;
type Config = ($($T::Config),+);
// Bring everything into scope, so we don't need
// redundant imports
use super::*;
/// A helper struct to allow us to pin-project through
/// to individual fields
#[pin_project::pin_project]
struct FutWrapper<$($T: FromRequest),+>($(#[pin] $T::Future),+);
/// FromRequest implementation for tuple
#[doc(hidden)]
#[allow(unused_parens)]
impl<$($T: FromRequest + 'static),+> FromRequest for ($($T,)+)
{
type Error = Error;
type Future = $fut_type<$($T),+>;
type Config = ($($T::Config),+);
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
$fut_type {
items: <($(Option<$T>,)+)>::default(),
futs: FutWrapper($($T::from_request(req, payload),)+),
}
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
$fut_type {
items: <($(Option<$T>,)+)>::default(),
futs: ($($T::from_request(req, payload),)+),
}
}
}
#[doc(hidden)]
#[pin_project::pin_project]
pub struct $fut_type<$($T: FromRequest),+> {
items: ($(Option<$T>,)+),
#[pin]
futs: FutWrapper<$($T,)+>,
}
#[doc(hidden)]
#[pin_project::pin_project]
pub struct $fut_type<$($T: FromRequest),+> {
items: ($(Option<$T>,)+),
futs: ($($T::Future,)+),
}
impl<$($T: FromRequest),+> Future for $fut_type<$($T),+>
{
type Output = Result<($($T,)+), Error>;
impl<$($T: FromRequest),+> Future for $fut_type<$($T),+>
{
type Output = Result<($($T,)+), Error>;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let mut this = self.project();
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let this = self.project();
let mut ready = true;
$(
if this.items.$n.is_none() {
match this.futs.as_mut().project().$n.poll(cx) {
Poll::Ready(Ok(item)) => {
this.items.$n = Some(item);
}
Poll::Pending => ready = false,
Poll::Ready(Err(e)) => return Poll::Ready(Err(e.into())),
let mut ready = true;
$(
if this.items.$n.is_none() {
match unsafe { Pin::new_unchecked(&mut this.futs.$n) }.poll(cx) {
Poll::Ready(Ok(item)) => {
this.items.$n = Some(item);
}
Poll::Pending => ready = false,
Poll::Ready(Err(e)) => return Poll::Ready(Err(e.into())),
}
)+
}
)+
if ready {
Poll::Ready(Ok(
($(this.items.$n.take().unwrap(),)+)
))
} else {
Poll::Pending
}
}
if ready {
Poll::Ready(Ok(
($(this.items.$n.take().unwrap(),)+)
))
} else {
Poll::Pending
}
}
}
});
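The module-per-invocation trick described in the comments above can be shown with a much smaller macro. A minimal sketch, assuming nothing beyond `macro_rules!` itself: because declarative macros cannot mint new identifiers, each expansion is wrapped in a module named after a macro argument, so the helper type inside can keep the same name in every expansion without clashing.

```rust
macro_rules! make_pair {
    ($mod_name:ident, $a:ty, $b:ty) => {
        // The module provides the unique name; `Inner` never clashes
        // across invocations because each copy lives in its own module.
        #[allow(non_snake_case)]
        mod $mod_name {
            pub struct Inner(pub $a, pub $b);

            pub fn build(a: $a, b: $b) -> Inner {
                Inner(a, b)
            }
        }
    };
}

make_pair!(IntPair, i32, i32);
make_pair!(TextPair, String, String);

fn main() {
    let p = IntPair::build(1, 2);
    let q = TextPair::build("a".into(), "b".into());
    println!("{} {} {} {}", p.0, p.1, q.0, q.1);
}
```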

View File

@ -1,4 +1,4 @@
#![deny(rust_2018_idioms, warnings)]
#![warn(rust_2018_idioms, warnings)]
#![allow(
clippy::needless_doctest_main,
clippy::type_complexity,
@ -7,12 +7,6 @@
//! Actix web is a small, pragmatic, and extremely fast web framework
//! for Rust.
//!
//! ## Example
//!
//! The `#[actix_rt::main]` macro in the example below is provided by the Actix runtime
//! crate, [`actix-rt`](https://crates.io/crates/actix-rt). You will need to include
//! `actix-rt` in your dependencies for it to run.
//!
//! ```rust,no_run
//! use actix_web::{web, App, Responder, HttpServer};
//!

View File

@ -14,7 +14,7 @@ use bytes::Bytes;
use futures::future::{ok, Ready};
use log::debug;
use regex::Regex;
use time::OffsetDateTime;
use time;
use crate::dev::{BodySize, MessageBody, ResponseBody};
use crate::error::{Error, Result};
@ -163,11 +163,11 @@ where
LoggerResponse {
fut: self.service.call(req),
format: None,
time: OffsetDateTime::now(),
time: time::now(),
_t: PhantomData,
}
} else {
let now = OffsetDateTime::now();
let now = time::now();
let mut format = self.inner.format.clone();
for unit in &mut format.0 {
@ -192,7 +192,7 @@ where
{
#[pin]
fut: S::Future,
time: OffsetDateTime,
time: time::Tm,
format: Option<Format>,
_t: PhantomData<(B,)>,
}
@ -242,7 +242,7 @@ pub struct StreamLog<B> {
body: ResponseBody<B>,
format: Option<Format>,
size: usize,
time: OffsetDateTime,
time: time::Tm,
}
impl<B> Drop for StreamLog<B> {
@ -366,20 +366,20 @@ impl FormatText {
&self,
fmt: &mut Formatter<'_>,
size: usize,
entry_time: OffsetDateTime,
entry_time: time::Tm,
) -> Result<(), fmt::Error> {
match *self {
FormatText::Str(ref string) => fmt.write_str(string),
FormatText::Percent => "%".fmt(fmt),
FormatText::ResponseSize => size.fmt(fmt),
FormatText::Time => {
let rt = OffsetDateTime::now() - entry_time;
let rt = rt.as_seconds_f64();
let rt = time::now() - entry_time;
let rt = (rt.num_nanoseconds().unwrap_or(0) as f64) / 1_000_000_000.0;
fmt.write_fmt(format_args!("{:.6}", rt))
}
FormatText::TimeMillis => {
let rt = OffsetDateTime::now() - entry_time;
let rt = (rt.whole_nanoseconds() as f64) / 1_000_000.0;
let rt = time::now() - entry_time;
let rt = (rt.num_nanoseconds().unwrap_or(0) as f64) / 1_000_000.0;
fmt.write_fmt(format_args!("{:.6}", rt))
}
FormatText::EnvironHeader(ref name) => {
@ -414,7 +414,7 @@ impl FormatText {
}
}
fn render_request(&mut self, now: OffsetDateTime, req: &ServiceRequest) {
fn render_request(&mut self, now: time::Tm, req: &ServiceRequest) {
match *self {
FormatText::RequestLine => {
*self = if req.query_string().is_empty() {
@ -436,7 +436,7 @@ impl FormatText {
}
FormatText::UrlPath => *self = FormatText::Str(req.path().to_string()),
FormatText::RequestTime => {
*self = FormatText::Str(now.format("%Y-%m-%dT%H:%M:%S"))
*self = FormatText::Str(now.rfc3339().to_string())
}
FormatText::RequestHeader(ref name) => {
let s = if let Some(val) = req.headers().get(name) {
@ -513,7 +513,7 @@ mod tests {
.uri("/test/route/yeah")
.to_srv_request();
let now = OffsetDateTime::now();
let now = time::now();
for unit in &mut format.0 {
unit.render_request(now, &req);
}
@ -544,7 +544,7 @@ mod tests {
)
.to_srv_request();
let now = OffsetDateTime::now();
let now = time::now();
for unit in &mut format.0 {
unit.render_request(now, &req);
}
@ -554,7 +554,7 @@ mod tests {
unit.render_response(&resp);
}
let entry_time = OffsetDateTime::now();
let entry_time = time::now();
let render = |fmt: &mut Formatter<'_>| {
for unit in &format.0 {
unit.render(fmt, 1024, entry_time)?;
@ -572,7 +572,7 @@ mod tests {
let mut format = Format::new("%t");
let req = TestRequest::default().to_srv_request();
let now = OffsetDateTime::now();
let now = time::now();
for unit in &mut format.0 {
unit.render_request(now, &req);
}
@ -589,6 +589,6 @@ mod tests {
Ok(())
};
let s = format!("{}", FormatDisplay(&render));
assert!(s.contains(&format!("{}", now.format("%Y-%m-%dT%H:%M:%S"))));
assert!(s.contains(&format!("{}", now.rfc3339())));
}
}
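For reference, the two timing units touched above (`%T` and `%D`) boil down to elapsed seconds and elapsed milliseconds since the request entered the logger. An equivalent computation with `std::time`, shown only for illustration (the middleware itself uses the `time` crate exactly as in the diff):

```rust
use std::time::Instant;

fn main() {
    let entry_time = Instant::now();
    // ... request handling would happen here ...
    let rt = entry_time.elapsed();

    let seconds = rt.as_secs_f64();             // %T: seconds with fractional part
    let millis = rt.as_nanos() as f64 / 1.0e6;  // %D: milliseconds
    println!("{:.6} {:.6}", seconds, millis);
}
```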

View File

@ -443,6 +443,8 @@ where
#[cfg(unix)]
/// Start listening for unix domain connections on existing listener.
///
/// This method is available with the `uds` feature.
pub fn listen_uds(
mut self,
lst: std::os::unix::net::UnixListener,
@ -481,6 +483,8 @@ where
#[cfg(unix)]
/// Start listening for incoming unix domain connections.
///
/// This method is available with the `uds` feature.
pub fn bind_uds<A>(mut self, addr: A) -> io::Result<Self>
where
A: AsRef<std::path::Path>,

View File

@ -95,10 +95,11 @@ where
/// Calls service and waits for response future completion.
///
/// ```rust
/// use actix_web::{test, web, App, HttpResponse, http::StatusCode};
/// use actix_web::{test, App, HttpResponse, http::StatusCode};
/// use actix_service::Service;
///
/// #[actix_rt::test]
/// async fn test_response() {
/// #[test]
/// fn test_response() {
/// let mut app = test::init_service(
/// App::new()
/// .service(web::resource("/test").to(|| async {

View File

@ -96,7 +96,7 @@ pub fn route() -> Route {
/// );
/// ```
///
/// In the above example, one `GET` route gets added:
/// In the above example, one `GET` route get added:
/// * /{project_id}
///
pub fn get() -> Route {
@ -114,7 +114,7 @@ pub fn get() -> Route {
/// );
/// ```
///
/// In the above example, one `POST` route gets added:
/// In the above example, one `POST` route get added:
/// * /{project_id}
///
pub fn post() -> Route {
@ -132,7 +132,7 @@ pub fn post() -> Route {
/// );
/// ```
///
/// In the above example, one `PUT` route gets added:
/// In the above example, one `PUT` route get added:
/// * /{project_id}
///
pub fn put() -> Route {
@ -150,7 +150,7 @@ pub fn put() -> Route {
/// );
/// ```
///
/// In the above example, one `PATCH` route gets added:
/// In the above example, one `PATCH` route get added:
/// * /{project_id}
///
pub fn patch() -> Route {
@ -168,7 +168,7 @@ pub fn patch() -> Route {
/// );
/// ```
///
/// In the above example, one `DELETE` route gets added:
/// In the above example, one `DELETE` route get added:
/// * /{project_id}
///
pub fn delete() -> Route {
@ -186,7 +186,7 @@ pub fn delete() -> Route {
/// );
/// ```
///
/// In the above example, one `HEAD` route gets added:
/// In the above example, one `HEAD` route get added:
/// * /{project_id}
///
pub fn head() -> Route {
@ -204,7 +204,7 @@ pub fn head() -> Route {
/// );
/// ```
///
/// In the above example, one `GET` route gets added:
/// In the above example, one `GET` route get added:
/// * /{project_id}
///
pub fn method(method: Method) -> Route {

View File

@ -1,10 +1,5 @@
# Changes
## [Unreleased] - 2020-xx-xx
* Update the `time` dependency to 0.2.7
## [1.0.0] - 2019-12-13
### Changed

View File

@ -51,7 +51,7 @@ serde_json = "1.0"
sha1 = "0.6"
slab = "0.4"
serde_urlencoded = "0.6.1"
time = { version = "0.2.7", default-features = false, features = ["std"] }
time = "0.1"
open-ssl = { version="0.10", package="openssl", optional = true }
[dev-dependencies]

View File

@ -1,26 +0,0 @@
// Regression test for #1321
use futures::task::{noop_waker, Context};
use futures::stream::once;
use actix_http::body::{MessageBody, BodyStream};
use bytes::Bytes;
#[test]
fn weird_poll() {
let (sender, receiver) = futures::channel::oneshot::channel();
let mut body_stream = Ok(BodyStream::new(once(async {
let x = Box::new(0);
let y = &x;
receiver.await.unwrap();
let _z = **y;
Ok::<_, ()>(Bytes::new())
})));
let waker = noop_waker();
let mut context = Context::from_waker(&waker);
let _ = body_stream.as_mut().unwrap().poll_next(&mut context);
sender.send(()).unwrap();
let _ = std::mem::replace(&mut body_stream, Err([0; 32])).unwrap().poll_next(&mut context);
}