mirror of
https://github.com/fafhrd91/actix-web
synced 2025-02-20 03:14:21 +01:00
Select compression algorithm using feature flags (#2250)
Add `compress-*` feature flags to actix-http / actix-web / awc. This allows enabling or disabling individual compression algorithms instead of always pulling in all of them.
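As an illustration only (not part of the commit): a downstream application that wants, say, only gzip/deflate support can now turn off the default features and list exactly the flags it needs. The crate version below is a placeholder, not something specified by this change.

    [dependencies]
    # hypothetical manifest entry; "*" stands in for a real version requirement
    actix-web = { version = "*", default-features = false, features = ["compress-gzip", "cookies"] }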
This commit is contained in:
parent c260fb1c48
commit baa5a663c4
@@ -2,6 +2,11 @@
 
 ## Unreleased - 2021-xx-xx
+### Changed
+* Change compression algorithm features flags. [#2250]
+
+[#2250]: https://github.com/actix/actix-web/pull/2250
+
 
 
 ## 4.0.0-beta.7 - 2021-06-17
 ### Added
Cargo.toml (23 changed lines)
@@ -17,7 +17,7 @@ edition = "2018"
 
 [package.metadata.docs.rs]
 # features that docs.rs will build with
-features = ["openssl", "rustls", "compress", "cookies", "secure-cookies"]
+features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies", "secure-cookies"]
 
 [lib]
 name = "actix_web"
@@ -39,10 +39,14 @@ members = [
 # resolver = "2"
 
 [features]
-default = ["compress", "cookies"]
+default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
 
-# content-encoding support
-compress = ["actix-http/compress"]
+# Brotli algorithm content-encoding support
+compress-brotli = ["actix-http/compress-brotli", "__compress"]
+
+# Gzip and deflate algorithms content-encoding support
+compress-gzip = ["actix-http/compress-gzip", "__compress"]
+
+# Zstd algorithm content-encoding support
+compress-zstd = ["actix-http/compress-zstd", "__compress"]
 
 # support for cookies
 cookies = ["cookie"]
@@ -56,6 +60,10 @@ openssl = ["actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]
 # rustls
 rustls = ["actix-http/rustls", "actix-tls/accept", "actix-tls/rustls"]
 
+# Internal (PRIVATE!) features used to aid testing and checking feature status.
+# Don't rely on these whatsoever. They may disappear at any time.
+__compress = []
+
 [dependencies]
 actix-codec = "0.4.0"
 actix-macros = "0.2.1"
@@ -71,6 +79,7 @@ actix-http = "3.0.0-beta.7"
 
 ahash = "0.7"
 bytes = "1"
+cfg-if = "1"
 cookie = { version = "0.15", features = ["percent-encode"], optional = true }
 derive_more = "0.99.5"
 either = "1.5.3"
@@ -126,15 +135,15 @@ awc = { path = "awc" }
 
 [[test]]
 name = "test_server"
-required-features = ["compress", "cookies"]
+required-features = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
 
 [[example]]
 name = "basic"
-required-features = ["compress"]
+required-features = ["compress-gzip"]
 
 [[example]]
 name = "uds"
-required-features = ["compress"]
+required-features = ["compress-gzip"]
 
 [[example]]
 name = "on_connect"
MIGRATION.md (12 changed lines)
@@ -10,6 +10,18 @@
 
 Alternatively, explicitly require trailing slashes: `NormalizePath::new(TrailingSlash::Always)`.
 
+* The `compress` feature flag has been split by algorithm into `compress-brotli`, `compress-gzip`
+  and `compress-zstd`. By default all compression algorithms are enabled.
+  To select which algorithms `middleware::Compress` should support, enable only the flags you need:
+
+  - `compress-brotli`
+  - `compress-gzip`
+  - `compress-zstd`
+
+  If your `Cargo.toml` selects explicit `actix-web` features and you still want compression
+  enabled, change the feature selection as below:
+
+  Before: `"compress"`
+  After: `"compress-brotli", "compress-gzip", "compress-zstd"`
 
 
 ## 3.0.0
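Expanding the Before/After note above into a manifest sketch (hedged: the dependency version is a placeholder; only the feature names come from the commit):

    [dependencies]
    # Before: the single umbrella flag
    # actix-web = { version = "*", features = ["compress"] }
    # After: name the algorithms you actually need; listing all three restores the old behaviour
    actix-web = { version = "*", features = ["compress-brotli", "compress-gzip", "compress-zstd"] }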
@@ -2,6 +2,11 @@
 
 ## Unreleased - 2021-xx-xx
+### Changed
+* Change compression algorithm features flags. [#2250]
+
+[#2250]: https://github.com/actix/actix-web/pull/2250
+
 
 
 ## 3.0.0-beta.7 - 2021-06-17
 ### Added
@@ -16,7 +16,7 @@ edition = "2018"
 
 [package.metadata.docs.rs]
 # features that docs.rs will build with
-features = ["openssl", "rustls", "compress"]
+features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"]
 
 [lib]
 name = "actix_http"
@@ -32,11 +32,17 @@ openssl = ["actix-tls/openssl"]
 rustls = ["actix-tls/rustls"]
 
 # enable compression support
-compress = ["flate2", "brotli2", "zstd"]
+compress-brotli = ["brotli2", "__compress"]
+compress-gzip = ["flate2", "__compress"]
+compress-zstd = ["zstd", "__compress"]
 
 # trust-dns as client dns resolver
 trust-dns = ["trust-dns-resolver"]
 
+# Internal (PRIVATE!) features used to aid testing and checking feature status.
+# Don't rely on these whatsoever. They may disappear at any time.
+__compress = []
+
 [dependencies]
 actix-service = "2.0.0"
 actix-codec = "0.4.0"
@@ -8,10 +8,16 @@ use std::{
 };
 
 use actix_rt::task::{spawn_blocking, JoinHandle};
-use brotli2::write::BrotliDecoder;
 use bytes::Bytes;
-use flate2::write::{GzDecoder, ZlibDecoder};
 use futures_core::{ready, Stream};
+
+#[cfg(feature = "compress-brotli")]
+use brotli2::write::BrotliDecoder;
+
+#[cfg(feature = "compress-gzip")]
+use flate2::write::{GzDecoder, ZlibDecoder};
+
+#[cfg(feature = "compress-zstd")]
 use zstd::stream::write::Decoder as ZstdDecoder;
 
 use crate::{
@@ -37,15 +43,19 @@ where
     #[inline]
     pub fn new(stream: S, encoding: ContentEncoding) -> Decoder<S> {
         let decoder = match encoding {
+            #[cfg(feature = "compress-brotli")]
             ContentEncoding::Br => Some(ContentDecoder::Br(Box::new(
                 BrotliDecoder::new(Writer::new()),
             ))),
+            #[cfg(feature = "compress-gzip")]
             ContentEncoding::Deflate => Some(ContentDecoder::Deflate(Box::new(
                 ZlibDecoder::new(Writer::new()),
             ))),
+            #[cfg(feature = "compress-gzip")]
             ContentEncoding::Gzip => Some(ContentDecoder::Gzip(Box::new(
                 GzDecoder::new(Writer::new()),
             ))),
+            #[cfg(feature = "compress-zstd")]
             ContentEncoding::Zstd => Some(ContentDecoder::Zstd(Box::new(
                 ZstdDecoder::new(Writer::new()).expect(
                     "Failed to create zstd decoder. This is a bug. \
@@ -148,17 +158,22 @@ where
 }
 
 enum ContentDecoder {
+    #[cfg(feature = "compress-gzip")]
     Deflate(Box<ZlibDecoder<Writer>>),
+    #[cfg(feature = "compress-gzip")]
     Gzip(Box<GzDecoder<Writer>>),
+    #[cfg(feature = "compress-brotli")]
     Br(Box<BrotliDecoder<Writer>>),
     // We need explicit 'static lifetime here because ZstdDecoder need lifetime
    // argument, and we use `spawn_blocking` in `Decoder::poll_next` that require `FnOnce() -> R + Send + 'static`
+    #[cfg(feature = "compress-zstd")]
     Zstd(Box<ZstdDecoder<'static, Writer>>),
 }
 
 impl ContentDecoder {
     fn feed_eof(&mut self) -> io::Result<Option<Bytes>> {
         match self {
+            #[cfg(feature = "compress-brotli")]
             ContentDecoder::Br(ref mut decoder) => match decoder.flush() {
                 Ok(()) => {
                     let b = decoder.get_mut().take();
@@ -172,6 +187,7 @@ impl ContentDecoder {
                 Err(e) => Err(e),
             },
 
+            #[cfg(feature = "compress-gzip")]
             ContentDecoder::Gzip(ref mut decoder) => match decoder.try_finish() {
                 Ok(_) => {
                     let b = decoder.get_mut().take();
@@ -185,6 +201,7 @@ impl ContentDecoder {
                 Err(e) => Err(e),
             },
 
+            #[cfg(feature = "compress-gzip")]
             ContentDecoder::Deflate(ref mut decoder) => match decoder.try_finish() {
                 Ok(_) => {
                     let b = decoder.get_mut().take();
@@ -197,6 +214,7 @@ impl ContentDecoder {
                 Err(e) => Err(e),
             },
 
+            #[cfg(feature = "compress-zstd")]
             ContentDecoder::Zstd(ref mut decoder) => match decoder.flush() {
                 Ok(_) => {
                     let b = decoder.get_mut().take();
@@ -213,6 +231,7 @@ impl ContentDecoder {
 
     fn feed_data(&mut self, data: Bytes) -> io::Result<Option<Bytes>> {
         match self {
+            #[cfg(feature = "compress-brotli")]
             ContentDecoder::Br(ref mut decoder) => match decoder.write_all(&data) {
                 Ok(_) => {
                     decoder.flush()?;
@@ -227,6 +246,7 @@ impl ContentDecoder {
                 Err(e) => Err(e),
             },
 
+            #[cfg(feature = "compress-gzip")]
             ContentDecoder::Gzip(ref mut decoder) => match decoder.write_all(&data) {
                 Ok(_) => {
                     decoder.flush()?;
@@ -241,6 +261,7 @@ impl ContentDecoder {
                 Err(e) => Err(e),
             },
 
+            #[cfg(feature = "compress-gzip")]
             ContentDecoder::Deflate(ref mut decoder) => match decoder.write_all(&data) {
                 Ok(_) => {
                     decoder.flush()?;
@@ -255,6 +276,7 @@ impl ContentDecoder {
                 Err(e) => Err(e),
             },
 
+            #[cfg(feature = "compress-zstd")]
             ContentDecoder::Zstd(ref mut decoder) => match decoder.write_all(&data) {
                 Ok(_) => {
                     decoder.flush()?;
@@ -9,12 +9,18 @@ use std::{
 };
 
 use actix_rt::task::{spawn_blocking, JoinHandle};
-use brotli2::write::BrotliEncoder;
 use bytes::Bytes;
 use derive_more::Display;
-use flate2::write::{GzEncoder, ZlibEncoder};
 use futures_core::ready;
 use pin_project::pin_project;
+
+#[cfg(feature = "compress-brotli")]
+use brotli2::write::BrotliEncoder;
+
+#[cfg(feature = "compress-gzip")]
+use flate2::write::{GzEncoder, ZlibEncoder};
+
+#[cfg(feature = "compress-zstd")]
 use zstd::stream::write::Encoder as ZstdEncoder;
 
 use crate::{
@@ -233,28 +239,36 @@ fn update_head(encoding: ContentEncoding, head: &mut ResponseHead) {
 }
 
 enum ContentEncoder {
+    #[cfg(feature = "compress-gzip")]
     Deflate(ZlibEncoder<Writer>),
+    #[cfg(feature = "compress-gzip")]
     Gzip(GzEncoder<Writer>),
+    #[cfg(feature = "compress-brotli")]
     Br(BrotliEncoder<Writer>),
     // We need explicit 'static lifetime here because ZstdEncoder need lifetime
     // argument, and we use `spawn_blocking` in `Encoder::poll_next` that require `FnOnce() -> R + Send + 'static`
+    #[cfg(feature = "compress-zstd")]
     Zstd(ZstdEncoder<'static, Writer>),
 }
 
 impl ContentEncoder {
     fn encoder(encoding: ContentEncoding) -> Option<Self> {
         match encoding {
+            #[cfg(feature = "compress-gzip")]
             ContentEncoding::Deflate => Some(ContentEncoder::Deflate(ZlibEncoder::new(
                 Writer::new(),
                 flate2::Compression::fast(),
             ))),
+            #[cfg(feature = "compress-gzip")]
             ContentEncoding::Gzip => Some(ContentEncoder::Gzip(GzEncoder::new(
                 Writer::new(),
                 flate2::Compression::fast(),
             ))),
+            #[cfg(feature = "compress-brotli")]
             ContentEncoding::Br => {
                 Some(ContentEncoder::Br(BrotliEncoder::new(Writer::new(), 3)))
             }
+            #[cfg(feature = "compress-zstd")]
             ContentEncoding::Zstd => {
                 let encoder = ZstdEncoder::new(Writer::new(), 3).ok()?;
                 Some(ContentEncoder::Zstd(encoder))
@@ -266,27 +280,35 @@ impl ContentEncoder {
     #[inline]
     pub(crate) fn take(&mut self) -> Bytes {
         match *self {
+            #[cfg(feature = "compress-brotli")]
             ContentEncoder::Br(ref mut encoder) => encoder.get_mut().take(),
+            #[cfg(feature = "compress-gzip")]
             ContentEncoder::Deflate(ref mut encoder) => encoder.get_mut().take(),
+            #[cfg(feature = "compress-gzip")]
             ContentEncoder::Gzip(ref mut encoder) => encoder.get_mut().take(),
+            #[cfg(feature = "compress-zstd")]
             ContentEncoder::Zstd(ref mut encoder) => encoder.get_mut().take(),
         }
     }
 
     fn finish(self) -> Result<Bytes, io::Error> {
         match self {
+            #[cfg(feature = "compress-brotli")]
             ContentEncoder::Br(encoder) => match encoder.finish() {
                 Ok(writer) => Ok(writer.buf.freeze()),
                 Err(err) => Err(err),
             },
+            #[cfg(feature = "compress-gzip")]
             ContentEncoder::Gzip(encoder) => match encoder.finish() {
                 Ok(writer) => Ok(writer.buf.freeze()),
                 Err(err) => Err(err),
             },
+            #[cfg(feature = "compress-gzip")]
             ContentEncoder::Deflate(encoder) => match encoder.finish() {
                 Ok(writer) => Ok(writer.buf.freeze()),
                 Err(err) => Err(err),
             },
+            #[cfg(feature = "compress-zstd")]
             ContentEncoder::Zstd(encoder) => match encoder.finish() {
                 Ok(writer) => Ok(writer.buf.freeze()),
                 Err(err) => Err(err),
@@ -296,6 +318,7 @@ impl ContentEncoder {
 
     fn write(&mut self, data: &[u8]) -> Result<(), io::Error> {
         match *self {
+            #[cfg(feature = "compress-brotli")]
             ContentEncoder::Br(ref mut encoder) => match encoder.write_all(data) {
                 Ok(_) => Ok(()),
                 Err(err) => {
@@ -303,6 +326,7 @@ impl ContentEncoder {
                     Err(err)
                 }
             },
+            #[cfg(feature = "compress-gzip")]
             ContentEncoder::Gzip(ref mut encoder) => match encoder.write_all(data) {
                 Ok(_) => Ok(()),
                 Err(err) => {
@@ -310,6 +334,7 @@ impl ContentEncoder {
                     Err(err)
                 }
             },
+            #[cfg(feature = "compress-gzip")]
             ContentEncoder::Deflate(ref mut encoder) => match encoder.write_all(data) {
                 Ok(_) => Ok(()),
                 Err(err) => {
@@ -317,6 +342,7 @@ impl ContentEncoder {
                     Err(err)
                 }
             },
+            #[cfg(feature = "compress-zstd")]
             ContentEncoder::Zstd(ref mut encoder) => match encoder.write_all(data) {
                 Ok(_) => Ok(()),
                 Err(err) => {
@@ -2,10 +2,12 @@
 //!
 //! ## Crate Features
 //! | Feature | Functionality |
-//! | ---------------- | ----------------------------------------------------- |
+//! | ------------------- | ------------------------------------------- |
 //! | `openssl` | TLS support via [OpenSSL]. |
 //! | `rustls` | TLS support via [rustls]. |
-//! | `compress` | Payload compression support. (Deflate, Gzip & Brotli) |
+//! | `compress-brotli` | Payload compression support: Brotli. |
+//! | `compress-gzip` | Payload compression support: Deflate, Gzip. |
+//! | `compress-zstd` | Payload compression support: Zstd. |
 //! | `trust-dns` | Use [trust-dns] as the client DNS resolver. |
 //!
 //! [OpenSSL]: https://crates.io/crates/openssl
@@ -32,7 +34,8 @@ pub mod body;
 mod builder;
 pub mod client;
 mod config;
-#[cfg(feature = "compress")]
+
+#[cfg(feature = "__compress")]
 pub mod encoding;
 mod extensions;
 pub mod header;
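The crate-docs table above maps one-to-one onto Cargo features. As a hedged sketch (the version is a placeholder), an actix-http user who only needs zstd payload compression could write:

    [dependencies]
    # hypothetical manifest entry; enables only the zstd content-encoding support
    actix-http = { version = "*", features = ["compress-zstd"] }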
@@ -2,11 +2,15 @@
 
 ## Unreleased - 2021-xx-xx
+### Changed
+* Change compression algorithm features flags. [#2250]
+
+[#2250]: https://github.com/actix/actix-web/pull/2250
 
 
 ## 3.0.0-beta.6 - 2021-06-17
 * No significant changes since 3.0.0-beta.5.
 
 
 ## 3.0.0-beta.5 - 2021-04-17
 ### Removed
 * Deprecated methods on `ClientRequest`: `if_true`, `if_some`. [#2148]
@@ -24,10 +24,10 @@ path = "src/lib.rs"
 
 [package.metadata.docs.rs]
 # features that docs.rs will build with
-features = ["openssl", "rustls", "compress", "cookies"]
+features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
 
 [features]
-default = ["compress", "cookies"]
+default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
 
 # openssl
 openssl = ["tls-openssl", "actix-http/openssl"]
@@ -35,8 +35,12 @@ openssl = ["tls-openssl", "actix-http/openssl"]
 # rustls
 rustls = ["tls-rustls", "actix-http/rustls"]
 
-# content-encoding support
-compress = ["actix-http/compress"]
+# Brotli algorithm content-encoding support
+compress-brotli = ["actix-http/compress-brotli", "__compress"]
+# Gzip and deflate algorithms content-encoding support
+compress-gzip = ["actix-http/compress-gzip", "__compress"]
+# Zstd algorithm content-encoding support
+compress-zstd = ["actix-http/compress-zstd", "__compress"]
 
 # cookie parsing and cookie jar
 cookies = ["cookie"]
@@ -44,6 +48,10 @@ cookies = ["cookie"]
 # trust-dns as dns resolver
 trust-dns = ["actix-http/trust-dns"]
 
+# Internal (PRIVATE!) features used to aid testing and checking feature status.
+# Don't rely on these whatsoever. They may disappear at any time.
+__compress = []
+
 [dependencies]
 actix-codec = "0.4.0"
 actix-service = "2.0.0"
@@ -52,6 +60,7 @@ actix-rt = { version = "2.1", default-features = false }
 
 base64 = "0.13"
 bytes = "1"
+cfg-if = "1"
 cookie = { version = "0.15", features = ["percent-encode"], optional = true }
 derive_more = "0.99.5"
 futures-core = { version = "0.3.7", default-features = false }
@@ -8,7 +8,7 @@ use actix_http::{
     body::Body,
     http::{
         header::{self, IntoHeaderPair},
-        uri, ConnectionType, Error as HttpError, HeaderMap, HeaderValue, Method, Uri, Version,
+        ConnectionType, Error as HttpError, HeaderMap, HeaderValue, Method, Uri, Version,
     },
     RequestHead,
 };
@@ -22,11 +22,6 @@ use crate::{
     ClientConfig,
 };
 
-#[cfg(feature = "compress")]
-const HTTPS_ENCODING: &str = "br, gzip, deflate";
-#[cfg(not(feature = "compress"))]
-const HTTPS_ENCODING: &str = "br";
-
 /// An HTTP Client request builder
 ///
 /// This type can be used to construct an instance of `ClientRequest` through a
@@ -480,22 +475,37 @@ impl ClientRequest {
 
         let mut slf = self;
 
+        // Set the Accept-Encoding HTTP header depending on enabled features.
+        // If decompression is not requested, we cannot know which encodings are
+        // supported, so we cannot guess an Accept-Encoding header.
         if slf.response_decompress {
-            let https = slf
-                .head
-                .uri
-                .scheme()
-                .map(|s| s == &uri::Scheme::HTTPS)
-                .unwrap_or(true);
-
-            if https {
-                slf = slf.insert_header_if_none((header::ACCEPT_ENCODING, HTTPS_ENCODING));
-            } else {
-                #[cfg(feature = "compress")]
-                {
-                    slf = slf.insert_header_if_none((header::ACCEPT_ENCODING, "gzip, deflate"));
-                }
-            }
+            // Set Accept-Encoding with the compression algorithms awc is built with.
+            #[cfg(feature = "__compress")]
+            let accept_encoding = {
+                let mut encoding = vec![];
+
+                #[cfg(feature = "compress-brotli")]
+                encoding.push("br");
+
+                #[cfg(feature = "compress-gzip")]
+                {
+                    encoding.push("gzip");
+                    encoding.push("deflate");
+                }
+
+                #[cfg(feature = "compress-zstd")]
+                encoding.push("zstd");
+
+                assert!(!encoding.is_empty(), "encoding cannot be empty unless __compress feature has been explicitly enabled.");
+                encoding.join(", ")
+            };
+
+            // Otherwise tell the server we do not support any compression algorithm,
+            // so we clearly indicate that we want identity encoding.
+            #[cfg(not(feature = "__compress"))]
+            let accept_encoding = "identity";
+
+            slf = slf.insert_header_if_none((header::ACCEPT_ENCODING, accept_encoding));
         }
 
         Ok(slf)
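Per the builder code above, the advertised Accept-Encoding now follows the enabled awc features: all three compress-* flags (the default) yield "br, gzip, deflate, zstd", a single flag yields only that algorithm's tokens, and no compress-* flag falls back to "identity". A hedged manifest sketch for a client that only needs brotli decompression (version is a placeholder):

    [dependencies]
    # hypothetical awc consumer; with only compress-brotli the client sends "Accept-Encoding: br"
    awc = { version = "*", default-features = false, features = ["compress-brotli", "cookies"] }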
@@ -22,7 +22,7 @@ use derive_more::From;
 use futures_core::Stream;
 use serde::Serialize;
 
-#[cfg(feature = "compress")]
+#[cfg(feature = "__compress")]
 use actix_http::{encoding::Decoder, http::header::ContentEncoding, Payload, PayloadStream};
 
 use crate::{
@@ -91,7 +91,7 @@ impl SendClientRequest {
     }
 }
 
-#[cfg(feature = "compress")]
+#[cfg(feature = "__compress")]
 impl Future for SendClientRequest {
     type Output = Result<ClientResponse<Decoder<Payload<PayloadStream>>>, SendRequestError>;
 
@@ -131,7 +131,7 @@ impl Future for SendClientRequest {
     }
 }
 
-#[cfg(not(feature = "compress"))]
+#[cfg(not(feature = "__compress"))]
 impl Future for SendClientRequest {
     type Output = Result<ClientResponse, SendRequestError>;
 
@@ -1,7 +1,7 @@
 use std::{fmt, str};
 
 pub use self::Encoding::{
-    Brotli, Chunked, Compress, Deflate, EncodingExt, Gzip, Identity, Trailers,
+    Brotli, Chunked, Compress, Deflate, EncodingExt, Gzip, Identity, Trailers, Zstd,
 };
 
 /// A value to represent an encoding used in `Transfer-Encoding`
@@ -22,6 +22,8 @@ pub enum Encoding {
     Identity,
     /// The `trailers` encoding.
     Trailers,
+    /// The `zstd` encoding.
+    Zstd,
     /// Some other encoding that is less common, can be any String.
     EncodingExt(String),
 }
@@ -36,6 +38,7 @@ impl fmt::Display for Encoding {
             Compress => "compress",
             Identity => "identity",
             Trailers => "trailers",
+            Zstd => "zstd",
             EncodingExt(ref s) => s.as_ref(),
         })
     }
@@ -52,6 +55,7 @@ impl str::FromStr for Encoding {
             "compress" => Ok(Compress),
             "identity" => Ok(Identity),
             "trailers" => Ok(Trailers),
+            "zstd" => Ok(Zstd),
             _ => Ok(EncodingExt(s.to_owned())),
         }
     }
@@ -47,7 +47,7 @@
 //! * Streaming and pipelining
 //! * Keep-alive and slow requests handling
 //! * Client/server [WebSockets](https://actix.rs/docs/websockets/) support
-//! * Transparent content compression/decompression (br, gzip, deflate)
+//! * Transparent content compression/decompression (br, gzip, deflate, zstd)
 //! * Powerful [request routing](https://actix.rs/docs/url-dispatch/)
 //! * Multipart streams
 //! * Static assets
@@ -140,7 +140,8 @@ pub mod dev {
     pub use actix_http::body::{
         AnyBody, Body, BodySize, MessageBody, ResponseBody, SizedStream,
     };
-    #[cfg(feature = "compress")]
+
+    #[cfg(feature = "__compress")]
     pub use actix_http::encoding::Decoder as Decompress;
     pub use actix_http::ResponseBuilder as BaseHttpResponseBuilder;
     pub use actix_http::{Extensions, Payload, PayloadStream, RequestHead, ResponseHead};
@@ -144,7 +144,7 @@ mod tests {
     use crate::{web, App, HttpResponse};
 
     #[actix_rt::test]
-    #[cfg(all(feature = "cookies", feature = "compress"))]
+    #[cfg(all(feature = "cookies", feature = "__compress"))]
     async fn test_scope_middleware() {
         use crate::middleware::Compress;
 
@@ -167,7 +167,7 @@ mod tests {
     }
 
     #[actix_rt::test]
-    #[cfg(all(feature = "cookies", feature = "compress"))]
+    #[cfg(all(feature = "cookies", feature = "__compress"))]
     async fn test_resource_scope_middleware() {
         use crate::middleware::Compress;
 
@@ -14,7 +14,8 @@ pub use self::err_handlers::{ErrorHandlerResponse, ErrorHandlers};
 pub use self::logger::Logger;
 pub use self::normalize::{NormalizePath, TrailingSlash};
 
-#[cfg(feature = "compress")]
+#[cfg(feature = "__compress")]
 mod compress;
-#[cfg(feature = "compress")]
+
+#[cfg(feature = "__compress")]
 pub use self::compress::Compress;
@@ -16,7 +16,7 @@ use futures_core::{future::LocalBoxFuture, ready};
 use futures_util::{FutureExt as _, StreamExt as _};
 use serde::{de::DeserializeOwned, Serialize};
 
-#[cfg(feature = "compress")]
+#[cfg(feature = "__compress")]
 use crate::dev::Decompress;
 use crate::{
     error::UrlencodedError, extract::FromRequest, http::header::CONTENT_LENGTH, web, Error,
@@ -255,9 +255,9 @@ impl Default for FormConfig {
 /// - content type is not `application/x-www-form-urlencoded`
 /// - content length is greater than [limit](UrlEncoded::limit())
 pub struct UrlEncoded<T> {
-    #[cfg(feature = "compress")]
+    #[cfg(feature = "__compress")]
     stream: Option<Decompress<Payload>>,
-    #[cfg(not(feature = "compress"))]
+    #[cfg(not(feature = "__compress"))]
     stream: Option<Payload>,
 
     limit: usize,
@@ -293,10 +293,15 @@ impl<T> UrlEncoded<T> {
             }
         };
 
-        #[cfg(feature = "compress")]
-        let payload = Decompress::from_headers(payload.take(), req.headers());
-        #[cfg(not(feature = "compress"))]
-        let payload = payload.take();
+        let payload = {
+            cfg_if::cfg_if! {
+                if #[cfg(feature = "__compress")] {
+                    Decompress::from_headers(payload.take(), req.headers())
+                } else {
+                    payload.take()
+                }
+            }
+        };
 
         UrlEncoded {
             encoding,
@@ -16,7 +16,7 @@ use serde::{de::DeserializeOwned, Serialize};
 
 use actix_http::Payload;
 
-#[cfg(feature = "compress")]
+#[cfg(feature = "__compress")]
 use crate::dev::Decompress;
 use crate::{
     error::{Error, JsonPayloadError},
@@ -300,9 +300,9 @@ pub enum JsonBody<T> {
     Body {
         limit: usize,
         length: Option<usize>,
-        #[cfg(feature = "compress")]
+        #[cfg(feature = "__compress")]
         payload: Decompress<Payload>,
-        #[cfg(not(feature = "compress"))]
+        #[cfg(not(feature = "__compress"))]
         payload: Payload,
         buf: BytesMut,
         _res: PhantomData<T>,
@@ -345,10 +345,15 @@ where
         // As the internal usage always call JsonBody::limit after JsonBody::new.
        // And limit check to return an error variant of JsonBody happens there.
 
-        #[cfg(feature = "compress")]
-        let payload = Decompress::from_headers(payload.take(), req.headers());
-        #[cfg(not(feature = "compress"))]
-        let payload = payload.take();
+        let payload = {
+            cfg_if::cfg_if! {
+                if #[cfg(feature = "__compress")] {
+                    Decompress::from_headers(payload.take(), req.headers())
+                } else {
+                    payload.take()
+                }
+            }
+        };
 
         JsonBody::Body {
             limit: DEFAULT_LIMIT,
@@ -282,9 +282,9 @@ impl Default for PayloadConfig {
 pub struct HttpMessageBody {
     limit: usize,
     length: Option<usize>,
-    #[cfg(feature = "compress")]
+    #[cfg(feature = "__compress")]
     stream: dev::Decompress<dev::Payload>,
-    #[cfg(not(feature = "compress"))]
+    #[cfg(not(feature = "__compress"))]
     stream: dev::Payload,
     buf: BytesMut,
     err: Option<PayloadError>,
@@ -312,10 +312,15 @@ impl HttpMessageBody {
             }
         }
 
-        #[cfg(feature = "compress")]
-        let stream = dev::Decompress::from_headers(payload.take(), req.headers());
-        #[cfg(not(feature = "compress"))]
-        let stream = payload.take();
+        let stream = {
+            cfg_if::cfg_if! {
+                if #[cfg(feature = "__compress")] {
+                    dev::Decompress::from_headers(payload.take(), req.headers())
+                } else {
+                    payload.take()
+                }
+            }
+        };
 
         HttpMessageBody {
             stream,