Mirror of https://github.com/fafhrd91/actix-web (synced 2025-03-14 20:26:26 +01:00)

Compare commits: test-v0.1. ... master (763 commits)

(deleted file)
@@ -1,14 +0,0 @@
[alias]
lint = "clippy --workspace --tests --examples --bins -- -Dclippy::todo"
lint-all = "clippy --workspace --all-features --tests --examples --bins -- -Dclippy::todo"

# lib checking
ci-check-min = "hack --workspace check --no-default-features"
ci-check-default = "hack --workspace check"
ci-check-default-tests = "check --workspace --tests"
ci-check-all-feature-powerset="hack --workspace --feature-powerset --skip=__compress,io-uring check"
ci-check-all-feature-powerset-linux="hack --workspace --feature-powerset --skip=__compress check"

# testing
ci-doctest-default = "test --workspace --doc --no-fail-fast -- --nocapture"
ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture"

.clippy.toml (new file, 7 lines)
@@ -0,0 +1,7 @@
disallowed-names = [
  "e", # no single letter error bindings
]
disallowed-methods = [
  "std::cell::RefCell::default()",
  "std::rc::Rc::default()",
]
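
For context, a minimal Rust sketch of what the `disallowed-names` entry above is meant to catch; the function and file name are invented for illustration:

```rust
use std::fs;

// With the `.clippy.toml` above, clippy's `disallowed_names` lint flags a
// binding named `e`, nudging error bindings toward descriptive names.
fn read_config() -> Result<String, std::io::Error> {
    match fs::read_to_string("config.toml") {
        Ok(contents) => Ok(contents),
        // Err(e) => Err(e),  // flagged by `disallowed-names = ["e"]`
        Err(err) => Err(err), // accepted
    }
}
```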

.github/ISSUE_TEMPLATE/bug_report.md (10 changed lines)
@@ -3,34 +3,40 @@ name: Bug Report
 about: Create a bug report.
 ---
 
-Your issue may already be reported!
-Please search on the [Actix Web issue tracker](https://github.com/actix/actix-web/issues) before creating one.
+Your issue may already be reported! Please search on the [Actix Web issue tracker](https://github.com/actix/actix-web/issues) before creating one.
 
 ## Expected Behavior
 
 <!--- If you're describing a bug, tell us what should happen -->
 <!--- If you're suggesting a change/improvement, tell us how it should work -->
 
 ## Current Behavior
 
 <!--- If describing a bug, tell us what happens instead of the expected behavior -->
 <!--- If suggesting a change/improvement, explain the difference from current behavior -->
 
 ## Possible Solution
 
 <!--- Not obligatory, but suggest a fix/reason for the bug, -->
 <!--- or ideas how to implement the addition or change -->
 
 ## Steps to Reproduce (for bugs)
 
 <!--- Provide a link to a live example, or an unambiguous set of steps to -->
 <!--- reproduce this bug. Include code to reproduce, if relevant -->
 
 1.
 2.
 3.
 4.
 
 ## Context
 
 <!--- How has this issue affected you? What are you trying to accomplish? -->
 <!--- Providing context helps us come up with a solution that is most useful in the real world -->
 
 ## Your Environment
 
 <!--- Include as many relevant details about the environment you experienced the bug in -->
 
 - Rust Version (I.e, output of `rustc -V`):

.github/PULL_REQUEST_TEMPLATE.md (7 changed lines)
@@ -2,12 +2,14 @@
 <!-- Please fill out the following to get your PR reviewed quicker. -->
 
 ## PR Type
 
 <!-- What kind of change does this PR make? -->
 <!-- Bug Fix / Feature / Refactor / Code Style / Other -->
 
 PR_TYPE
 
 
 ## PR Checklist
 
 <!-- Check your PR fulfills the following items. -->
 <!-- For draft PRs check the boxes as you complete them. -->
 
@@ -17,11 +19,10 @@ PR_TYPE
 - [ ] Format code with the latest stable rustfmt.
 - [ ] (Team) Label with affected crates and semver status.
 
 
 ## Overview
 
 <!-- Describe the current and new behavior. -->
 <!-- Emphasize any breaking changes. -->
 
 
 <!-- If this PR fixes or closes an issue, reference it here. -->
 <!-- Closes #000 -->

.github/dependabot.yml (new file, 10 lines)
@@ -0,0 +1,10 @@
version: 2
updates:
  - package-ecosystem: cargo
    directory: /
    schedule:
      interval: weekly
  - package-ecosystem: github-actions
    directory: /
    schedule:
      interval: weekly

.github/workflows/bench.yml (28 changed lines)
@@ -1,28 +1,28 @@
 name: Benchmark
 
 on:
-  pull_request:
-    types: [opened, synchronize, reopened]
   push:
-    branches:
-      - master
+    branches: [master]
+
+permissions:
+  contents: read
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
 
 jobs:
   check_benchmark:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: nightly
-          profile: minimal
-          override: true
+        run: |
+          rustup set profile minimal
+          rustup install nightly
+          rustup override set nightly
 
       - name: Check benchmark
-        uses: actions-rs/cargo@v1
-        with:
-          command: bench
-          args: --bench=server -- --sample-size=15
+        run: cargo bench --bench=server -- --sample-size=15

.github/workflows/ci-master.yml (deleted, 66 lines)
@@ -1,66 +0,0 @@
name: CI (master only)

on:
  push:
    branches: [master]

jobs:
  ci_feature_powerset_check:
    name: Verify Feature Combinations
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Install stable
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable-x86_64-unknown-linux-gnu
          profile: minimal
          override: true

      - name: Generate Cargo.lock
        uses: actions-rs/cargo@v1
        with: { command: generate-lockfile }
      - name: Cache Dependencies
        uses: Swatinem/rust-cache@v1.2.0

      - name: Install cargo-hack
        uses: actions-rs/cargo@v1
        with:
          command: install
          args: cargo-hack

      - name: check feature combinations
        uses: actions-rs/cargo@v1
        with: { command: ci-check-all-feature-powerset }

      - name: check feature combinations
        uses: actions-rs/cargo@v1
        with: { command: ci-check-all-feature-powerset-linux }

  coverage:
    name: coverage
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Install stable
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable-x86_64-unknown-linux-gnu
          profile: minimal
          override: true

      - name: Generate Cargo.lock
        uses: actions-rs/cargo@v1
        with: { command: generate-lockfile }
      - name: Cache Dependencies
        uses: Swatinem/rust-cache@v1.2.0

      - name: Generate coverage file
        run: |
          cargo install cargo-tarpaulin --vers "^0.13"
          cargo tarpaulin --workspace --features=rustls,openssl --out Xml --verbose
      - name: Upload to Codecov
        uses: codecov/codecov-action@v1
        with: { file: cobertura.xml }

.github/workflows/ci-post-merge.yml (new file, 91 lines)
@@ -0,0 +1,91 @@
name: CI (post-merge)

on:
  push:
    branches: [master]

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  build_and_test_nightly:
    strategy:
      fail-fast: false
      matrix:
        # prettier-ignore
        target:
          - { name: Linux, os: ubuntu-latest, triple: x86_64-unknown-linux-gnu }
          - { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
          - { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc }
        version:
          - { name: nightly, version: nightly }

    name: ${{ matrix.target.name }} / ${{ matrix.version.name }}
    runs-on: ${{ matrix.target.os }}

    steps:
      - uses: actions/checkout@v4

      - name: Install nasm
        if: matrix.target.os == 'windows-latest'
        uses: ilammy/setup-nasm@v1.5.2

      - name: Install OpenSSL
        if: matrix.target.os == 'windows-latest'
        shell: bash
        run: |
          set -e
          choco install openssl --version=1.1.1.2100 -y --no-progress
          echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' >> $GITHUB_ENV
          echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV

      - name: Install Rust (${{ matrix.version.name }})
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        with:
          toolchain: ${{ matrix.version.version }}

      - name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
        uses: taiki-e/install-action@v2.49.17
        with:
          tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean

      - name: check minimal
        run: just check-min

      - name: check default
        run: just check-default

      - name: tests
        timeout-minutes: 60
        run: just test

      - name: CI cache clean
        run: cargo-ci-cache-clean

  ci_feature_powerset_check:
    name: Verify Feature Combinations
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Free Disk Space
        run: ./scripts/free-disk-space.sh

      - name: Setup mold linker
        uses: rui314/setup-mold@v1

      - name: Install Rust
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0

      - name: Install just, cargo-hack
        uses: taiki-e/install-action@v2.49.17
        with:
          tool: just,cargo-hack

      - name: Check feature combinations
        run: just check-feature-combinations

.github/workflows/ci.yml (144 changed lines)
@@ -3,119 +3,119 @@ name: CI
 on:
   pull_request:
     types: [opened, synchronize, reopened]
+  merge_group:
+    types: [checks_requested]
   push:
     branches: [master]
 
+permissions:
+  contents: read
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
+  read_msrv:
+    name: Read MSRV
+    uses: actions-rust-lang/msrv/.github/workflows/msrv.yml@v0.1.0
+
   build_and_test:
+    needs: read_msrv
+
     strategy:
       fail-fast: false
       matrix:
+        # prettier-ignore
         target:
           - { name: Linux, os: ubuntu-latest, triple: x86_64-unknown-linux-gnu }
           - { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
-          - { name: Windows, os: windows-2022, triple: x86_64-pc-windows-msvc }
+          - { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc }
         version:
-          - 1.52.0 # MSRV
-          - stable
-          - nightly
+          - { name: msrv, version: "${{ needs.read_msrv.outputs.msrv }}" }
+          - { name: stable, version: stable }
 
-    name: ${{ matrix.target.name }} / ${{ matrix.version }}
+    name: ${{ matrix.target.name }} / ${{ matrix.version.name }}
     runs-on: ${{ matrix.target.os }}
 
-    env:
-      CI: 1
-      CARGO_INCREMENTAL: 0
-      VCPKGRS_DYNAMIC: 1
-
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
 
-      # install OpenSSL on Windows
-      # TODO: GitHub actions docs state that OpenSSL is
-      # already installed on these Windows machines somewhere
-      - name: Set vcpkg root
-        if: matrix.target.triple == 'x86_64-pc-windows-msvc'
-        run: echo "VCPKG_ROOT=$env:VCPKG_INSTALLATION_ROOT" | Out-File -FilePath $env:GITHUB_ENV -Append
+      - name: Install nasm
+        if: matrix.target.os == 'windows-latest'
+        uses: ilammy/setup-nasm@v1.5.2
+
       - name: Install OpenSSL
-        if: matrix.target.triple == 'x86_64-pc-windows-msvc'
-        run: vcpkg install openssl:x64-windows
+        if: matrix.target.os == 'windows-latest'
+        shell: bash
+        run: |
+          set -e
+          choco install openssl --version=1.1.1.2100 -y --no-progress
+          echo 'OPENSSL_DIR=C:\Program Files\OpenSSL' >> $GITHUB_ENV
+          echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV
 
-      - name: Install ${{ matrix.version }}
-        uses: actions-rs/toolchain@v1
+      - name: Setup mold linker
+        if: matrix.target.os == 'ubuntu-latest'
+        uses: rui314/setup-mold@v1
+
+      - name: Install Rust (${{ matrix.version.name }})
+        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
         with:
-          toolchain: ${{ matrix.version }}-${{ matrix.target.triple }}
-          profile: minimal
-          override: true
+          toolchain: ${{ matrix.version.version }}
 
-      - name: Generate Cargo.lock
-        uses: actions-rs/cargo@v1
-        with: { command: generate-lockfile }
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@v1.2.0
-
-      - name: Install cargo-hack
-        uses: actions-rs/cargo@v1
+      - name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
+        uses: taiki-e/install-action@v2.49.17
         with:
-          command: install
-          args: cargo-hack
+          tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
+
+      - name: workaround MSRV issues
+        if: matrix.version.name == 'msrv'
+        run: just downgrade-for-msrv
 
       - name: check minimal
-        uses: actions-rs/cargo@v1
-        with: { command: ci-check-min }
+        run: just check-min
 
       - name: check default
-        uses: actions-rs/cargo@v1
-        with: { command: ci-check-default }
+        run: just check-default
 
       - name: tests
         timeout-minutes: 60
-        run: |
-          cargo test --lib --tests -p=actix-router --all-features
-          cargo test --lib --tests -p=actix-http --all-features
-          cargo test --lib --tests -p=actix-web --features=rustls,openssl -- --skip=test_reading_deflate_encoding_large_random_rustls
-          cargo test --lib --tests -p=actix-web-codegen --all-features
-          cargo test --lib --tests -p=awc --all-features
-          cargo test --lib --tests -p=actix-http-test --all-features
-          cargo test --lib --tests -p=actix-test --all-features
-          cargo test --lib --tests -p=actix-files
-          cargo test --lib --tests -p=actix-multipart --all-features
-          cargo test --lib --tests -p=actix-web-actors --all-features
+        run: just test
+
+      - name: CI cache clean
+        run: cargo-ci-cache-clean
+
+  io-uring:
+    name: io-uring tests
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
+        with:
+          toolchain: nightly
 
       - name: tests (io-uring)
-        if: matrix.target.os == 'ubuntu-latest'
         timeout-minutes: 60
         run: >
-          sudo bash -c "ulimit -Sl 512
-          && ulimit -Hl 512
-          && PATH=$PATH:/usr/share/rust/.cargo/bin
-          && RUSTUP_TOOLCHAIN=${{ matrix.version }} cargo test --lib --tests -p=actix-files --all-features"
+          sudo bash -c "ulimit -Sl 512 && ulimit -Hl 512 && PATH=$PATH:/usr/share/rust/.cargo/bin && RUSTUP_TOOLCHAIN=stable cargo test --lib --tests -p=actix-files --all-features"
 
-      - name: Clear the cargo caches
-        run: |
-          cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
-          cargo-cache
-
   rustdoc:
     name: doc tests
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
 
       - name: Install Rust (nightly)
-        uses: actions-rs/toolchain@v1
+        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
         with:
-          toolchain: nightly-x86_64-unknown-linux-gnu
-          profile: minimal
-          override: true
+          toolchain: nightly
 
-      - name: Generate Cargo.lock
-        uses: actions-rs/cargo@v1
-        with: { command: generate-lockfile }
-      - name: Cache Dependencies
-        uses: Swatinem/rust-cache@v1.3.0
+      - name: Install just
+        uses: taiki-e/install-action@v2.49.17
+        with:
+          tool: just
 
       - name: doc tests
-        uses: actions-rs/cargo@v1
-        timeout-minutes: 60
-        with: { command: ci-doctest }
+        run: just test-docs

.github/workflows/clippy-fmt.yml (deleted, 39 lines)
@@ -1,39 +0,0 @@
name: Lint

on:
  pull_request:
    types: [opened, synchronize, reopened]

jobs:
  fmt:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          components: rustfmt
      - name: Check with rustfmt
        uses: actions-rs/cargo@v1
        with:
          command: fmt
          args: --all -- --check

  clippy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          components: clippy
          override: true
      - name: Check with Clippy
        uses: actions-rs/clippy-check@v1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          args: --workspace --all-features --tests

.github/workflows/coverage.yml (new file, 40 lines)
@@ -0,0 +1,40 @@
name: Coverage

on:
  push:
    branches: [master]

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  coverage:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install Rust (nightly)
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        with:
          toolchain: nightly
          components: llvm-tools

      - name: Install just, cargo-llvm-cov, cargo-nextest
        uses: taiki-e/install-action@v2.49.17
        with:
          tool: just,cargo-llvm-cov,cargo-nextest

      - name: Generate code coverage
        run: just test-coverage-codecov

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v5.4.0
        with:
          files: codecov.json
          fail_ci_if_error: true
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

.github/workflows/lint.yml (new file, 90 lines)
@@ -0,0 +1,90 @@
name: Lint

on:
  pull_request:
    types: [opened, synchronize, reopened]

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  fmt:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install Rust (nightly)
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        with:
          toolchain: nightly
          components: rustfmt

      - name: Check with Rustfmt
        run: cargo fmt --all -- --check

  clippy:
    permissions:
      contents: read
      checks: write # to add clippy checks to PR diffs

    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install Rust
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        with:
          components: clippy

      - name: Check with Clippy
        uses: giraffate/clippy-action@v1.0.1
        with:
          reporter: github-pr-check
          github_token: ${{ secrets.GITHUB_TOKEN }}
          clippy_flags: >-
            --workspace --all-features --tests --examples --bins --
            -A unknown_lints -D clippy::todo -D clippy::dbg_macro

  lint-docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install Rust (nightly)
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        with:
          toolchain: nightly
          components: rust-docs

      - name: Check for broken intra-doc links
        env:
          RUSTDOCFLAGS: -D warnings
        run: cargo +nightly doc --no-deps --workspace --all-features

  check-external-types:
    if: false # rustdoc mismatch currently
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install Rust (${{ vars.RUST_VERSION_EXTERNAL_TYPES }})
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        with:
          toolchain: ${{ vars.RUST_VERSION_EXTERNAL_TYPES }}

      - name: Install just
        uses: taiki-e/install-action@v2.49.17
        with:
          tool: just

      - name: Install cargo-check-external-types
        uses: taiki-e/cache-cargo-install-action@v2.1.1
        with:
          tool: cargo-check-external-types

      - name: check external types
        run: just check-external-types-all +${{ vars.RUST_VERSION_EXTERNAL_TYPES }}

.github/workflows/upload-doc.yml (deleted, 35 lines)
@@ -1,35 +0,0 @@
name: Upload Documentation

on:
  push:
    branches: [master]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly-x86_64-unknown-linux-gnu
          profile: minimal
          override: true

      - name: Build Docs
        uses: actions-rs/cargo@v1
        with:
          command: doc
          args: --workspace --all-features --no-deps

      - name: Tweak HTML
        run: echo '<meta http-equiv="refresh" content="0;url=actix_web/index.html">' > target/doc/index.html

      - name: Deploy to GitHub Pages
        uses: JamesIves/github-pages-deploy-action@3.7.1
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          BRANCH: gh-pages
          FOLDER: target/doc

.gitignore (4 changed lines)
@@ -19,3 +19,7 @@ guide/build/
 
 # Configuration directory generated by VSCode
 .vscode
+
+# code coverage
+/lcov.info
+/codecov.json

.prettierrc.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
overrides:
  - files: "*.md"
    options:
      printWidth: 9999
      proseWrap: never

.rustfmt.toml (new file, 3 lines)
@@ -0,0 +1,3 @@
group_imports = "StdExternalCrate"
imports_granularity = "Crate"
use_field_init_shorthand = true

CHANGES.md (933 changed lines)
@@ -1,932 +1,5 @@
-# Changes
+# Changelog
 
-## Unreleased - 2021-xx-xx
+Changelogs are kept separately for each crate in this repo.
 
+Actix Web changelog [is now here →](./actix-web/CHANGES.md).
-## 4.0.0-beta.16 - 2021-12-27
-### Changed
-- No longer require `Scope` service body type to be boxed. [#2523]
-- No longer require `Resource` service body type to be boxed. [#2526]
-
-[#2523]: https://github.com/actix/actix-web/pull/2523
-[#2526]: https://github.com/actix/actix-web/pull/2526
-
-
-## 4.0.0-beta.15 - 2021-12-17
-### Added
-- Method on `Responder` trait (`customize`) for customizing responders and `CustomizeResponder` struct. [#2510]
-- Implement `Debug` for `DefaultHeaders`. [#2510]
-
-### Changed
-- Align `DefaultHeader` method terminology, deprecating previous methods. [#2510]
-- Response service types in `ErrorHandlers` middleware now use `ServiceResponse<EitherBody<B>>` to allow changing the body type. [#2515]
-- Both variants in `ErrorHandlerResponse` now use `ServiceResponse<EitherBody<B>>`. [#2515]
-- Rename `test::{default_service => simple_service}`. Old name is deprecated. [#2518]
-- Rename `test::{read_response_json => call_and_read_body_json}`. Old name is deprecated. [#2518]
-- Rename `test::{read_response => call_and_read_body}`. Old name is deprecated. [#2518]
-- Relax body type and error bounds on test utilities. [#2518]
-
-### Removed
-- Top-level `EitherExtractError` export. [#2510]
-- Conversion implementations for `either` crate. [#2516]
-- `test::load_stream` and `test::load_body`; replace usage with `body::to_bytes`. [#2518]
-
-[#2510]: https://github.com/actix/actix-web/pull/2510
-[#2515]: https://github.com/actix/actix-web/pull/2515
-[#2516]: https://github.com/actix/actix-web/pull/2516
-[#2518]: https://github.com/actix/actix-web/pull/2518
-
-
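
As an aside (not part of the diff above), a minimal sketch of how the `customize()` method and `CustomizeResponder` from beta.15 can be used; the handler name and header are invented for illustration:

```rust
use actix_web::{http::StatusCode, web, Responder};

// Any `Responder` can be wrapped to tweak the status and headers before sending.
async fn greet(name: web::Path<String>) -> impl Responder {
    let name = name.into_inner();
    format!("Hello {name}!")
        .customize()
        .with_status(StatusCode::ACCEPTED)
        .insert_header(("x-hello", "world"))
}
```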
-## 4.0.0-beta.14 - 2021-12-11
-### Added
-- Methods on `AcceptLanguage`: `ranked` and `preference`. [#2480]
-- `AcceptEncoding` typed header. [#2482]
-- `Range` typed header. [#2485]
-- `HttpResponse::map_into_{left,right}_body` and `HttpResponse::map_into_boxed_body`. [#2468]
-- `ServiceResponse::map_into_{left,right}_body` and `HttpResponse::map_into_boxed_body`. [#2468]
-- Connection data set through the `HttpServer::on_connect` callback is now accessible only from the new `HttpRequest::conn_data()` and `ServiceRequest::conn_data()` methods. [#2491]
-- `HttpRequest::{req_data,req_data_mut}`. [#2487]
-- `ServiceResponse::into_parts`. [#2499]
-
-### Changed
-- Rename `Accept::{mime_precedence => ranked}`. [#2480]
-- Rename `Accept::{mime_preference => preference}`. [#2480]
-- Un-deprecate `App::data_factory`. [#2484]
-- `HttpRequest::url_for` no longer constructs URLs with query or fragment components. [#2430]
-- Remove `B` (body) type parameter on `App`. [#2493]
-- Add `B` (body) type parameter on `Scope`. [#2492]
-- Request-local data container is no longer part of a `RequestHead`. Instead it is a distinct part of a `Request`. [#2487]
-
-### Fixed
-- Accept wildcard `*` items in `AcceptLanguage`. [#2480]
-- Re-exports `dev::{BodySize, MessageBody, SizedStream}`. They are exposed through the `body` module. [#2468]
-- Typed headers containing lists that require one or more items now enforce this minimum. [#2482]
-
-### Removed
-- `ConnectionInfo::get`. [#2487]
-
-[#2430]: https://github.com/actix/actix-web/pull/2430
-[#2468]: https://github.com/actix/actix-web/pull/2468
-[#2480]: https://github.com/actix/actix-web/pull/2480
-[#2482]: https://github.com/actix/actix-web/pull/2482
-[#2484]: https://github.com/actix/actix-web/pull/2484
-[#2485]: https://github.com/actix/actix-web/pull/2485
-[#2487]: https://github.com/actix/actix-web/pull/2487
-[#2491]: https://github.com/actix/actix-web/pull/2491
-[#2492]: https://github.com/actix/actix-web/pull/2492
-[#2493]: https://github.com/actix/actix-web/pull/2493
-[#2499]: https://github.com/actix/actix-web/pull/2499
-
-
-## 4.0.0-beta.13 - 2021-11-30
-### Changed
-- Update `actix-tls` to `3.0.0-rc.1`. [#2474]
-
-[#2474]: https://github.com/actix/actix-web/pull/2474
-
-
-## 4.0.0-beta.12 - 2021-11-22
-### Changed
-- Compress middleware's response type is now `AnyBody<Encoder<B>>`. [#2448]
-
-### Fixed
-- Relax `Unpin` bound on `S` (stream) parameter of `HttpResponseBuilder::streaming`. [#2448]
-
-### Removed
-- `dev::ResponseBody` re-export; its function is replaced by the new `dev::AnyBody` enum. [#2446]
-
-[#2446]: https://github.com/actix/actix-web/pull/2446
-[#2448]: https://github.com/actix/actix-web/pull/2448
-
-
-## 4.0.0-beta.11 - 2021-11-15
-### Added
-- Re-export `dev::ServerHandle` from `actix-server`. [#2442]
-
-### Changed
-- `ContentType::html` now produces `text/html; charset=utf-8` instead of `text/html`. [#2423]
-- Update `actix-server` to `2.0.0-beta.9`. [#2442]
-
-[#2423]: https://github.com/actix/actix-web/pull/2423
-[#2442]: https://github.com/actix/actix-web/pull/2442
-
-
-## 4.0.0-beta.10 - 2021-10-20
-### Added
-- Option to allow `Json` extractor to work without a `Content-Type` header present. [#2362]
-- `#[actix_web::test]` macro for setting up tests with a runtime. [#2409]
-
-### Changed
-- Associated type `FromRequest::Config` was removed. [#2233]
-- Inner field made private on `web::Payload`. [#2384]
-- `Data::into_inner` and `Data::get_ref` no longer require `T: Sized`. [#2403]
-- Updated rustls to v0.20. [#2414]
-- Minimum supported Rust version (MSRV) is now 1.52.
-
-### Removed
-- Useless `ServiceResponse::checked_expr` method. [#2401]
-
-[#2233]: https://github.com/actix/actix-web/pull/2233
-[#2362]: https://github.com/actix/actix-web/pull/2362
-[#2384]: https://github.com/actix/actix-web/pull/2384
-[#2401]: https://github.com/actix/actix-web/pull/2401
-[#2403]: https://github.com/actix/actix-web/pull/2403
-[#2409]: https://github.com/actix/actix-web/pull/2409
-[#2414]: https://github.com/actix/actix-web/pull/2414
-
-
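
A brief illustration (not part of the diff) of the `#[actix_web::test]` macro from beta.10 combined with the existing test utilities; the route and assertion are arbitrary:

```rust
use actix_web::{test, web, App, HttpResponse};

// `#[actix_web::test]` sets up the async runtime for the test body.
#[actix_web::test]
async fn index_responds_ok() {
    let app =
        test::init_service(App::new().route("/", web::get().to(HttpResponse::Ok))).await;

    let req = test::TestRequest::get().uri("/").to_request();
    let res = test::call_service(&app, req).await;
    assert!(res.status().is_success());
}
```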
-## 4.0.0-beta.9 - 2021-09-09
-### Added
-- Re-export actix-service `ServiceFactory` in `dev` module. [#2325]
-
-### Changed
-- Compress middleware will return 406 Not Acceptable when no content encoding is acceptable to the client. [#2344]
-- Move `BaseHttpResponse` to `dev::Response`. [#2379]
-- Enable `TestRequest::param` to accept more than just static strings. [#2172]
-- Minimum supported Rust version (MSRV) is now 1.51.
-
-### Fixed
-- Fix quality parse error in Accept-Encoding header. [#2344]
-- Re-export correct type at `web::HttpResponse`. [#2379]
-
-[#2172]: https://github.com/actix/actix-web/pull/2172
-[#2325]: https://github.com/actix/actix-web/pull/2325
-[#2344]: https://github.com/actix/actix-web/pull/2344
-[#2379]: https://github.com/actix/actix-web/pull/2379
-
-
-## 4.0.0-beta.8 - 2021-06-26
-### Added
-- Add `ServiceRequest::parts_mut`. [#2177]
-- Add extractors for `Uri` and `Method`. [#2263]
-- Add extractors for `ConnectionInfo` and `PeerAddr`. [#2263]
-- Add `Route::service` for using hand-written services as handlers. [#2262]
-
-### Changed
-- Change compression algorithm feature flags. [#2250]
-- Deprecate `App::data` and `App::data_factory`. [#2271]
-- Smarter extraction of `ConnectionInfo` parts. [#2282]
-
-### Fixed
-- Scope and Resource middleware can access data items set on their own layer. [#2288]
-
-[#2177]: https://github.com/actix/actix-web/pull/2177
-[#2250]: https://github.com/actix/actix-web/pull/2250
-[#2271]: https://github.com/actix/actix-web/pull/2271
-[#2262]: https://github.com/actix/actix-web/pull/2262
-[#2263]: https://github.com/actix/actix-web/pull/2263
-[#2282]: https://github.com/actix/actix-web/pull/2282
-[#2288]: https://github.com/actix/actix-web/pull/2288
-
-
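
To illustrate the `Uri` and `Method` extractors mentioned in beta.8 above, a small hedged sketch; the handler name is made up:

```rust
use actix_web::{http::Method, http::Uri, Responder};

// `Method` and `Uri` implement `FromRequest`, so they can appear directly
// as handler parameters alongside other extractors.
async fn describe(method: Method, uri: Uri) -> impl Responder {
    format!("{method} {uri}")
}
```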
-## 4.0.0-beta.7 - 2021-06-17
-### Added
-- `HttpServer::worker_max_blocking_threads` for setting the blocking thread pool. [#2200]
-
-### Changed
-- Adjusted default JSON payload limit to 2MB (from 32kb) and included size and limits in the `JsonPayloadError::Overflow` error variant. [#2162]
-- `ServiceResponse::error_response` now uses body type of `Body`. [#2201]
-- `ServiceResponse::checked_expr` now returns a `Result`. [#2201]
-- Update `language-tags` to `0.3`.
-- `ServiceResponse::take_body`. [#2201]
-- `ServiceResponse::map_body` closure receives and returns `B` instead of `ResponseBody<B>` types. [#2201]
-- All error trait bounds in server service builders have changed from `Into<Error>` to `Into<Response<AnyBody>>`. [#2253]
-- All error trait bounds in message body and stream impls changed from `Into<Error>` to `Into<Box<dyn std::error::Error>>`. [#2253]
-- `HttpServer::{listen_rustls(), bind_rustls()}` now honor the ALPN protocols in the configuration parameter. [#2226]
-- `middleware::normalize` now will not try to normalize URIs with no valid path. [#2246]
-
-### Removed
-- `HttpResponse::take_body` and old `HttpResponse::into_body` method that casted body type. [#2201]
-
-[#2162]: https://github.com/actix/actix-web/pull/2162
-[#2200]: https://github.com/actix/actix-web/pull/2200
-[#2201]: https://github.com/actix/actix-web/pull/2201
-[#2253]: https://github.com/actix/actix-web/pull/2253
-[#2246]: https://github.com/actix/actix-web/pull/2246
-
-
-## 4.0.0-beta.6 - 2021-04-17
-### Added
-- `HttpResponse` and `HttpResponseBuilder` structs. [#2065]
-
-### Changed
-- Most error types are now marked `#[non_exhaustive]`. [#2148]
-- Methods on `ContentDisposition` that took `T: AsRef<str>` now take `impl AsRef<str>`.
-
-[#2065]: https://github.com/actix/actix-web/pull/2065
-[#2148]: https://github.com/actix/actix-web/pull/2148
-
-
-## 4.0.0-beta.5 - 2021-04-02
-### Added
-- `Header` extractor for extracting common HTTP headers in handlers. [#2094]
-- Added `TestServer::client_headers` method. [#2097]
-
-### Fixed
-- Double ampersand in Logger format is escaped correctly. [#2067]
-
-### Changed
-- `CustomResponder` would return error as `HttpResponse` when `CustomResponder::with_header` failed
-  instead of skipping. (Only the first error is kept when multiple errors occur.) [#2093]
-
-### Removed
-- The `client` mod was removed. Clients should now use `awc` directly.
-  [871ca5e4](https://github.com/actix/actix-web/commit/871ca5e4ae2bdc22d1ea02701c2992fa8d04aed7)
-- Integration testing was moved to new `actix-test` crate. Namely these items from the `test`
-  module: `TestServer`, `TestServerConfig`, `start`, `start_with`, and `unused_addr`. [#2112]
-
-[#2067]: https://github.com/actix/actix-web/pull/2067
-[#2093]: https://github.com/actix/actix-web/pull/2093
-[#2094]: https://github.com/actix/actix-web/pull/2094
-[#2097]: https://github.com/actix/actix-web/pull/2097
-[#2112]: https://github.com/actix/actix-web/pull/2112
-
-
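
A minimal hedged sketch of the `Header` extractor added in beta.5 above; the handler is illustrative:

```rust
use actix_web::{http::header::ContentType, web::Header, Responder};

// `web::Header<T>` extracts and parses a typed header, rejecting the request
// when the header is missing or malformed.
async fn show_content_type(Header(content_type): Header<ContentType>) -> impl Responder {
    format!("payload content type: {}", content_type.0)
}
```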
-## 4.0.0-beta.4 - 2021-03-09
-### Changed
-- Feature `cookies` is now optional and enabled by default. [#1981]
-- `JsonBody::new` returns a default limit of 32kB to be consistent with `JsonConfig` and the default
-  behaviour of the `web::Json<T>` extractor. [#2010]
-
-[#1981]: https://github.com/actix/actix-web/pull/1981
-[#2010]: https://github.com/actix/actix-web/pull/2010
-
-
-## 4.0.0-beta.3 - 2021-02-10
-- Update `actix-web-codegen` to `0.5.0-beta.1`.
-
-
-## 4.0.0-beta.2 - 2021-02-10
-### Added
-- The method `Either<web::Json<T>, web::Form<T>>::into_inner()` which returns the inner type for
-  whichever variant was created. Also works for `Either<web::Form<T>, web::Json<T>>`. [#1894]
-- Add `services!` macro for helping register multiple services to `App`. [#1933]
-- Enable registering a vec of services of the same type to `App`. [#1933]
-
-### Changed
-- Rework `Responder` trait to be sync and return `Response`/`HttpResponse` directly,
-  making it simpler and more performant. [#1891]
-- `ServiceRequest::into_parts` and `ServiceRequest::from_parts` can no longer fail. [#1893]
-- `ServiceRequest::from_request` can no longer fail. [#1893]
-- Our `Either` type now uses `Left`/`Right` variants (instead of `A`/`B`). [#1894]
-- `test::{call_service, read_response, read_response_json, send_request}` take `&Service` as an argument. [#1905]
-- `App::wrap_fn`, `Resource::wrap_fn` and `Scope::wrap_fn` provide `&Service` in the closure argument. [#1905]
-- `web::block` no longer requires the output to be a `Result`. [#1957]
-
-### Fixed
-- Multiple calls to `App::data` with the same type now keeps the latest call's data. [#1906]
-
-### Removed
-- Public field of `web::Path` has been made private. [#1894]
-- Public field of `web::Query` has been made private. [#1894]
-- `TestRequest::with_header`; use `TestRequest::default().insert_header()`. [#1869]
-- `AppService::set_service_data`; for custom HTTP service factories adding application data, use the
-  layered data model by calling `ServiceRequest::add_data_container` when handling
-  requests instead. [#1906]
-
-[#1891]: https://github.com/actix/actix-web/pull/1891
-[#1893]: https://github.com/actix/actix-web/pull/1893
-[#1894]: https://github.com/actix/actix-web/pull/1894
-[#1869]: https://github.com/actix/actix-web/pull/1869
-[#1905]: https://github.com/actix/actix-web/pull/1905
-[#1906]: https://github.com/actix/actix-web/pull/1906
-[#1933]: https://github.com/actix/actix-web/pull/1933
-[#1957]: https://github.com/actix/actix-web/pull/1957
-
-
|
||||||
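To make the `Either` change concrete, here is a minimal hedged sketch of a handler that accepts a payload either as JSON or as a URL-encoded form and uses `into_inner()` to get at the value regardless of which variant matched. The route path and struct are illustrative, and `serde` with the `derive` feature is assumed as a dependency.

```rust
use actix_web::{post, web, App, Either, HttpServer, Responder};
use serde::Deserialize;

#[derive(Deserialize)]
struct Login {
    name: String,
}

// Accepts either an `application/json` or an `application/x-www-form-urlencoded`
// body; `into_inner()` erases which variant actually matched.
#[post("/login")]
async fn login(payload: Either<web::Json<Login>, web::Form<Login>>) -> impl Responder {
    let Login { name } = payload.into_inner();
    format!("hello, {}", name)
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(login))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```
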
## 4.0.0-beta.1 - 2021-01-07

### Added

- `Compat` middleware enabling the generic response body/error type of middlewares like `Logger` and `Compress` to be used in `middleware::Condition` and in `Resource`/`Scope` services (see the sketch after this section). [#1865]

### Changed

- Update `actix-*` dependencies to tokio `1.0` based versions. [#1813]
- Bumped `rand` to `0.8`.
- Update `rust-tls` to `0.19`. [#1813]
- Rename `Handler` to `HandlerService` and rename `Factory` to `Handler`. [#1852]
- The default `TrailingSlash` is now `Trim`, in line with existing documentation. See the migration guide for implications. [#1875]
- Rename `DefaultHeaders::{content_type => add_content_type}`. [#1875]
- MSRV is now 1.46.0.

### Fixed

- Added the underlying parse error to `test::read_body_json`'s panic message. [#1812]

### Removed

- Public modules `middleware::{normalize, err_handlers}`. All necessary middleware structs are now exposed directly by the `middleware` module.
- Remove `actix-threadpool` as a dependency. The `actix_threadpool::BlockingError` error type can be imported from the `actix_web::error` module. [#1878]

[#1812]: https://github.com/actix/actix-web/pull/1812
[#1813]: https://github.com/actix/actix-web/pull/1813
[#1852]: https://github.com/actix/actix-web/pull/1852
[#1865]: https://github.com/actix/actix-web/pull/1865
[#1875]: https://github.com/actix/actix-web/pull/1875
[#1878]: https://github.com/actix/actix-web/pull/1878

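The `Compat` wrapper mainly matters when a middleware with a generic body type, such as `Logger` or `Compress`, needs to go inside `middleware::Condition` or be attached to a `Resource`/`Scope`. The following is a small hedged sketch of that pattern against the actix-web 4 API; the toggle flag is illustrative only.

```rust
use actix_web::{middleware, web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Illustrative toggle; in a real app this might come from configuration.
    let enable_logging = true;

    HttpServer::new(move || {
        App::new()
            // `Compat` adapts `Logger`'s generic body/error types so that it
            // can be used inside `Condition`.
            .wrap(middleware::Condition::new(
                enable_logging,
                middleware::Compat::new(middleware::Logger::default()),
            ))
            .route("/", web::get().to(|| async { HttpResponse::Ok().body("ok") }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```
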
## 3.3.3 - 2021-12-18

### Changed

- Soft-deprecate `NormalizePath::default()`, noting the upcoming behavior change in v4. [#2529]

[#2529]: https://github.com/actix/actix-web/pull/2529


## 3.3.2 - 2020-12-01

### Fixed

- Removed an occasional panic (`unwrap` on `None`) in `NormalizePathNormalization`. [#1762]
- Fix `match_pattern()` returning `None` for a scope with an empty path resource. [#1798]
- Increase minimum `socket2` version. [#1803]

[#1762]: https://github.com/actix/actix-web/pull/1762
[#1798]: https://github.com/actix/actix-web/pull/1798
[#1803]: https://github.com/actix/actix-web/pull/1803


## 3.3.1 - 2020-11-29

- Ensure `actix-http` dependency uses the same `serde_urlencoded`.


## 3.3.0 - 2020-11-25

### Added

- Add `Either<A, B>` extractor helper. [#1788]

### Changed

- Upgrade `serde_urlencoded` to `0.7`. [#1773]

[#1773]: https://github.com/actix/actix-web/pull/1773
[#1788]: https://github.com/actix/actix-web/pull/1788

## 3.2.0 - 2020-10-30

### Added

- Implement `exclude_regex` for Logger middleware (see the sketch after this section). [#1723]
- Add request-local data extractor `web::ReqData`. [#1748]
- Add ability to register a closure for request middleware logging. [#1749]
- Add `app_data` to `ServiceConfig`. [#1757]
- Expose `on_connect` for access to the connection stream before the request is handled. [#1754]

### Changed

- Updated actix-web-codegen dependency for access to the new `#[route(...)]` multi-method macro.
- Print the non-configured `Data<T>` type when attempting extraction. [#1743]
- Re-export `bytes::Buf` and `bytes::BufMut` in the `web` module. [#1750]
- Upgrade `pin-project` to `1.0`.

[#1723]: https://github.com/actix/actix-web/pull/1723
[#1743]: https://github.com/actix/actix-web/pull/1743
[#1748]: https://github.com/actix/actix-web/pull/1748
[#1749]: https://github.com/actix/actix-web/pull/1749
[#1750]: https://github.com/actix/actix-web/pull/1750
[#1754]: https://github.com/actix/actix-web/pull/1754

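A minimal hedged sketch of the new `Logger::exclude_regex` call, assuming the method accepts a regex pattern string as it does in released 3.x/4.x versions; the health-check path is only an example.

```rust
use actix_web::{middleware::Logger, web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // A `log` backend such as `env_logger` must be initialised elsewhere to
    // actually see the middleware's output.
    HttpServer::new(|| {
        App::new()
            // Log all requests except those whose path matches the regex,
            // e.g. noisy health-check probes.
            .wrap(Logger::default().exclude_regex("^/health"))
            .route("/health", web::get().to(|| async { HttpResponse::Ok().finish() }))
            .route("/", web::get().to(|| async { HttpResponse::Ok().body("hello") }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```
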
## 3.1.0 - 2020-09-29

### Changed

- Add `TrailingSlash::MergeOnly` behaviour to `NormalizePath`, which allows `NormalizePath` to retain any trailing slashes. [#1695]
- Remove the `std::marker::Sized` bound from `web::Data` to support storing `Arc<dyn Trait>` via `web::Data::from` (see the sketch after this section). [#1710]

### Fixed

- `ResourceMap` debug printing is no longer infinitely recursive. [#1708]

[#1695]: https://github.com/actix/actix-web/pull/1695
[#1708]: https://github.com/actix/actix-web/pull/1708
[#1710]: https://github.com/actix/actix-web/pull/1710

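Here is a small hedged sketch of the `web::Data::from(Arc<dyn Trait>)` pattern this change enables: a trait object shared as application data and extracted as `web::Data<dyn Counter>`. The trait, struct and route names are illustrative; only `Data::from` and the relaxed `Sized` bound come from the changelog entry.

```rust
use std::sync::{
    atomic::{AtomicUsize, Ordering},
    Arc,
};

use actix_web::{get, web, App, HttpServer, Responder};

// Illustrative trait object stored as shared application data.
trait Counter: Send + Sync {
    fn increment(&self) -> usize;
}

struct AtomicCounter(AtomicUsize);

impl Counter for AtomicCounter {
    fn increment(&self) -> usize {
        self.0.fetch_add(1, Ordering::Relaxed) + 1
    }
}

#[get("/hits")]
async fn hits(counter: web::Data<dyn Counter>) -> impl Responder {
    format!("hit number {}", counter.increment())
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // `Data::from` wraps an existing `Arc`, so unsized trait objects work too.
    let counter: Arc<dyn Counter> = Arc::new(AtomicCounter(AtomicUsize::new(0)));
    let data = web::Data::from(counter);

    HttpServer::new(move || App::new().app_data(data.clone()).service(hits))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```
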
## 3.0.2 - 2020-09-15

### Fixed

- `NormalizePath`, when used with `TrailingSlash::Trim`, no longer trims the root path "/". [#1678]

[#1678]: https://github.com/actix/actix-web/pull/1678


## 3.0.1 - 2020-09-13

### Changed

- `middleware::normalize::TrailingSlash` enum is now accessible. [#1673]

[#1673]: https://github.com/actix/actix-web/pull/1673


## 3.0.0 - 2020-09-11

- No significant changes from `3.0.0-beta.4`.


## 3.0.0-beta.4 - 2020-09-09

### Added

- `middleware::NormalizePath` now has configurable behavior for either always having a trailing slash, or, as the new addition, always trimming trailing slashes (see the sketch after this section). [#1639]

### Changed

- Update actix-codec and actix-utils dependencies. [#1634]
- `FormConfig` and `JsonConfig` configurations are now also considered when set using `App::data`. [#1641]
- `HttpServer::maxconn` is renamed to the more expressive `HttpServer::max_connections`. [#1655]
- `HttpServer::maxconnrate` is renamed to the more expressive `HttpServer::max_connection_rate`. [#1655]

[#1634]: https://github.com/actix/actix-web/pull/1634
[#1639]: https://github.com/actix/actix-web/pull/1639
[#1641]: https://github.com/actix/actix-web/pull/1641
[#1655]: https://github.com/actix/actix-web/pull/1655

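A minimal hedged sketch of configuring that behaviour: wrapping the app in `NormalizePath` constructed with an explicit `TrailingSlash` variant. The import path shown matches versions where `TrailingSlash` is exposed from the `middleware` module; on versions where only `middleware::normalize::TrailingSlash` is public, adjust the import accordingly.

```rust
use actix_web::{
    middleware::{NormalizePath, TrailingSlash},
    web, App, HttpResponse, HttpServer,
};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Trim trailing slashes, so "/items/" is routed like "/items".
            .wrap(NormalizePath::new(TrailingSlash::Trim))
            .route("/items", web::get().to(|| async { HttpResponse::Ok().body("items") }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```
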
## 3.0.0-beta.3 - 2020-08-17

### Changed

- Update `rustls` to 0.18.


## 3.0.0-beta.2 - 2020-08-17

### Changed

- `PayloadConfig` is now also considered in `Bytes` and `String` extractors when set using `App::data`. [#1610]
- `web::Path` now has a public representation, `web::Path(pub T)`, that enables destructuring (see the sketch after this section). [#1594]
- `ServiceRequest::app_data` allows retrieval of non-Data data without splitting into parts to access `HttpRequest`, which already allows this. [#1618]
- Re-export all error types from `awc`. [#1621]
- MSRV is now 1.42.0.

### Fixed

- Memory leak of app data in pooled requests. [#1609]

[#1594]: https://github.com/actix/actix-web/pull/1594
[#1609]: https://github.com/actix/actix-web/pull/1609
[#1610]: https://github.com/actix/actix-web/pull/1610
[#1618]: https://github.com/actix/actix-web/pull/1618
[#1621]: https://github.com/actix/actix-web/pull/1621

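The public `web::Path(pub T)` representation allows destructuring directly in the handler signature, as in this small sketch written in the same style as the example README further down in this page (note that the 4.0 betas later made the field private again, so this applies to the 3.x line).

```rust
use actix_web::{get, web, App, HttpServer, Responder};

// Destructure the tuple of path parameters right in the handler signature.
#[get("/{id}/{name}/index.html")]
async fn index(web::Path((id, name)): web::Path<(u32, String)>) -> impl Responder {
    format!("Hello {}! id: {}", name, id)
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(index))
        .bind("127.0.0.1:8080")?
        .run()
        .await
}
```
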
## 3.0.0-beta.1 - 2020-07-13

### Added

- Re-export `actix_rt::main` as `actix_web::main`.
- `HttpRequest::match_pattern` and `ServiceRequest::match_pattern` for extracting the matched resource pattern.
- `HttpRequest::match_name` and `ServiceRequest::match_name` for extracting the matched resource name.

### Changed

- Fix `actix_http::h1::dispatcher` so it returns when `HW_BUFFER_SIZE` is reached. Should reduce peak memory consumption during large uploads. [#1550]
- Migrate cookie handling to the `cookie` crate. Actix Web no longer requires the `ring` dependency.
- MSRV is now 1.41.1.

### Fixed

- `NormalizePath` improved consistency when a path needs slashes both added _and_ removed.


## 3.0.0-alpha.3 - 2020-05-21

### Added

- Add option to create `Data<T>` from `Arc<T>`. [#1509]

### Changed

- Resources and Scopes can now access non-overridden data types set on the App (or containing scopes) when setting their own data. [#1486]
- Fix audit issue logging by default peer address. [#1485]
- Bump minimum supported Rust version to 1.40.
- Replace deprecated `net2` crate with `socket2`.

[#1485]: https://github.com/actix/actix-web/pull/1485
[#1509]: https://github.com/actix/actix-web/pull/1509

## [3.0.0-alpha.2] - 2020-05-08

### Changed

- `{Resource,Scope}::default_service(f)` handlers now support app data extraction. [#1452]
- Implement `std::error::Error` for our custom errors. [#1422]
- `NormalizePath` middleware now appends a trailing `/` so that routes of the form `/example/` respond to `/example` requests. [#1433]
- Remove the `failure` feature and support.

[#1422]: https://github.com/actix/actix-web/pull/1422
[#1433]: https://github.com/actix/actix-web/pull/1433
[#1452]: https://github.com/actix/actix-web/pull/1452
[#1486]: https://github.com/actix/actix-web/pull/1486


## [3.0.0-alpha.1] - 2020-03-11

### Added

- Add helper function for creating routes with `TRACE` method guard: `web::trace()`.
- Add convenience functions `test::read_body_json()` and `test::TestRequest::send_request()` for testing.

### Changed

- Use `sha-1` crate instead of unmaintained `sha1` crate.
- Skip empty chunks when returning a response from a `Stream`. [#1308]
- Update the `time` dependency to 0.2.7.
- Update `actix-tls` dependency to 2.0.0-alpha.1.
- Update `rustls` dependency to 0.17.

[#1308]: https://github.com/actix/actix-web/pull/1308

## [2.0.0] - 2019-12-25

### Changed

- Rename `HttpServer::start()` to `HttpServer::run()`.
- Allow gracefully stopping the test server via `TestServer::stop()`.
- Allow specifying multi-patterns for resources.


## [2.0.0-rc] - 2019-12-20

### Changed

- Move `BodyEncoding` to the `dev` module. #1220
- Allow setting `peer_addr` for `TestRequest`. #1074
- Make `web::Data` deref to `Arc<T>`. #1214
- Rename `App::register_data()` to `App::app_data()`.
- `HttpRequest::app_data<T>()` returns `Option<&T>` instead of `Option<&Data<T>>`.

### Fixed

- Fix `AppConfig::secure()` always being false. #1202

## [2.0.0-alpha.6] - 2019-12-15

### Fixed

- Fixed compilation with default features off.


## [2.0.0-alpha.5] - 2019-12-13

### Added

- Add test server, `test::start()` and `test::start_with()`.


## [2.0.0-alpha.4] - 2019-12-08

### Deleted

- Delete `HttpServer::run()`, it is not useful with async/await.


## [2.0.0-alpha.3] - 2019-12-07

### Changed

- Migrate to tokio 0.2.


## [2.0.0-alpha.1] - 2019-11-22

### Changed

- Migrated to `std::future`.
- Remove implementation of `Responder` for `()`. (#1167)

## [1.0.9] - 2019-11-14

### Added

- Add `Payload::into_inner` method and make the stored `dev::Payload` public. (#1110)

### Changed

- Support `Host` guards when the `Host` header is unset (e.g. HTTP/2 requests). (#1129)


## [1.0.8] - 2019-09-25

### Added

- Add `Scope::register_data` and `Resource::register_data` methods, parallel to `App::register_data`.
- Add `middleware::Condition` that conditionally enables another middleware.
- Allow re-constructing `ServiceRequest` from `HttpRequest` and `Payload`.
- Add `HttpServer::listen_uds` for the ability to listen on a UDS FD rather than a path, which is useful for example with systemd.

### Changed

- Make `UrlEncodedError::Overflow` more informative.
- Use actix-testing for testing utils.

## [1.0.7] - 2019-08-29

### Fixed

- Request Extensions leak. #1062


## [1.0.6] - 2019-08-28

### Added

- Re-implement Host predicate. (#989)
- `Form` implements `Responder`, returning an `application/x-www-form-urlencoded` response.
- Add `into_inner` to `Data`.
- Add `test::TestRequest::set_form()` convenience method to automatically serialize data and set the header in test requests.

### Changed

- `Query` payload made `pub`. Allows the user to pattern-match the payload.
- Enable `rust-tls` feature for client. #1045
- Update serde_urlencoded to 0.6.1.
- Update url to 2.1.

## [1.0.5] - 2019-07-18

### Added

- Unix domain sockets (`HttpServer::bind_uds`). #92
- Actix now always logs errors resulting in "internal server error" responses, at the `error` logging level.

### Fixed

- Restored logging of errors through the `Logger` middleware.


## [1.0.4] - 2019-07-17

### Added

- Add `Responder` impl for `(T, StatusCode) where T: Responder`.
- Allow accessing the app's resource map via the `ServiceRequest::resource_map()` and `HttpRequest::resource_map()` methods.

### Changed

- Upgrade `rand` dependency version to 0.7.

## [1.0.3] - 2019-06-28

### Added

- Support asynchronous data factories. #850

### Changed

- Use `encoding_rs` crate instead of unmaintained `encoding` crate.


## [1.0.2] - 2019-06-17

### Changed

- Move cors middleware to the `actix-cors` crate.
- Move identity middleware to the `actix-identity` crate.


## [1.0.1] - 2019-06-17

### Added

- Add support for PathConfig. #903
- Add `middleware::identity::RequestIdentity` trait to `get_identity` from `HttpMessage`.

### Changed

- Move cors middleware to the `actix-cors` crate.
- Move identity middleware to the `actix-identity` crate.
- Disable default feature `secure-cookies`.
- Allow testing an app that uses async actors. #897
- Re-apply patch from #637. #894

### Fixed

- `HttpRequest::url_for` is broken with nested scopes. #915

## [1.0.0] - 2019-06-05

### Added

- Add `Scope::configure()` method.
- Add `ServiceRequest::set_payload()` method.
- Add `test::TestRequest::set_json()` convenience method to automatically serialize data and set the header in test requests.
- Add macros for head, options, trace, connect and patch http methods.

### Changed

- Drop an unnecessary `Option<_>` indirection around `ServerBuilder` from `HttpServer`. #863

### Fixed

- Fix Logger request time format, and use rfc3339. #867
- Clear http requests pool on app service drop. #860


## [1.0.0-rc] - 2019-05-18

### Added

- Add `Query<T>::from_query()` to extract parameters from a query string (see the sketch after this section). #846
- `QueryConfig`, similar to `JsonConfig`, for customizing error handling of query extractors.

### Changed

- `JsonConfig` is now `Send + Sync`; this implies that `error_handler` must be `Send + Sync` too.

### Fixed

- Codegen with parameters in the path only resolves the first registered endpoint. #841

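For reference, a small hedged sketch of `Query::from_query()`, which parses a raw query string outside of the extraction pipeline. The struct and query string are illustrative, `serde` with the `derive` feature is assumed, and error handling is collapsed into `expect` for brevity.

```rust
use actix_web::web::Query;
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Pagination {
    page: u32,
    per_page: u32,
}

fn main() {
    // Parse a query string directly, without going through a request.
    let query = Query::<Pagination>::from_query("page=2&per_page=50")
        .expect("query string should deserialize into Pagination");

    // `Query<T>` derefs to `T`, so fields are accessed directly.
    println!("page {} with {} items", query.page, query.per_page);
}
```
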
## [1.0.0-beta.4] - 2019-05-12

### Added

- Allow setting/overriding app data on the scope level.

### Changed

- `App::configure` takes an `FnOnce` instead of `Fn`.
- Upgrade actix-net crates.


## [1.0.0-beta.3] - 2019-05-04

### Added

- Add helper function for executing futures, `test::block_fn()`.

### Changed

- Extractor configuration can be registered with `App::data()` or with `Resource::data()`. #775
- Route data is unified with app data; `Route::data()` moved to the resource level, to `Resource::data()`.
- CORS handling without headers. #702
- Allow constructing `Data` instances to avoid double `Arc` for `Send + Sync` types.

### Fixed

- Fix `NormalizePath` middleware impl. #806

### Deleted

- `App::data_factory()` is deleted.

## [1.0.0-beta.2] - 2019-04-24

### Added

- Add raw services support via `web::service()`.
- Add helper functions for reading response body, `test::read_body()`.
- Add support for `remainder match` (i.e. "/path/{tail}*").
- Extend `Responder` trait, allow overriding status code and headers.
- Store visit and login timestamp in the identity cookie. #502

### Changed

- `.to_async()` handler can return `Responder` type. #792

### Fixed

- Fix async `web::Data` factory handling.


## [1.0.0-beta.1] - 2019-04-20

### Added

- Add helper functions for reading test response body, `test::read_response()` and `test::read_response_json()`.
- Add `.peer_addr()`. #744
- Add `NormalizePath` middleware.

### Changed

- Rename `RouterConfig` to `ServiceConfig`.
- Rename `test::call_success` to `test::call_service`.
- Removed `ServiceRequest::from_parts()` as it is unsafe to create from parts.
- `CookieIdentityPolicy::max_age()` accepts a value in seconds.

### Fixed

- Fixed `TestRequest::app_data()`.

## [1.0.0-alpha.6] - 2019-04-14

### Changed

- Allow using any service as the default service.
- Remove generic type for request payload, always use default.
- Removed `Decompress` middleware. `Bytes`, `String`, `Json` and `Form` extractors automatically decompress payloads.
- Make extractor config type explicit. Add `FromRequest::Config` associated type.


## [1.0.0-alpha.5] - 2019-04-12

### Added

- Added async io `TestBuffer` for testing.

### Deleted

- Removed native-tls support.


## [1.0.0-alpha.4] - 2019-04-08

### Added

- `App::configure()` allows offloading app configuration to different methods.
- Added `URLPath` option for logger.
- Added `ServiceRequest::app_data()`, returns `Data<T>`.
- Added `ServiceFromRequest::app_data()`, returns `Data<T>`.

### Changed

- `FromRequest` trait refactoring.
- Move multipart support to the actix-multipart crate.

### Fixed

- Fix body propagation in `Response::from_error`. #760

## [1.0.0-alpha.3] - 2019-04-02

### Changed

- Renamed `TestRequest::to_service()` to `TestRequest::to_srv_request()`.
- Renamed `TestRequest::to_response()` to `TestRequest::to_srv_response()`.
- Removed `Deref` impls.

### Removed

- Removed unused `actix_web::web::md()`.


## [1.0.0-alpha.2] - 2019-03-29

### Added

- Rustls support.

### Changed

- Use forked cookie.
- `Multipart::Field` renamed to `MultipartField`.


## [1.0.0-alpha.1] - 2019-03-28

### Changed

- Complete architecture re-design.
- Return 405 response if no matching route found within resource. #538

Cargo.toml (161 changes)

@@ -1,125 +1,25 @@
-[package]
-name = "actix-web"
-version = "4.0.0-beta.16"
-authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
-description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
-keywords = ["actix", "http", "web", "framework", "async"]
-categories = [
-  "network-programming",
-  "asynchronous",
-  "web-programming::http-server",
-  "web-programming::websocket"
-]
-homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
-license = "MIT OR Apache-2.0"
-edition = "2018"
-
-[package.metadata.docs.rs]
-# features that docs.rs will build with
-features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies", "secure-cookies"]
-rustdoc-args = ["--cfg", "docsrs"]
-
-[lib]
-name = "actix_web"
-path = "src/lib.rs"
-
 [workspace]
 resolver = "2"
 members = [
-  ".",
-  "awc",
-  "actix-http",
   "actix-files",
+  "actix-http-test",
+  "actix-http",
   "actix-multipart",
+  "actix-multipart-derive",
+  "actix-router",
+  "actix-test",
   "actix-web-actors",
   "actix-web-codegen",
-  "actix-http-test",
-  "actix-test",
-  "actix-router",
+  "actix-web",
+  "awc",
 ]
 
-[features]
-default = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
-
-# Brotli algorithm content-encoding support
-compress-brotli = ["actix-http/compress-brotli", "__compress"]
-# Gzip and deflate algorithms content-encoding support
-compress-gzip = ["actix-http/compress-gzip", "__compress"]
-# Zstd algorithm content-encoding support
-compress-zstd = ["actix-http/compress-zstd", "__compress"]
-
-# support for cookies
-cookies = ["cookie"]
-
-# secure cookies feature
-secure-cookies = ["cookie/secure"]
-
-# openssl
-openssl = ["actix-http/openssl", "actix-tls/accept", "actix-tls/openssl"]
-
-# rustls
-rustls = ["actix-http/rustls", "actix-tls/accept", "actix-tls/rustls"]
-
-# Internal (PRIVATE!) features used to aid testing and checking feature status.
-# Don't rely on these whatsoever. They may disappear at anytime.
-__compress = []
-
-# io-uring feature only avaiable for Linux OSes.
-experimental-io-uring = ["actix-server/io-uring"]
-
-[dependencies]
-actix-codec = "0.4.1"
-actix-macros = "0.2.3"
-actix-rt = "2.3"
-actix-server = "2.0.0-rc.2"
-actix-service = "2.0.0"
-actix-utils = "3.0.0"
-actix-tls = { version = "3.0.0", default-features = false, optional = true }
-
-actix-http = "3.0.0-beta.17"
-actix-router = "0.5.0-beta.3"
-actix-web-codegen = "0.5.0-beta.6"
-
-ahash = "0.7"
-bytes = "1"
-cfg-if = "1"
-cookie = { version = "0.15", features = ["percent-encode"], optional = true }
-derive_more = "0.99.5"
-encoding_rs = "0.8"
-futures-core = { version = "0.3.7", default-features = false }
-futures-util = { version = "0.3.7", default-features = false }
-itoa = "1"
-language-tags = "0.3"
-once_cell = "1.5"
-log = "0.4"
-mime = "0.3"
-paste = "1"
-pin-project-lite = "0.2.7"
-regex = "1.4"
-serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
-serde_urlencoded = "0.7"
-smallvec = "1.6.1"
-socket2 = "0.4.0"
-time = { version = "0.3", default-features = false, features = ["formatting"] }
-url = "2.1"
-
-[dev-dependencies]
-actix-test = { version = "0.1.0-beta.10", features = ["openssl", "rustls"] }
-awc = { version = "3.0.0-beta.15", features = ["openssl"] }
-
-brotli2 = "0.3.2"
-criterion = { version = "0.3", features = ["html_reports"] }
-env_logger = "0.9"
-flate2 = "1.0.13"
-futures-util = { version = "0.3.7", default-features = false, features = ["std"] }
-rand = "0.8"
-rcgen = "0.8"
-rustls-pemfile = "0.2"
-tls-openssl = { package = "openssl", version = "0.10.9" }
-tls-rustls = { package = "rustls", version = "0.20.0" }
-zstd = "0.9"
+[workspace.package]
+homepage = "https://actix.rs"
+repository = "https://github.com/actix/actix-web"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.75"
 
 [profile.dev]
 # Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.

@@ -135,9 +35,10 @@ actix-files = { path = "actix-files" }
 actix-http = { path = "actix-http" }
 actix-http-test = { path = "actix-http-test" }
 actix-multipart = { path = "actix-multipart" }
+actix-multipart-derive = { path = "actix-multipart-derive" }
 actix-router = { path = "actix-router" }
 actix-test = { path = "actix-test" }
-actix-web = { path = "." }
+actix-web = { path = "actix-web" }
 actix-web-actors = { path = "actix-web-actors" }
 actix-web-codegen = { path = "actix-web-codegen" }
 awc = { path = "awc" }

@@ -151,30 +52,10 @@ awc = { path = "awc" }
 # actix-tls = { path = "../actix-net/actix-tls" }
 # actix-server = { path = "../actix-net/actix-server" }
 
-[[test]]
-name = "test_server"
-required-features = ["compress-brotli", "compress-gzip", "compress-zstd", "cookies"]
-
-[[example]]
-name = "basic"
-required-features = ["compress-gzip"]
-
-[[example]]
-name = "uds"
-required-features = ["compress-gzip"]
-
-[[example]]
-name = "on_connect"
-required-features = []
-
-[[bench]]
-name = "server"
-harness = false
-
-[[bench]]
-name = "service"
-harness = false
-
-[[bench]]
-name = "responder"
-harness = false
+[workspace.lints.rust]
+rust_2018_idioms = { level = "deny" }
+future_incompatible = { level = "deny" }
+nonstandard_style = { level = "deny" }
+
+[workspace.lints.clippy]
+# clone_on_ref_ptr = { level = "deny" }

MIGRATION.md (677 lines removed; former content below)

@@ -1,677 +0,0 @@

## Unreleased

- The default `NormalizePath` behavior now strips trailing slashes by default. This was previously documented to be the case in v3 but the behavior now matches. The effect is that routes defined with trailing slashes will become inaccessible when using `NormalizePath::default()`. As such, calling `NormalizePath::default()` will log a warning. It is advised that the `new` method be used instead.

  Before: `#[get("/test/")]`
  After: `#[get("/test")]`

  Alternatively, explicitly require trailing slashes: `NormalizePath::new(TrailingSlash::Always)`.

- The `type Config` of `FromRequest` was removed.

- Feature flag `compress` has been split into its supported algorithms (brotli, gzip, zstd). By default all compression algorithms are enabled. To select which algorithms to include with `middleware::Compress`, use the following flags:

  - `compress-brotli`
  - `compress-gzip`
  - `compress-zstd`

  If you had set dedicated `actix-web` features in your `Cargo.toml` and still want compression enabled, change the feature selection as below:

  Before: `"compress"`
  After: `"compress-brotli", "compress-gzip", "compress-zstd"`

## 3.0.0

- The return type for `ServiceRequest::app_data::<T>()` was changed from returning a `Data<T>` to simply a `T`. To access a `Data<T>` use `ServiceRequest::app_data::<Data<T>>()`.

- Cookie handling has been offloaded to the `cookie` crate:
  * `USERINFO_ENCODE_SET` is no longer exposed. Percent-encoding is still supported; check docs.
  * Some types now require lifetime parameters.

- The time crate was updated to `v0.2`, a major breaking change to the time crate, which affects any `actix-web` method previously expecting a time v0.1 input.

- Setting a cookie's SameSite property explicitly to `SameSite::None` will now result in `SameSite=None` being sent with the response Set-Cookie header. To create a cookie without a SameSite attribute, remove any calls setting same_site.

- actix-http support for Actors messages was moved to the actix-http crate and is enabled with feature `actors`.

- The content_length function is removed from actix-http. You can set Content-Length by normally setting the response body or by calling the no_chunking function.

- The `BodySize::Sized64` variant has been removed. `BodySize::Sized` now receives a `u64` instead of a `usize`.

- Code that was using `path.<index>` to access a `web::Path<(A, B, C)>`'s elements now needs to use destructuring or `.into_inner()`. For example:

  ```rust
  // Previously:
  async fn some_route(path: web::Path<(String, String)>) -> String {
      format!("Hello, {} {}", path.0, path.1)
  }

  // Now (this also worked before):
  async fn some_route(path: web::Path<(String, String)>) -> String {
      let (first_name, last_name) = path.into_inner();
      format!("Hello, {} {}", first_name, last_name)
  }

  // Or (this wasn't previously supported):
  async fn some_route(web::Path((first_name, last_name)): web::Path<(String, String)>) -> String {
      format!("Hello, {} {}", first_name, last_name)
  }
  ```

- `middleware::NormalizePath` can now also be configured to trim trailing slashes instead of always keeping one. It will need `middleware::normalize::TrailingSlash` when being constructed with `NormalizePath::new(...)`, or for an easier migration you can replace `wrap(middleware::NormalizePath)` with `wrap(middleware::NormalizePath::new(TrailingSlash::MergeOnly))`.

- `HttpServer::maxconn` is renamed to the more expressive `HttpServer::max_connections`.

- `HttpServer::maxconnrate` is renamed to the more expressive `HttpServer::max_connection_rate`.

## 2.0.0

- `HttpServer::start()` renamed to `HttpServer::run()`. It is also possible to `.await` the result of `run`; in that case it awaits server exit.

- `App::register_data()` renamed to `App::app_data()` and accepts any type `T: 'static`. Stored data is available via the `HttpRequest::app_data()` method at runtime.

- Extractor configuration must be registered with `App::app_data()` instead of `App::data()`.

- Sync handlers have been removed. The `.to_async()` method has been renamed to `.to()`; replace `fn` with `async fn` to convert a sync handler to async.

- `actix_http_test::TestServer` moved to the `actix_web::test` module. To start a test server use the `test::start()` or `test::start_with()` methods.

- The `ResponseError` trait has been refactored. `ResponseError::error_response()` renders the http response.

- Feature `rust-tls` renamed to `rustls`.

  instead of

  ```toml
  actix-web = { version = "2.0.0", features = ["rust-tls"] }
  ```

  use

  ```toml
  actix-web = { version = "2.0.0", features = ["rustls"] }
  ```

- Feature `ssl` renamed to `openssl`.

  instead of

  ```toml
  actix-web = { version = "2.0.0", features = ["ssl"] }
  ```

  use

  ```toml
  actix-web = { version = "2.0.0", features = ["openssl"] }
  ```

- `Cors` builder now requires that you call `.finish()` to construct the middleware.


## 1.0.1

- Cors middleware has been moved to the `actix-cors` crate.

  instead of

  ```rust
  use actix_web::middleware::cors::Cors;
  ```

  use

  ```rust
  use actix_cors::Cors;
  ```

- Identity middleware has been moved to the `actix-identity` crate.

  instead of

  ```rust
  use actix_web::middleware::identity::{Identity, CookieIdentityPolicy, IdentityService};
  ```

  use

  ```rust
  use actix_identity::{Identity, CookieIdentityPolicy, IdentityService};
  ```

## 1.0.0

- Extractor configuration. In version 1.0 this is handled with the new `Data` mechanism for both setting and retrieving the configuration.

  instead of

  ```rust
  #[derive(Default)]
  struct ExtractorConfig {
      config: String,
  }

  impl FromRequest for YourExtractor {
      type Config = ExtractorConfig;
      type Result = Result<YourExtractor, Error>;

      fn from_request(req: &HttpRequest, cfg: &Self::Config) -> Self::Result {
          println!("use the config: {:?}", cfg.config);
          ...
      }
  }

  App::new().resource("/route_with_config", |r| {
      r.post().with_config(handler_fn, |cfg| {
          cfg.0.config = "test".to_string();
      })
  })
  ```

  use the `HttpRequest` to get the configuration like any other `Data` with `req.app_data::<C>()`, and set it with the `data()` method on the `resource`:

  ```rust
  #[derive(Default)]
  struct ExtractorConfig {
      config: String,
  }

  impl FromRequest for YourExtractor {
      type Error = Error;
      type Future = Result<Self, Self::Error>;
      type Config = ExtractorConfig;

      fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
          let cfg = req.app_data::<ExtractorConfig>();
          println!("config data?: {:?}", cfg.unwrap().config);
          ...
      }
  }

  App::new().service(
      resource("/route_with_config")
          .data(ExtractorConfig {
              config: "test".to_string(),
          })
          .route(post().to(handler_fn)),
  )
  ```

- Resource registration. The 1.0 version uses generalized resource registration via the `.service()` method.

  instead of

  ```rust
  App::new().resource("/welcome", |r| r.f(welcome))
  ```

  use App's or Scope's `.service()` method. The `.service()` method accepts an object that implements the `HttpServiceFactory` trait. By default actix-web provides the `Resource` and `Scope` services.

  ```rust
  App::new().service(
      web::resource("/welcome")
          .route(web::get().to(welcome))
          .route(web::post().to(post_handler)),
  )
  ```

- Scope registration.

  instead of

  ```rust
  let app = App::new().scope("/{project_id}", |scope| {
      scope
          .resource("/path1", |r| r.f(|_| HttpResponse::Ok()))
          .resource("/path2", |r| r.f(|_| HttpResponse::Ok()))
          .resource("/path3", |r| r.f(|_| HttpResponse::MethodNotAllowed()))
  });
  ```

  use `.service()` for registration and `web::scope()` as the scope object factory.

  ```rust
  let app = App::new().service(
      web::scope("/{project_id}")
          .service(web::resource("/path1").to(|| HttpResponse::Ok()))
          .service(web::resource("/path2").to(|| HttpResponse::Ok()))
          .service(web::resource("/path3").to(|| HttpResponse::MethodNotAllowed()))
  );
  ```

- `.with()`, `.with_async()` registration methods have been renamed to `.to()` and `.to_async()`.

  instead of

  ```rust
  App::new().resource("/welcome", |r| r.with(welcome))
  ```

  use the `.to()` or `.to_async()` methods

  ```rust
  App::new().service(web::resource("/welcome").to(welcome))
  ```

- Passing arguments to a handler with extractors; multiple arguments are allowed.

  instead of

  ```rust
  fn welcome((body, req): (Bytes, HttpRequest)) -> ... {
      ...
  }
  ```

  use multiple arguments

  ```rust
  fn welcome(body: Bytes, req: HttpRequest) -> ... {
      ...
  }
  ```

- `.f()`, `.a()` and `.h()` handler registration methods have been removed. Use `.to()` for handlers and `.to_async()` for async handlers. Handler functions must use extractors.

  instead of

  ```rust
  App::new().resource("/welcome", |r| r.f(welcome))
  ```

  use App's `to()` or `to_async()` methods

  ```rust
  App::new().service(web::resource("/welcome").to(welcome))
  ```

- `HttpRequest` does not provide access to the request's payload stream.

  instead of

  ```rust
  fn index(req: &HttpRequest) -> Box<Future<Item=HttpResponse, Error=Error>> {
      req
          .payload()
          .from_err()
          .fold((), |_, chunk| {
              ...
          })
          .map(|_| HttpResponse::Ok().finish())
          .responder()
  }
  ```

  use the `Payload` extractor

  ```rust
  fn index(stream: web::Payload) -> impl Future<Item=HttpResponse, Error=Error> {
      stream
          .from_err()
          .fold((), |_, chunk| {
              ...
          })
          .map(|_| HttpResponse::Ok().finish())
  }
  ```

- `State` is now `Data`. You register Data during the App initialization process and then access it from handlers either using a Data extractor or using HttpRequest's api.

  instead of

  ```rust
  App::with_state(T)
  ```

  use App's `data` method

  ```rust
  App::new()
      .data(T)
  ```

  and either use the Data extractor within your handler

  ```rust
  use actix_web::web::Data;

  fn endpoint_handler(data: Data<T>) {
      ...
  }
  ```

  .. or access your Data element from the HttpRequest

  ```rust
  fn endpoint_handler(req: HttpRequest) {
      let data: Option<Data<T>> = req.app_data::<T>();
  }
  ```

- AsyncResponder is removed; use the `.to_async()` registration method and `impl Future<>` as the result type.

  instead of

  ```rust
  use actix_web::AsyncResponder;

  fn endpoint_handler(...) -> impl Future<Item=HttpResponse, Error=Error> {
      ...
      .responder()
  }
  ```

  .. simply omit AsyncResponder and the corresponding `responder()` finishing method

- Middleware

  instead of

  ```rust
  let app = App::new()
      .middleware(middleware::Logger::default())
  ```

  use the `.wrap()` method

  ```rust
  let app = App::new()
      .wrap(middleware::Logger::default())
      .route("/index.html", web::get().to(index));
  ```

- The `HttpRequest::body()`, `HttpRequest::urlencoded()`, `HttpRequest::json()`, `HttpRequest::multipart()` methods have been removed. Use the `Bytes`, `String`, `Form`, `Json`, `Multipart` extractors instead.

  instead of

  ```rust
  fn index(req: &HttpRequest) -> Responder {
      req.body()
          .and_then(|body| {
              ...
          })
  }
  ```

  use

  ```rust
  fn index(body: Bytes) -> Responder {
      ...
  }
  ```

- The `actix_web::server` module has been removed. To start an http server use the `actix_web::HttpServer` type.

- StaticFiles and NamedFile have been moved to a separate crate.

  instead of `use actix_web::fs::StaticFile`

  use `use actix_files::Files`

  instead of `use actix_web::fs::NamedFile`

  use `use actix_files::NamedFile`

- Multipart has been moved to a separate crate.

  instead of `use actix_web::multipart::Multipart`

  use `use actix_multipart::Multipart`

- Response compression is not enabled by default. To enable, use the `Compress` middleware: `App::new().wrap(Compress::default())`.

- Session middleware moved to the actix-session crate.

- Actors support has been moved to the `actix-web-actors` crate.

- Custom Error

  Instead of the error_response method alone, ResponseError now provides two methods: error_response and render_response. error_response creates the error response, and render_response returns the error response to the caller.

  The simplest migration from 0.7 to 1.0 is to add the method below to your custom implementation of ResponseError:

  ```rust
  fn render_response(&self) -> HttpResponse {
      self.error_response()
  }
  ```

## 0.7.15

- The `' '` character is not percent decoded anymore before matching routes. If you need to use it in your routes, you should use `%20`.

  instead of

  ```rust
  fn main() {
      let app = App::new().resource("/my index", |r| {
          r.method(http::Method::GET)
              .with(index);
      });
  }
  ```

  use

  ```rust
  fn main() {
      let app = App::new().resource("/my%20index", |r| {
          r.method(http::Method::GET)
              .with(index);
      });
  }
  ```

- If you used `AsyncResult::async` you need to replace it with `AsyncResult::future`.


## 0.7.4

- `Route::with_config()`/`Route::with_async_config()` always pass configuration objects as a tuple, even for handlers with one parameter.


## 0.7

- `HttpRequest` does not implement `Stream` anymore. If you need to read the request payload, use the `HttpMessage::payload()` method.

  instead of

  ```rust
  fn index(req: HttpRequest) -> impl Responder {
      req
          .from_err()
          .fold(...)
          ....
  }
  ```

  use `.payload()`

  ```rust
  fn index(req: HttpRequest) -> impl Responder {
      req
          .payload() // <- get request payload stream
          .from_err()
          .fold(...)
          ....
  }
  ```

- The [Middleware](https://actix.rs/actix-web/actix_web/middleware/trait.Middleware.html) trait uses `&HttpRequest` instead of `&mut HttpRequest`.

- Removed `Route::with2()` and `Route::with3()`; use a tuple of extractors instead.

  instead of

  ```rust
  fn index(query: Query<..>, info: Json<MyStruct>) -> impl Responder {}
  ```

  use a tuple of extractors and use `.with()` for registration:

  ```rust
  fn index((query, json): (Query<..>, Json<MyStruct>)) -> impl Responder {}
  ```

- `Handler::handle()` uses `&self` instead of `&mut self`.

- `Handler::handle()` accepts a reference to `HttpRequest<_>` instead of a value.

- Removed deprecated `HttpServer::threads()`; use [HttpServer::workers()](https://actix.rs/actix-web/actix_web/server/struct.HttpServer.html#method.workers) instead.

- Renamed `client::ClientConnectorError::Connector` to `client::ClientConnectorError::Resolver`.

- `Route::with()` does not return `ExtractorConfig`; to configure the extractor use `Route::with_config()`.

  instead of

  ```rust
  fn main() {
      let app = App::new().resource("/index.html", |r| {
          r.method(http::Method::GET)
              .with(index)
              .limit(4096); // <- limit size of the payload
      });
  }
  ```

  use

  ```rust
  fn main() {
      let app = App::new().resource("/index.html", |r| {
          r.method(http::Method::GET)
              .with_config(index, |cfg| { // <- register handler
                  cfg.limit(4096); // <- limit size of the payload
              })
      });
  }
  ```

- `Route::with_async()` does not return `ExtractorConfig`; to configure the extractor use `Route::with_async_config()`.


## 0.6

- The `Path<T>` extractor returns `ErrorNotFound` on failure instead of `ErrorBadRequest`.

- `ws::Message::Close` now includes an optional close reason. `ws::CloseCode::Status` and `ws::CloseCode::Empty` have been removed.

- `HttpServer::threads()` renamed to `HttpServer::workers()`.

- `HttpServer::start_ssl()` and `HttpServer::start_tls()` deprecated. Use `HttpServer::bind_ssl()` and `HttpServer::bind_tls()` instead.

- `HttpRequest::extensions()` returns a read-only reference to the request's Extensions; `HttpRequest::extensions_mut()` returns a mutable reference.

- Instead of

  `use actix_web::middleware::{CookieSessionBackend, CookieSessionError, RequestSession, Session, SessionBackend, SessionImpl, SessionStorage};`

  use `actix_web::middleware::session`

  `use actix_web::middleware::session::{CookieSessionBackend, CookieSessionError, RequestSession, Session, SessionBackend, SessionImpl, SessionStorage};`

- `FromRequest::from_request()` accepts a mutable reference to a request.

- `FromRequest::Result` has to implement `Into<Reply<Self>>`.

- [`Responder::respond_to()`](https://actix.rs/actix-web/actix_web/trait.Responder.html#tymethod.respond_to) is generic over `S`.

- Use the `Query` extractor instead of `HttpRequest::query()`.

  ```rust
  fn index(q: Query<HashMap<String, String>>) -> Result<..> {
      ...
  }
  ```

  or

  ```rust
  let q = Query::<HashMap<String, String>>::extract(req);
  ```

- Websocket operations are implemented as the `WsWriter` trait; you need to `use actix_web::ws::WsWriter`.


## 0.5

- `HttpResponseBuilder::body()`, `.finish()`, and `.json()` methods return `HttpResponse` instead of `Result<HttpResponse>`.

- `actix_web::Method`, `actix_web::StatusCode`, `actix_web::Version` moved to the `actix_web::http` module.

- `actix_web::header` moved to `actix_web::http::header`.

- `NormalizePath` moved to the `actix_web::http` module.

- `HttpServer` moved to `actix_web::server`. Added new `actix_web::server::new()` function, a shortcut for `actix_web::server::HttpServer::new()`.

- `DefaultHeaders` middleware does not use a separate builder; all builder methods moved to the type itself.

- `StaticFiles::new()`'s show_index parameter removed; use the `show_files_listing()` method instead.

- `CookieSessionBackendBuilder` removed; all methods moved to the `CookieSessionBackend` type.

- The `actix_web::httpcodes` module is deprecated; `HttpResponse::Ok()`, `HttpResponse::Found()` and other `HttpResponse::XXX()` functions should be used instead.

- `ClientRequestBuilder::body()` returns `Result<_, actix_web::Error>` instead of `Result<_, http::Error>`.

- `Application` renamed to `App`.

- `actix_web::Reply`, `actix_web::Resource` moved to `actix_web::dev`.

README.md (109 lines removed; former content below)

@@ -1,109 +0,0 @@

<div align="center">
  <h1>Actix Web</h1>
  <p>
    <strong>Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust</strong>
  </p>
  <p>

Badges (image sources not recoverable from this view):
[crates.io](https://crates.io/crates/actix-web) ·
[docs.rs](https://docs.rs/actix-web/4.0.0-beta.16) ·
[Rust 1.52 release post](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html) ·
[dependency status](https://deps.rs/crate/actix-web/4.0.0-beta.16) ·
[CI](https://github.com/actix/actix-web/actions) ·
[codecov](https://codecov.io/gh/actix/actix-web) ·
[chat on Discord](https://discord.gg/NWpN5mmg3x)

  </p>
</div>

## Features

- Supports *HTTP/1.x* and *HTTP/2*
- Streaming and pipelining
- Keep-alive and slow requests handling
- Client/server [WebSockets](https://actix.rs/docs/websockets/) support
- Transparent content compression/decompression (br, gzip, deflate, zstd)
- Powerful [request routing](https://actix.rs/docs/url-dispatch/)
- Multipart streams
- Static assets
- SSL support using OpenSSL or Rustls
- Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/))
- Includes an async [HTTP client](https://docs.rs/awc/)
- Runs on stable Rust 1.52+

## Documentation

- [Website & User Guide](https://actix.rs)
- [Examples Repository](https://github.com/actix/examples)
- [API Documentation](https://docs.rs/actix-web)
- [API Documentation (master branch)](https://actix.rs/actix-web/actix_web)

## Example

Dependencies:

```toml
[dependencies]
actix-web = "3"
```

Code:

```rust
use actix_web::{get, web, App, HttpServer, Responder};

#[get("/{id}/{name}/index.html")]
async fn index(web::Path((id, name)): web::Path<(u32, String)>) -> impl Responder {
    format!("Hello {}! id:{}", name, id)
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(index))
        .bind("127.0.0.1:8080")?
        .run()
        .await
}
```

### More examples

- [Basic Setup](https://github.com/actix/examples/tree/master/basics/basics/)
- [Application State](https://github.com/actix/examples/tree/master/basics/state/)
- [JSON Handling](https://github.com/actix/examples/tree/master/json/json/)
- [Multipart Streams](https://github.com/actix/examples/tree/master/forms/multipart/)
- [Diesel Integration](https://github.com/actix/examples/tree/master/database_interactions/diesel/)
- [r2d2 Integration](https://github.com/actix/examples/tree/master/database_interactions/r2d2/)
- [Simple WebSocket](https://github.com/actix/examples/tree/master/websockets/websocket/)
- [Tera Templates](https://github.com/actix/examples/tree/master/template_engines/tera/)
- [Askama Templates](https://github.com/actix/examples/tree/master/template_engines/askama/)
- [HTTPS using Rustls](https://github.com/actix/examples/tree/master/security/rustls/)
- [HTTPS using OpenSSL](https://github.com/actix/examples/tree/master/security/openssl/)
- [WebSocket Chat](https://github.com/actix/examples/tree/master/websockets/chat/)

You may consider checking out [this directory](https://github.com/actix/examples/tree/master/) for more examples.

## Benchmarks
|
|
||||||
|
|
||||||
One of the fastest web frameworks available according to the
|
|
||||||
[TechEmpower Framework Benchmark](https://www.techempower.com/benchmarks/#section=data-r20&test=composite).
|
|
||||||
|
|
||||||
## License
|
|
||||||
|
|
||||||
This project is licensed under either of
|
|
||||||
|
|
||||||
- Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
|
||||||
[http://www.apache.org/licenses/LICENSE-2.0])
|
|
||||||
- MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
|
||||||
[http://opensource.org/licenses/MIT])
|
|
||||||
|
|
||||||
at your option.
|
|
||||||
|
|
||||||
## Code of Conduct
|
|
||||||
|
|
||||||
Contribution to the actix-web repo is organized under the terms of the Contributor Covenant.
|
|
||||||
The Actix team promises to intervene to uphold that code of conduct.
|
|
actix-files/CHANGES.md

@@ -1,17 +1,88 @@
# Changes

## Unreleased

- Minimum supported Rust version (MSRV) is now 1.75.

## 0.6.6

- Update `tokio-uring` dependency to `0.4`.
- Minimum supported Rust version (MSRV) is now 1.72.

## 0.6.5

- Fix handling of special characters in filenames.

## 0.6.4

- Fix handling of newlines in filenames.
- Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.

## 0.6.3

- XHTML files now use `Content-Disposition: inline` instead of `attachment`. [#2903]
- Minimum supported Rust version (MSRV) is now 1.59 due to transitive `time` dependency.
- Update `tokio-uring` dependency to `0.4`.

[#2903]: https://github.com/actix/actix-web/pull/2903

## 0.6.2

- Allow partial range responses for video content to start streaming sooner. [#2817]
- Minimum supported Rust version (MSRV) is now 1.57 due to transitive `time` dependency.

[#2817]: https://github.com/actix/actix-web/pull/2817

## 0.6.1

- Add `NamedFile::{modified, metadata, content_type, content_disposition, encoding}()` getters. [#2021]
- Update `tokio-uring` dependency to `0.3`.
- Audio files now use `Content-Disposition: inline` instead of `attachment`. [#2645]
- Minimum supported Rust version (MSRV) is now 1.56 due to transitive `hashbrown` dependency.

[#2021]: https://github.com/actix/actix-web/pull/2021
[#2645]: https://github.com/actix/actix-web/pull/2645

## 0.6.0

- No significant changes since `0.6.0-beta.16`.

## 0.6.0-beta.16

- No significant changes since `0.6.0-beta.15`.

## 0.6.0-beta.15

- No significant changes since `0.6.0-beta.14`.

## 0.6.0-beta.14

- The `prefer_utf8` option introduced in `0.4.0` is now true by default. [#2583]

[#2583]: https://github.com/actix/actix-web/pull/2583

## 0.6.0-beta.13

- The `Files` service now rejects requests with URL paths that include `%2F` (decoded: `/`). [#2398]
- The `Files` service now correctly decodes `%25` in the URL path to `%` for the file path. [#2398]
- Minimum supported Rust version (MSRV) is now 1.54.

[#2398]: https://github.com/actix/actix-web/pull/2398

## 0.6.0-beta.12

- No significant changes since `0.6.0-beta.11`.

## 0.6.0-beta.11

- No significant changes since `0.6.0-beta.10`.

## 0.6.0-beta.10

- No significant changes since `0.6.0-beta.9`.

## 0.6.0-beta.9

- Add crate feature `experimental-io-uring`, enabling async file I/O to be utilized. This feature is only available on Linux OSes with recent kernel versions. This feature is semver-exempt. [#2408]
- Add `NamedFile::open_async`. [#2408]
- Fix 304 Not Modified responses to omit the Content-Length header, as per the spec. [#2453]
@@ -22,24 +93,24 @@

[#2408]: https://github.com/actix/actix-web/pull/2408
[#2453]: https://github.com/actix/actix-web/pull/2453

## 0.6.0-beta.8

- Minimum supported Rust version (MSRV) is now 1.52.

## 0.6.0-beta.7

- Minimum supported Rust version (MSRV) is now 1.51.

## 0.6.0-beta.6

- Added `Files::path_filter()`. [#2274]
- `Files::show_files_listing()` can now be used with `Files::index_file()` to show files listing as a fallback when the index file is not found. [#2228]

[#2274]: https://github.com/actix/actix-web/pull/2274
[#2228]: https://github.com/actix/actix-web/pull/2228

## 0.6.0-beta.5

- `NamedFile` now implements `ServiceFactory` and `HttpServiceFactory` making it much more useful in routing. For example, it can be used directly as a default service. [#2135]
- For symbolic links, `Content-Disposition` header no longer shows the filename of the original file. [#2156]
- `Files::redirect_to_slash_directory()` now works as expected when used with `Files::show_files_listing()`. [#2225]
@@ -50,58 +121,58 @@

[#2225]: https://github.com/actix/actix-web/pull/2225
[#2257]: https://github.com/actix/actix-web/pull/2257

## 0.6.0-beta.4

- Add support for `.guard` in `Files` to selectively filter `Files` services. [#2046]

[#2046]: https://github.com/actix/actix-web/pull/2046

## 0.6.0-beta.3

- No notable changes.

## 0.6.0-beta.2

- Fix If-Modified-Since and If-Unmodified-Since to not compare using sub-second timestamps. [#1887]
- Replace `v_htmlescape` with `askama_escape`. [#1953]

[#1887]: https://github.com/actix/actix-web/pull/1887
[#1953]: https://github.com/actix/actix-web/pull/1953

## 0.6.0-beta.1

- `HttpRange::parse` now has its own error type.
- Update `bytes` to `1.0`. [#1813]

[#1813]: https://github.com/actix/actix-web/pull/1813

## 0.5.0

- Optionally support hidden files/directories. [#1811]

[#1811]: https://github.com/actix/actix-web/pull/1811

## 0.4.1

- Clarify order of parameters in `Files::new` and improve docs.

## 0.4.0

- Add `Files::prefer_utf8` option that adds UTF-8 charset on certain response types. [#1714]

[#1714]: https://github.com/actix/actix-web/pull/1714

## 0.3.0

- No significant changes from 0.3.0-beta.1.

## 0.3.0-beta.1

- Update `v_htmlescape` to 0.10
- Update `actix-web` and `actix-http` dependencies to beta.1

## 0.3.0-alpha.1

- Update `actix-web` and `actix-http` dependencies to alpha
- Fix some typos in the docs
- Bump minimum supported Rust version to 1.40
@@ -109,73 +180,73 @@

[#1384]: https://github.com/actix/actix-web/pull/1384

## 0.2.1

- Use the same format for file URLs regardless of platforms

## 0.2.0

- Fix BodyEncoding trait import #1220

## 0.2.0-alpha.1

- Migrate to `std::future`

## 0.1.7

- Add an additional `filename*` param in the `Content-Disposition` header of `actix_files::NamedFile` to be more compatible. (#1151)

## 0.1.6

- Add option to redirect to a slash-ended path `Files` #1132

## 0.1.5

- Bump up `mime_guess` crate version to 2.0.1
- Bump up `percent-encoding` crate version to 2.1
- Allow user defined request guards for `Files` #1113

## 0.1.4

- Allow to disable `Content-Disposition` header #686

## 0.1.3

- Do not set `Content-Length` header, let actix-http set it #930

## 0.1.2

- Content-Length is 0 for NamedFile HEAD request #914
- Fix ring dependency from actix-web default features for #741

## 0.1.1

- Static files are incorrectly served as both chunked and with length #812

## 0.1.0

- NamedFile last-modified check always fails due to nano-seconds in file modified date #820

## 0.1.0-beta.4

- Update actix-web to beta.4

## 0.1.0-beta.1

- Update actix-web to beta.1

## 0.1.0-alpha.6

- Update actix-web to alpha6

## 0.1.0-alpha.4

- Update actix-web to alpha4

## 0.1.0-alpha.2

- Add default handler support

## 0.1.0-alpha.1

- Initial impl
actix-files/Cargo.toml

@@ -1,9 +1,8 @@
[package]
name = "actix-files"
version = "0.6.6"
authors = [
  "Nikolay Kim <fafhrd91@gmail.com>",
  "Rob Ede <robjtede@icloud.com>",
]
description = "Static file serving for Actix Web"
@@ -12,36 +11,49 @@ homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web"
categories = ["asynchronous", "web-programming::http-server"]
license = "MIT OR Apache-2.0"
edition = "2021"

[package.metadata.cargo_check_external_types]
allowed_external_types = [
  "actix_http::*",
  "actix_service::*",
  "actix_web::*",
  "http::*",
  "mime::*",
]

[features]
experimental-io-uring = ["actix-web/experimental-io-uring", "tokio-uring"]

[dependencies]
actix-http = "3"
actix-service = "2"
actix-utils = "3"
actix-web = { version = "4", default-features = false }

bitflags = "2"
bytes = "1"
derive_more = { version = "2", features = ["display", "error", "from"] }
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
http-range = "0.1.4"
log = "0.4"
mime = "0.3.9"
mime_guess = "2.0.1"
percent-encoding = "2.1"
pin-project-lite = "0.2.7"
v_htmlescape = "0.15.5"

# experimental-io-uring
[target.'cfg(target_os = "linux")'.dependencies]
tokio-uring = { version = "0.5", optional = true, features = ["bytes"] }
actix-server = { version = "2.4", optional = true } # ensure matching tokio-uring versions

[dev-dependencies]
actix-rt = "2.7"
actix-test = "0.1"
actix-web = "4"
env_logger = "0.11"
tempfile = "3.2"

[lints]
workspace = true
actix-files/README.md

@@ -1,18 +1,32 @@

# `actix-files`

<!-- prettier-ignore-start -->

[crates.io](https://crates.io/crates/actix-files)
[Documentation](https://docs.rs/actix-files/0.6.6)
<br />
[Dependency status](https://deps.rs/crate/actix-files/0.6.6)
[Downloads (crates.io)](https://crates.io/crates/actix-files)
[Chat on Discord](https://discord.gg/NWpN5mmg3x)

<!-- prettier-ignore-end -->

<!-- cargo-rdme start -->

Static file serving for Actix Web.

Provides a non-blocking service for serving static files from disk.

## Examples

```rust
use actix_web::App;
use actix_files::Files;

let app = App::new()
    .service(Files::new("/static", ".").prefer_utf8(true));
```

<!-- cargo-rdme end -->
actix-files/examples/guarded-listing.rs (new file, 33 lines)

@@ -0,0 +1,33 @@
use actix_files::Files;
use actix_web::{get, guard, middleware, App, HttpServer, Responder};

const EXAMPLES_DIR: &str = concat![env!("CARGO_MANIFEST_DIR"), "/examples"];

#[get("/")]
async fn index() -> impl Responder {
    "Hello world!"
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));

    log::info!("starting HTTP server at http://localhost:8080");

    HttpServer::new(|| {
        App::new()
            .service(index)
            // Serve a directory listing only when the client sends `show-listing: ?1`;
            // otherwise the request falls through to the plain files service below.
            .service(
                Files::new("/assets", EXAMPLES_DIR)
                    .show_files_listing()
                    .guard(guard::Header("show-listing", "?1")),
            )
            .service(Files::new("/assets", EXAMPLES_DIR))
            .wrap(middleware::Compress::default())
            .wrap(middleware::Logger::default())
    })
    .bind(("127.0.0.1", 8080))?
    .workers(2)
    .run()
    .await
}
actix-files/src/chunked.rs

@@ -7,6 +7,8 @@ use std::{
};

use actix_web::{error::Error, web::Bytes};
#[cfg(feature = "experimental-io-uring")]
use bytes::BytesMut;
use futures_core::{ready, Stream};
use pin_project_lite::pin_project;

@@ -78,7 +80,7 @@ async fn chunked_read_file_callback(
) -> Result<(File, Bytes), Error> {
    use io::{Read as _, Seek as _};

    let res = actix_web::web::block(move || {
        let mut buf = Vec::with_capacity(max_bytes);

        file.seek(io::SeekFrom::Start(offset))?;
@@ -91,8 +93,7 @@ async fn chunked_read_file_callback(
        Ok((file, Bytes::from(buf)))
    })
    .await??;

    Ok(res)
}
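For context on the `web::block` change above: this is also how application handlers typically offload blocking work onto actix-web's blocking thread pool. A minimal, hypothetical sketch under actix-web 4 — the handler name and file path are illustrative only:

```rust
use actix_web::{web, Result};

// Run a blocking filesystem read off the async executor.
// `web::block(..).await` yields `Result<T, BlockingError>`; the first `?` handles the
// blocking-pool error, the second propagates the inner `std::io::Error`, and both
// convert into `actix_web::Error`.
async fn read_notes() -> Result<String> {
    let contents = web::block(|| std::fs::read_to_string("notes.txt")).await??;
    Ok(contents)
}
```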
@@ -214,64 +215,3 @@
        }
    }
}

The `bytes_mut` wrapper module below was removed by this diff:

#[cfg(feature = "experimental-io-uring")]
use bytes_mut::BytesMut;

// TODO: remove new type and use bytes::BytesMut directly
#[doc(hidden)]
#[cfg(feature = "experimental-io-uring")]
mod bytes_mut {
    use std::ops::{Deref, DerefMut};

    use tokio_uring::buf::{IoBuf, IoBufMut};

    #[derive(Debug)]
    pub struct BytesMut(bytes::BytesMut);

    impl BytesMut {
        pub(super) fn new() -> Self {
            Self(bytes::BytesMut::new())
        }
    }

    impl Deref for BytesMut {
        type Target = bytes::BytesMut;

        fn deref(&self) -> &Self::Target {
            &self.0
        }
    }

    impl DerefMut for BytesMut {
        fn deref_mut(&mut self) -> &mut Self::Target {
            &mut self.0
        }
    }

    unsafe impl IoBuf for BytesMut {
        fn stable_ptr(&self) -> *const u8 {
            self.0.as_ptr()
        }

        fn bytes_init(&self) -> usize {
            self.0.len()
        }

        fn bytes_total(&self) -> usize {
            self.0.capacity()
        }
    }

    unsafe impl IoBufMut for BytesMut {
        fn stable_mut_ptr(&mut self) -> *mut u8 {
            self.0.as_mut_ptr()
        }

        unsafe fn set_init(&mut self, init_len: usize) {
            if self.len() < init_len {
                self.0.set_len(init_len);
            }
        }
    }
}
actix-files/src/directory.rs

@@ -1,8 +1,13 @@
use std::{
    fmt::Write,
    fs::DirEntry,
    io,
    path::{Path, PathBuf},
};

use actix_web::{dev::ServiceResponse, HttpRequest, HttpResponse};
use percent_encoding::{utf8_percent_encode, CONTROLS};
use v_htmlescape::escape as escape_html_entity;

/// A directory; responds with the generated directory listing.
#[derive(Debug)]
@@ -40,17 +45,26 @@ impl Directory {
pub(crate) type DirectoryRenderer =
    dyn Fn(&Directory, &HttpRequest) -> Result<ServiceResponse, io::Error>;

/// Returns percent encoded file URL path.
macro_rules! encode_file_url {
    ($path:ident) => {
        utf8_percent_encode(&$path, CONTROLS)
    };
}

/// Returns HTML entity encoded formatter.
///
/// ```plain
/// " => &quot;
/// & => &amp;
/// ' => &#x27;
/// < => &lt;
/// > => &gt;
/// / => &#x2f;
/// ```
macro_rules! encode_file_name {
    ($entry:ident) => {
        escape_html_entity(&$entry.file_name().to_string_lossy())
    };
}

@@ -66,7 +80,7 @@ pub(crate) fn directory_listing(
        if dir.is_visible(&entry) {
            let entry = entry.unwrap();
            let p = match entry.path().strip_prefix(&dir.path) {
                Ok(p) if cfg!(windows) => base.join(p).to_string_lossy().replace('\\', "/"),
                Ok(p) => base.join(p).to_string_lossy().into_owned(),
                Err(_) => continue,
            };
actix-files/src/error.rs

@@ -2,41 +2,47 @@ use actix_web::{http::StatusCode, ResponseError};
use derive_more::Display;

/// Errors which can occur when serving static files.
#[derive(Debug, PartialEq, Eq, Display)]
pub enum FilesError {
    /// Path is not a directory.
    #[allow(dead_code)]
    #[display("path is not a directory. Unable to serve static files")]
    IsNotDirectory,

    /// Cannot render directory.
    #[display("unable to render directory without index file")]
    IsDirectory,
}

impl ResponseError for FilesError {
    /// Returns `404 Not Found`.
    fn status_code(&self) -> StatusCode {
        StatusCode::NOT_FOUND
    }
}

#[derive(Debug, PartialEq, Eq, Display)]
#[non_exhaustive]
pub enum UriSegmentError {
    /// Segment started with the wrapped invalid character.
    #[display("segment started with invalid character: ('{_0}')")]
    BadStart(char),

    /// Segment contained the wrapped invalid character.
    #[display("segment contained invalid character ('{_0}')")]
    BadChar(char),

    /// Segment ended with the wrapped invalid character.
    #[display("segment ended with invalid character: ('{_0}')")]
    BadEnd(char),

    /// Path is not a valid UTF-8 string after percent-decoding.
    #[display("path is not a valid UTF-8 string after percent-decoding")]
    NotValidUtf8,
}

impl ResponseError for UriSegmentError {
    /// Returns `400 Bad Request`.
    fn status_code(&self) -> StatusCode {
        StatusCode::BAD_REQUEST
    }
}
actix-files/src/files.rs

@@ -8,8 +8,7 @@ use std::{
use actix_service::{boxed, IntoServiceFactory, ServiceFactory, ServiceFactoryExt};
use actix_web::{
    dev::{
        AppService, HttpServiceFactory, RequestHead, ResourceDef, ServiceRequest, ServiceResponse,
    },
    error::Error,
    guard::Guard,
@@ -28,6 +27,7 @@ use crate::{
///
/// `Files` service must be registered with `App::service()` method.
///
/// # Examples
/// ```
/// use actix_web::App;
/// use actix_files::Files;
@@ -36,7 +36,7 @@ use crate::{
///     .service(Files::new("/static", "."));
/// ```
pub struct Files {
    mount_path: String,
    directory: PathBuf,
    index: Option<String>,
    show_index: bool,
@@ -67,7 +67,7 @@ impl Clone for Files {
            default: self.default.clone(),
            renderer: self.renderer.clone(),
            file_flags: self.file_flags,
            mount_path: self.mount_path.clone(),
            mime_override: self.mime_override.clone(),
            path_filter: self.path_filter.clone(),
            use_guards: self.use_guards.clone(),
@@ -106,7 +106,7 @@ impl Files {
        };

        Files {
            mount_path: mount_path.trim_end_matches('/').to_owned(),
            directory: dir,
            index: None,
            show_index: false,
@@ -141,7 +141,7 @@ impl Files {
        self
    }

    /// Set custom directory renderer.
    pub fn files_listing_renderer<F>(mut self, f: F) -> Self
    where
        for<'r, 's> F:
@@ -151,7 +151,7 @@ impl Files {
        self
    }

    /// Specifies MIME override callback.
    pub fn mime_override<F>(mut self, f: F) -> Self
    where
        F: Fn(&mime::Name<'_>) -> DispositionType + 'static,
@@ -235,7 +235,7 @@ impl Files {
    /// request starts being handled by the file service, it will not be able to back-out and try
    /// the next service, you will simply get a 404 (or 405) error response.
    ///
    /// To allow `POST` requests to retrieve files, see [`Files::method_guard()`].
    ///
    /// # Examples
    /// ```
@@ -300,12 +300,8 @@ impl Files {
    pub fn default_handler<F, U>(mut self, f: F) -> Self
    where
        F: IntoServiceFactory<U, ServiceRequest>,
        U: ServiceFactory<ServiceRequest, Config = (), Response = ServiceResponse, Error = Error>
            + 'static,
    {
        // create and configure default resource
        self.default = Rc::new(RefCell::new(Some(Rc::new(boxed::factory(
@@ -341,9 +337,9 @@ impl HttpServiceFactory for Files {
        }

        let rdef = if config.is_root() {
            ResourceDef::root_prefix(&self.mount_path)
        } else {
            ResourceDef::prefix(&self.mount_path)
        };

        config.register_service(rdef, guards, self, None)
@@ -389,3 +385,46 @@ impl ServiceFactory<ServiceRequest> for Files {
        }
    }
}

#[cfg(test)]
mod tests {
    use actix_web::{
        http::StatusCode,
        test::{self, TestRequest},
        App, HttpResponse,
    };

    use super::*;

    #[actix_web::test]
    async fn custom_files_listing_renderer() {
        let srv = test::init_service(
            App::new().service(
                Files::new("/", "./tests")
                    .show_files_listing()
                    .files_listing_renderer(|dir, req| {
                        Ok(ServiceResponse::new(
                            req.clone(),
                            HttpResponse::Ok().body(dir.path.to_str().unwrap().to_owned()),
                        ))
                    }),
            ),
        )
        .await;

        let req = TestRequest::with_uri("/").to_request();
        let res = test::call_service(&srv, req).await;

        assert_eq!(res.status(), StatusCode::OK);
        let body = test::read_body(res).await;
        let body_str = std::str::from_utf8(&body).unwrap();
        let actual_path = Path::new(&body_str);
        let expected_path = Path::new("actix-files/tests");
        assert!(
            actual_path.ends_with(expected_path),
            "body {:?} does not end with {:?}",
            actual_path,
            expected_path
        );
    }
}
actix-files/src/lib.rs

@@ -2,7 +2,7 @@
//!
//! Provides a non-blocking service for serving static files from disk.
//!
//! # Examples
//! ```
//! use actix_web::App;
//! use actix_files::Files;
@@ -11,8 +11,12 @@
//!     .service(Files::new("/static", ".").prefer_utf8(true));
//! ```

#![warn(missing_docs, missing_debug_implementations)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]

use std::path::Path;

use actix_service::boxed::{BoxService, BoxServiceFactory};
use actix_web::{
@@ -21,7 +25,6 @@ use actix_web::{
    http::header::DispositionType,
};
use mime_guess::from_ext;

mod chunked;
mod directory;
@@ -33,16 +36,15 @@ mod path_buf;
mod range;
mod service;

pub use self::{
    chunked::ChunkedReadFile, directory::Directory, files::Files, named::NamedFile,
    range::HttpRange, service::FilesService,
};
use self::{
    directory::{directory_listing, DirectoryRenderer},
    error::FilesError,
    path_buf::PathBufWrap,
};

type HttpService = BoxService<ServiceRequest, ServiceResponse, Error>;
type HttpNewService = BoxServiceFactory<(), ServiceRequest, ServiceResponse, Error, ()>;
@ -62,16 +64,17 @@ type PathFilter = dyn Fn(&Path, &RequestHead) -> bool;
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use std::{
|
use std::{
|
||||||
|
fmt::Write as _,
|
||||||
fs::{self},
|
fs::{self},
|
||||||
ops::Add,
|
ops::Add,
|
||||||
time::{Duration, SystemTime},
|
time::{Duration, SystemTime},
|
||||||
};
|
};
|
||||||
|
|
||||||
use actix_service::ServiceFactory;
|
|
||||||
use actix_web::{
|
use actix_web::{
|
||||||
|
dev::ServiceFactory,
|
||||||
guard,
|
guard,
|
||||||
http::{
|
http::{
|
||||||
header::{self, ContentDisposition, DispositionParam, DispositionType},
|
header::{self, ContentDisposition, DispositionParam},
|
||||||
Method, StatusCode,
|
Method, StatusCode,
|
||||||
},
|
},
|
||||||
middleware::Compress,
|
middleware::Compress,
|
||||||
@ -106,7 +109,7 @@ mod tests {
|
|||||||
let req = TestRequest::default()
|
let req = TestRequest::default()
|
||||||
.insert_header((header::IF_MODIFIED_SINCE, since))
|
.insert_header((header::IF_MODIFIED_SINCE, since))
|
||||||
.to_http_request();
|
.to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(resp.status(), StatusCode::NOT_MODIFIED);
|
assert_eq!(resp.status(), StatusCode::NOT_MODIFIED);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -118,7 +121,7 @@ mod tests {
|
|||||||
let req = TestRequest::default()
|
let req = TestRequest::default()
|
||||||
.insert_header((header::IF_MODIFIED_SINCE, since))
|
.insert_header((header::IF_MODIFIED_SINCE, since))
|
||||||
.to_http_request();
|
.to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(resp.status(), StatusCode::NOT_MODIFIED);
|
assert_eq!(resp.status(), StatusCode::NOT_MODIFIED);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -131,7 +134,7 @@ mod tests {
|
|||||||
.insert_header((header::IF_NONE_MATCH, "miss_etag"))
|
.insert_header((header::IF_NONE_MATCH, "miss_etag"))
|
||||||
.insert_header((header::IF_MODIFIED_SINCE, since))
|
.insert_header((header::IF_MODIFIED_SINCE, since))
|
||||||
.to_http_request();
|
.to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_ne!(resp.status(), StatusCode::NOT_MODIFIED);
|
assert_ne!(resp.status(), StatusCode::NOT_MODIFIED);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -143,7 +146,7 @@ mod tests {
|
|||||||
let req = TestRequest::default()
|
let req = TestRequest::default()
|
||||||
.insert_header((header::IF_UNMODIFIED_SINCE, since))
|
.insert_header((header::IF_UNMODIFIED_SINCE, since))
|
||||||
.to_http_request();
|
.to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(resp.status(), StatusCode::OK);
|
assert_eq!(resp.status(), StatusCode::OK);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -155,7 +158,7 @@ mod tests {
|
|||||||
let req = TestRequest::default()
|
let req = TestRequest::default()
|
||||||
.insert_header((header::IF_UNMODIFIED_SINCE, since))
|
.insert_header((header::IF_UNMODIFIED_SINCE, since))
|
||||||
.to_http_request();
|
.to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(resp.status(), StatusCode::PRECONDITION_FAILED);
|
assert_eq!(resp.status(), StatusCode::PRECONDITION_FAILED);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -172,7 +175,7 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let req = TestRequest::default().to_http_request();
|
let req = TestRequest::default().to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
||||||
"text/x-toml"
|
"text/x-toml"
|
||||||
@ -196,7 +199,7 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let req = TestRequest::default().to_http_request();
|
let req = TestRequest::default().to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
resp.headers().get(header::CONTENT_DISPOSITION).unwrap(),
|
resp.headers().get(header::CONTENT_DISPOSITION).unwrap(),
|
||||||
"inline; filename=\"Cargo.toml\""
|
"inline; filename=\"Cargo.toml\""
|
||||||
@ -207,7 +210,7 @@ mod tests {
|
|||||||
.unwrap()
|
.unwrap()
|
||||||
.disable_content_disposition();
|
.disable_content_disposition();
|
||||||
let req = TestRequest::default().to_http_request();
|
let req = TestRequest::default().to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert!(resp.headers().get(header::CONTENT_DISPOSITION).is_none());
|
assert!(resp.headers().get(header::CONTENT_DISPOSITION).is_none());
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -235,7 +238,7 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let req = TestRequest::default().to_http_request();
|
let req = TestRequest::default().to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
||||||
"text/x-toml"
|
"text/x-toml"
|
||||||
@ -261,7 +264,7 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let req = TestRequest::default().to_http_request();
|
let req = TestRequest::default().to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
||||||
"text/xml"
|
"text/xml"
|
||||||
@ -284,7 +287,7 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let req = TestRequest::default().to_http_request();
|
let req = TestRequest::default().to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
||||||
"image/png"
|
"image/png"
|
||||||
@ -300,14 +303,14 @@ mod tests {
|
|||||||
let file = NamedFile::open_async("tests/test.js").await.unwrap();
|
let file = NamedFile::open_async("tests/test.js").await.unwrap();
|
||||||
|
|
||||||
let req = TestRequest::default().to_http_request();
|
let req = TestRequest::default().to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
||||||
"application/javascript"
|
"text/javascript",
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
resp.headers().get(header::CONTENT_DISPOSITION).unwrap(),
|
resp.headers().get(header::CONTENT_DISPOSITION).unwrap(),
|
||||||
"inline; filename=\"test.js\""
|
"inline; filename=\"test.js\"",
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -330,7 +333,7 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let req = TestRequest::default().to_http_request();
|
let req = TestRequest::default().to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
||||||
"image/png"
|
"image/png"
|
||||||
@ -353,7 +356,7 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let req = TestRequest::default().to_http_request();
|
let req = TestRequest::default().to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
||||||
"application/octet-stream"
|
"application/octet-stream"
|
||||||
@ -364,22 +367,45 @@ mod tests {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(deprecated)]
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn test_named_file_status_code_text() {
|
async fn status_code_customize_same_output() {
|
||||||
let mut file = NamedFile::open_async("Cargo.toml")
|
let file1 = NamedFile::open_async("Cargo.toml")
|
||||||
.await
|
.await
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.set_status_code(StatusCode::NOT_FOUND);
|
.set_status_code(StatusCode::NOT_FOUND);
|
||||||
|
|
||||||
|
let file2 = NamedFile::open_async("Cargo.toml")
|
||||||
|
.await
|
||||||
|
.unwrap()
|
||||||
|
.customize()
|
||||||
|
.with_status(StatusCode::NOT_FOUND);
|
||||||
|
|
||||||
|
let req = TestRequest::default().to_http_request();
|
||||||
|
let res1 = file1.respond_to(&req);
|
||||||
|
let res2 = file2.respond_to(&req);
|
||||||
|
|
||||||
|
assert_eq!(res1.status(), StatusCode::NOT_FOUND);
|
||||||
|
assert_eq!(res2.status(), StatusCode::NOT_FOUND);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn test_named_file_status_code_text() {
|
||||||
|
let mut file = NamedFile::open_async("Cargo.toml").await.unwrap();
|
||||||
|
|
||||||
{
|
{
|
||||||
file.file();
|
file.file();
|
||||||
let _f: &File = &file;
|
let _f: &File = &file;
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
let _f: &mut File = &mut file;
|
let _f: &mut File = &mut file;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let file = file.customize().with_status(StatusCode::NOT_FOUND);
|
||||||
|
|
||||||
let req = TestRequest::default().to_http_request();
|
let req = TestRequest::default().to_http_request();
|
||||||
let resp = file.respond_to(&req).await.unwrap();
|
let resp = file.respond_to(&req);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
resp.headers().get(header::CONTENT_TYPE).unwrap(),
|
||||||
"text/x-toml"
|
"text/x-toml"
|
||||||
@ -527,9 +553,8 @@ mod tests {
|
|||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn test_static_files_with_spaces() {
|
async fn test_static_files_with_spaces() {
|
||||||
let srv = test::init_service(
|
let srv =
|
||||||
App::new().service(Files::new("/", ".").index_file("Cargo.toml")),
|
test::init_service(App::new().service(Files::new("/", ".").index_file("Cargo.toml")))
|
||||||
)
|
|
||||||
.await;
|
.await;
|
||||||
let request = TestRequest::get()
|
let request = TestRequest::get()
|
||||||
.uri("/tests/test%20space.binary")
|
.uri("/tests/test%20space.binary")
|
||||||
@ -542,6 +567,30 @@ mod tests {
|
|||||||
assert_eq!(bytes, data);
|
assert_eq!(bytes, data);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(not(target_os = "windows"))]
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn test_static_files_with_special_characters() {
|
||||||
|
// Create the file we want to test against ad-hoc. We can't check it in as otherwise
|
||||||
|
// Windows can't even checkout this repository.
|
||||||
|
let temp_dir = tempfile::tempdir().unwrap();
|
||||||
|
let file_with_newlines = temp_dir.path().join("test\n\x0B\x0C\rnewline.text");
|
||||||
|
fs::write(&file_with_newlines, "Look at my newlines").unwrap();
|
||||||
|
|
||||||
|
let srv = test::init_service(
|
||||||
|
App::new().service(Files::new("/", temp_dir.path()).index_file("Cargo.toml")),
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
let request = TestRequest::get()
|
+        .uri("/test%0A%0B%0C%0Dnewline.text")
+        .to_request();
+    let response = test::call_service(&srv, request).await;
+    assert_eq!(response.status(), StatusCode::OK);
+
+    let bytes = test::read_body(response).await;
+    let data = web::Bytes::from(fs::read(file_with_newlines).unwrap());
+    assert_eq!(bytes, data);
+}
+
 #[actix_rt::test]
 async fn test_files_not_allowed() {
     let srv = test::init_service(App::new().service(Files::new("/", "."))).await;
@@ -597,7 +646,8 @@ mod tests {
         .to_request();
     let res = test::call_service(&srv, request).await;
     assert_eq!(res.status(), StatusCode::OK);
-    assert!(!res.headers().contains_key(header::CONTENT_ENCODING));
+    assert!(res.headers().contains_key(header::CONTENT_ENCODING));
+    assert!(!test::read_body(res).await.is_empty());
 }

 #[actix_rt::test]
@@ -632,15 +682,14 @@ mod tests {
 async fn test_named_file_allowed_method() {
     let req = TestRequest::default().method(Method::GET).to_http_request();
     let file = NamedFile::open_async("Cargo.toml").await.unwrap();
-    let resp = file.respond_to(&req).await.unwrap();
+    let resp = file.respond_to(&req);
     assert_eq!(resp.status(), StatusCode::OK);
 }

 #[actix_rt::test]
 async fn test_static_files() {
     let srv =
-        test::init_service(App::new().service(Files::new("/", ".").show_files_listing()))
-            .await;
+        test::init_service(App::new().service(Files::new("/", ".").show_files_listing())).await;
     let req = TestRequest::with_uri("/missing").to_request();

     let resp = test::call_service(&srv, req).await;
@@ -653,8 +702,7 @@ mod tests {
     assert_eq!(resp.status(), StatusCode::NOT_FOUND);

     let srv =
-        test::init_service(App::new().service(Files::new("/", ".").show_files_listing()))
-            .await;
+        test::init_service(App::new().service(Files::new("/", ".").show_files_listing())).await;
     let req = TestRequest::with_uri("/tests").to_request();
     let resp = test::call_service(&srv, req).await;
     assert_eq!(
@@ -802,6 +850,40 @@ mod tests {
     let req = TestRequest::get().uri("/test/%43argo.toml").to_request();
     let res = test::call_service(&srv, req).await;
     assert_eq!(res.status(), StatusCode::OK);

+    // `%2F` == `/`
+    let req = TestRequest::get().uri("/test%2Ftest.binary").to_request();
+    let res = test::call_service(&srv, req).await;
+    assert_eq!(res.status(), StatusCode::NOT_FOUND);
+
+    let req = TestRequest::get().uri("/test/Cargo.toml%00").to_request();
+    let res = test::call_service(&srv, req).await;
+    assert_eq!(res.status(), StatusCode::NOT_FOUND);
+}
+
+#[actix_rt::test]
+async fn test_percent_encoding_2() {
+    let temp_dir = tempfile::tempdir().unwrap();
+    let filename = match cfg!(unix) {
+        true => "ض:?#[]{}<>()@!$&'`|*+,;= %20\n.test",
+        false => "ض#[]{}()@!$&'`+,;= %20.test",
+    };
+    let filename_encoded = filename
+        .as_bytes()
+        .iter()
+        .fold(String::new(), |mut buf, c| {
+            write!(&mut buf, "%{:02X}", c).unwrap();
+            buf
+        });
+    std::fs::File::create(temp_dir.path().join(filename)).unwrap();
+
+    let srv = test::init_service(App::new().service(Files::new("/", temp_dir.path()))).await;
+
+    let req = TestRequest::get()
+        .uri(&format!("/{}", filename_encoded))
+        .to_request();
+    let res = test::call_service(&srv, req).await;
+    assert_eq!(res.status(), StatusCode::OK);
 }

 #[actix_rt::test]
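The new `test_percent_encoding_2` test percent-encodes every byte of an awkward filename before requesting it. A minimal standalone sketch of the same fold, outside the test harness (the helper name is illustrative, not part of the crate):

```rust
use std::fmt::Write as _;

// Percent-encode every byte unconditionally, exactly as the test above does.
fn percent_encode_all(name: &str) -> String {
    name.as_bytes().iter().fold(String::new(), |mut buf, b| {
        write!(&mut buf, "%{:02X}", b).unwrap();
        buf
    })
}

fn main() {
    assert_eq!(percent_encode_all("a b"), "%61%20%62");
}
```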
@@ -1,22 +1,20 @@
 use std::{
-    fmt,
     fs::Metadata,
     io,
     path::{Path, PathBuf},
     time::{SystemTime, UNIX_EPOCH},
 };

-use actix_service::{Service, ServiceFactory};
 use actix_web::{
     body::{self, BoxBody, SizedStream},
     dev::{
-        AppService, BodyEncoding, HttpServiceFactory, ResourceDef, ServiceRequest,
+        self, AppService, HttpServiceFactory, ResourceDef, Service, ServiceFactory, ServiceRequest,
         ServiceResponse,
     },
     http::{
         header::{
-            self, Charset, ContentDisposition, ContentEncoding, DispositionParam,
-            DispositionType, ExtendedValue,
+            self, Charset, ContentDisposition, ContentEncoding, DispositionParam, DispositionType,
+            ExtendedValue, HeaderValue,
         },
         StatusCode,
     },
@@ -25,11 +23,12 @@ use actix_web::{
 use bitflags::bitflags;
 use derive_more::{Deref, DerefMut};
 use futures_core::future::LocalBoxFuture;
-use mime_guess::from_path;
+use mime::Mime;

 use crate::{encoding::equiv_utf8_text, range::HttpRange};

 bitflags! {
+    #[derive(Debug, Clone, Copy)]
     pub(crate) struct Flags: u8 {
         const ETAG = 0b0000_0001;
         const LAST_MD = 0b0000_0010;
@@ -40,7 +39,7 @@ bitflags! {

 impl Default for Flags {
     fn default() -> Self {
-        Flags::from_bits_truncate(0b0000_0111)
+        Flags::from_bits_truncate(0b0000_1111)
     }
 }

@@ -68,49 +67,24 @@ impl Default for Flags {
 /// NamedFile::open_async("./static/index.html").await
 /// }
 /// ```
-#[derive(Deref, DerefMut)]
+#[derive(Debug, Deref, DerefMut)]
 pub struct NamedFile {
-    path: PathBuf,
     #[deref]
     #[deref_mut]
     file: File,
+    path: PathBuf,
     modified: Option<SystemTime>,
     pub(crate) md: Metadata,
     pub(crate) flags: Flags,
     pub(crate) status_code: StatusCode,
-    pub(crate) content_type: mime::Mime,
-    pub(crate) content_disposition: header::ContentDisposition,
+    pub(crate) content_type: Mime,
+    pub(crate) content_disposition: ContentDisposition,
     pub(crate) encoding: Option<ContentEncoding>,
 }

-impl fmt::Debug for NamedFile {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("NamedFile")
-            .field("path", &self.path)
-            .field(
-                "file",
-                #[cfg(feature = "experimental-io-uring")]
-                {
-                    &"tokio_uring::File"
-                },
-                #[cfg(not(feature = "experimental-io-uring"))]
-                {
-                    &self.file
-                },
-            )
-            .field("modified", &self.modified)
-            .field("md", &self.md)
-            .field("flags", &self.flags)
-            .field("status_code", &self.status_code)
-            .field("content_type", &self.content_type)
-            .field("content_disposition", &self.content_disposition)
-            .field("encoding", &self.encoding)
-            .finish()
-    }
-}
-
 #[cfg(not(feature = "experimental-io-uring"))]
 pub(crate) use std::fs::File;

 #[cfg(feature = "experimental-io-uring")]
 pub(crate) use tokio_uring::fs::File;

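The default flag set changes from `0b0000_0111` to `0b0000_1111`, so a fourth flag is now enabled out of the box. A hedged sketch of the layout this implies; only `ETAG` and `LAST_MD` are visible in the hunk above, so the remaining bit assignments are assumptions for illustration:

```rust
bitflags::bitflags! {
    #[derive(Debug, Clone, Copy)]
    struct Flags: u8 {
        const ETAG                = 0b0000_0001;
        const LAST_MD             = 0b0000_0010;
        const CONTENT_DISPOSITION = 0b0000_0100; // assumed bit value
        const PREFER_UTF8         = 0b0000_1000; // assumed bit value
    }
}

fn main() {
    // the new default enables all four flags
    let default = Flags::from_bits_truncate(0b0000_1111);
    assert!(default.contains(Flags::PREFER_UTF8));
}
```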
@@ -124,18 +98,18 @@ impl NamedFile {
     ///
     /// # Examples
     /// ```ignore
+    /// use std::{
+    ///     io::{self, Write as _},
+    ///     env,
+    ///     fs::File
+    /// };
     /// use actix_files::NamedFile;
-    /// use std::io::{self, Write};
-    /// use std::env;
-    /// use std::fs::File;
     ///
-    /// fn main() -> io::Result<()> {
     /// let mut file = File::create("foo.txt")?;
     /// file.write_all(b"Hello, world!")?;
     /// let named_file = NamedFile::from_file(file, "bar.txt")?;
     /// # std::fs::remove_file("foo.txt");
     /// Ok(())
-    /// }
     /// ```
     pub fn from_file<P: AsRef<Path>>(file: File, path: P) -> io::Result<NamedFile> {
         let path = path.as_ref().to_path_buf();
@@ -153,20 +127,25 @@ impl NamedFile {
             }
         };

-        let ct = from_path(&path).first_or_octet_stream();
+        let ct = mime_guess::from_path(&path).first_or_octet_stream();

         let disposition = match ct.type_() {
-            mime::IMAGE | mime::TEXT | mime::VIDEO => DispositionType::Inline,
+            mime::IMAGE | mime::TEXT | mime::AUDIO | mime::VIDEO => DispositionType::Inline,
             mime::APPLICATION => match ct.subtype() {
                 mime::JAVASCRIPT | mime::JSON => DispositionType::Inline,
-                name if name == "wasm" => DispositionType::Inline,
+                name if name == "wasm" || name == "xhtml" => DispositionType::Inline,
                 _ => DispositionType::Attachment,
             },
             _ => DispositionType::Attachment,
         };

-        let mut parameters =
-            vec![DispositionParam::Filename(String::from(filename.as_ref()))];
+        // replace special characters in filenames which could occur on some filesystems
+        let filename_s = filename
+            .replace('\n', "%0A") // \n line break
+            .replace('\x0B', "%0B") // \v vertical tab
+            .replace('\x0C', "%0C") // \f form feed
+            .replace('\r', "%0D"); // \r carriage return
+        let mut parameters = vec![DispositionParam::Filename(filename_s)];

         if !filename.is_ascii() {
             parameters.push(DispositionParam::FilenameExt(ExtendedValue {
@@ -224,7 +203,6 @@ impl NamedFile {
         })
     }

-    #[cfg(not(feature = "experimental-io-uring"))]
     /// Attempts to open a file in read-only mode.
     ///
     /// # Examples
@@ -232,6 +210,7 @@ impl NamedFile {
     /// use actix_files::NamedFile;
     /// let file = NamedFile::open("foo.txt");
     /// ```
+    #[cfg(not(feature = "experimental-io-uring"))]
     pub fn open<P: AsRef<Path>>(path: P) -> io::Result<NamedFile> {
         let file = File::open(&path)?;
         Self::from_file(file, path)
@@ -239,8 +218,8 @@ impl NamedFile {

     /// Attempts to open a file asynchronously in read-only mode.
     ///
-    /// When the `experimental-io-uring` crate feature is enabled, this will be async.
-    /// Otherwise, it will be just like [`open`][Self::open].
+    /// When the `experimental-io-uring` crate feature is enabled, this will be async. Otherwise, it
+    /// will behave just like `open`.
     ///
     /// # Examples
     /// ```
@@ -265,13 +244,13 @@ impl NamedFile {
         Self::from_file(file, path)
     }

-    /// Returns reference to the underlying `File` object.
+    /// Returns reference to the underlying file object.
     #[inline]
     pub fn file(&self) -> &File {
         &self.file
     }

-    /// Retrieve the path of this file.
+    /// Returns the filesystem path to this file.
     ///
     /// # Examples
     /// ```
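The replacement chain above maps ASCII control characters that are legal in some filesystems to visible percent escapes before they reach the `Content-Disposition` filename. An illustrative standalone version of that escaping (the function name is not part of the crate):

```rust
// Same escaping as in `from_file`, applied to a plain string.
fn escape_filename(name: &str) -> String {
    name.replace('\n', "%0A") // line feed
        .replace('\x0B', "%0B") // vertical tab
        .replace('\x0C', "%0C") // form feed
        .replace('\r', "%0D") // carriage return
}

fn main() {
    assert_eq!(escape_filename("new\nline.txt"), "new%0Aline.txt");
}
```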
@@ -289,55 +268,92 @@ impl NamedFile {
         self.path.as_path()
     }

-    /// Set response **Status Code**
+    /// Returns the time the file was last modified.
+    ///
+    /// Returns `None` only on unsupported platforms; see [`std::fs::Metadata::modified()`].
+    /// Therefore, it is usually safe to unwrap this.
+    #[inline]
+    pub fn modified(&self) -> Option<SystemTime> {
+        self.modified
+    }
+
+    /// Returns the filesystem metadata associated with this file.
+    #[inline]
+    pub fn metadata(&self) -> &Metadata {
+        &self.md
+    }
+
+    /// Returns the `Content-Type` header that will be used when serving this file.
+    #[inline]
+    pub fn content_type(&self) -> &Mime {
+        &self.content_type
+    }
+
+    /// Returns the `Content-Disposition` that will be used when serving this file.
+    #[inline]
+    pub fn content_disposition(&self) -> &ContentDisposition {
+        &self.content_disposition
+    }
+
+    /// Returns the `Content-Encoding` that will be used when serving this file.
+    ///
+    /// A return value of `None` indicates that the content is not already using a compressed
+    /// representation and may be subject to compression downstream.
+    #[inline]
+    pub fn content_encoding(&self) -> Option<ContentEncoding> {
+        self.encoding
+    }
+
+    /// Set response status code.
+    #[deprecated(since = "0.7.0", note = "Prefer `Responder::customize()`.")]
     pub fn set_status_code(mut self, status: StatusCode) -> Self {
         self.status_code = status;
         self
     }

-    /// Set the MIME Content-Type for serving this file. By default
-    /// the Content-Type is inferred from the filename extension.
+    /// Sets the `Content-Type` header that will be used when serving this file. By default the
+    /// `Content-Type` is inferred from the filename extension.
     #[inline]
-    pub fn set_content_type(mut self, mime_type: mime::Mime) -> Self {
+    pub fn set_content_type(mut self, mime_type: Mime) -> Self {
         self.content_type = mime_type;
         self
     }

-    /// Set the Content-Disposition for serving this file. This allows
-    /// changing the inline/attachment disposition as well as the filename
-    /// sent to the peer.
+    /// Set the Content-Disposition for serving this file. This allows changing the
+    /// `inline/attachment` disposition as well as the filename sent to the peer.
     ///
     /// By default the disposition is `inline` for `text/*`, `image/*`, `video/*` and
-    /// `application/{javascript, json, wasm}` mime types, and `attachment` otherwise,
-    /// and the filename is taken from the path provided in the `open` method
-    /// after converting it to UTF-8 using.
-    /// [`std::ffi::OsStr::to_string_lossy`]
+    /// `application/{javascript, json, wasm}` mime types, and `attachment` otherwise, and the
+    /// filename is taken from the path provided in the `open` method after converting it to UTF-8
+    /// (using `to_string_lossy`).
     #[inline]
-    pub fn set_content_disposition(mut self, cd: header::ContentDisposition) -> Self {
+    pub fn set_content_disposition(mut self, cd: ContentDisposition) -> Self {
         self.content_disposition = cd;
         self.flags.insert(Flags::CONTENT_DISPOSITION);
         self
     }

-    /// Disable `Content-Disposition` header.
+    /// Disables `Content-Disposition` header.
     ///
-    /// By default Content-Disposition` header is enabled.
+    /// By default, the `Content-Disposition` header is sent.
     #[inline]
     pub fn disable_content_disposition(mut self) -> Self {
         self.flags.remove(Flags::CONTENT_DISPOSITION);
         self
     }

-    /// Set content encoding for serving this file
+    /// Sets content encoding for this file.
     ///
-    /// Must be used with [`actix_web::middleware::Compress`] to take effect.
+    /// This prevents the `Compress` middleware from modifying the file contents and signals to
+    /// browsers/clients how to decode it. For example, if serving a compressed HTML file (e.g.,
+    /// `index.html.gz`) then use `.set_content_encoding(ContentEncoding::Gzip)`.
     #[inline]
     pub fn set_content_encoding(mut self, enc: ContentEncoding) -> Self {
         self.encoding = Some(enc);
         self
     }

-    /// Specifies whether to use ETag or not.
+    /// Specifies whether to return `ETag` header in response.
     ///
     /// Default is true.
     #[inline]
@@ -346,7 +362,7 @@ impl NamedFile {
         self
     }

-    /// Specifies whether to use Last-Modified or not.
+    /// Specifies whether to return `Last-Modified` header in response.
     ///
     /// Default is true.
     #[inline]
@@ -364,7 +380,7 @@ impl NamedFile {
         self
     }

-    /// Creates a etag in a format is similar to Apache's.
+    /// Creates an `ETag` in a format is similar to Apache's.
     pub(crate) fn etag(&self) -> Option<header::EntityTag> {
         self.modified.as_ref().map(|mtime| {
             let ino = {
@@ -386,7 +402,7 @@ impl NamedFile {
                 .duration_since(UNIX_EPOCH)
                 .expect("modification time must be after epoch");

-            header::EntityTag::strong(format!(
+            header::EntityTag::new_strong(format!(
                 "{:x}:{:x}:{:x}:{:x}",
                 ino,
                 self.md.len(),
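The new accessors expose the response metadata a `NamedFile` will use without building a response. A usage sketch, assuming the published `actix-files` API matches the methods added above (the file path is illustrative):

```rust
use actix_files::NamedFile;

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let file = NamedFile::open_async("Cargo.toml").await?;

    // inspect what will be sent for this file
    println!("mime: {}", file.content_type());
    println!("disposition: {:?}", file.content_disposition());
    println!("encoding: {:?}", file.content_encoding());
    println!("modified: {:?}", file.modified());
    Ok(())
}
```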
@@ -405,12 +421,13 @@ impl NamedFile {
         if self.status_code != StatusCode::OK {
             let mut res = HttpResponse::build(self.status_code);

-            if self.flags.contains(Flags::PREFER_UTF8) {
-                let ct = equiv_utf8_text(self.content_type.clone());
-                res.insert_header((header::CONTENT_TYPE, ct.to_string()));
+            let ct = if self.flags.contains(Flags::PREFER_UTF8) {
+                equiv_utf8_text(self.content_type.clone())
             } else {
-                res.insert_header((header::CONTENT_TYPE, self.content_type.to_string()));
-            }
+                self.content_type
+            };
+
+            res.insert_header((header::CONTENT_TYPE, ct.to_string()));

             if self.flags.contains(Flags::CONTENT_DISPOSITION) {
                 res.insert_header((
@@ -420,7 +437,7 @@ impl NamedFile {
             }

             if let Some(current_encoding) = self.encoding {
-                res.encoding(current_encoding);
+                res.insert_header((header::CONTENT_ENCODING, current_encoding.as_str()));
             }

             let reader = chunked::new_chunked_read(self.md.len(), 0, self.file);
@@ -478,12 +495,13 @@ impl NamedFile {

         let mut res = HttpResponse::build(self.status_code);

-        if self.flags.contains(Flags::PREFER_UTF8) {
-            let ct = equiv_utf8_text(self.content_type.clone());
-            res.insert_header((header::CONTENT_TYPE, ct.to_string()));
+        let ct = if self.flags.contains(Flags::PREFER_UTF8) {
+            equiv_utf8_text(self.content_type.clone())
         } else {
-            res.insert_header((header::CONTENT_TYPE, self.content_type.to_string()));
-        }
+            self.content_type
+        };
+
+        res.insert_header((header::CONTENT_TYPE, ct.to_string()));

         if self.flags.contains(Flags::CONTENT_DISPOSITION) {
             res.insert_header((
@@ -492,9 +510,8 @@ impl NamedFile {
             ));
         }

-        // default compressing
         if let Some(current_encoding) = self.encoding {
-            res.encoding(current_encoding);
+            res.insert_header((header::CONTENT_ENCODING, current_encoding.as_str()));
         }

         if let Some(lm) = last_modified {
@@ -517,7 +534,27 @@ impl NamedFile {
                 length = ranges[0].length;
                 offset = ranges[0].start;

-                res.encoding(ContentEncoding::Identity);
+                // When a Content-Encoding header is present in a 206 partial content response
+                // for video content, it prevents browser video players from starting playback
+                // before loading the whole video and also prevents seeking.
+                //
+                // See: https://github.com/actix/actix-web/issues/2815
+                //
+                // The assumption of this fix is that the video player knows to not send an
+                // Accept-Encoding header for this request and that downstream middleware will
+                // not attempt compression for requests without it.
+                //
+                // TODO: Solve question around what to do if self.encoding is set and partial
+                // range is requested. Reject request? Ignoring self.encoding seems wrong, too.
+                // In practice, it should not come up.
+                if req.headers().contains_key(&header::ACCEPT_ENCODING) {
+                    // don't allow compression middleware to modify partial content
+                    res.insert_header((
+                        header::CONTENT_ENCODING,
+                        HeaderValue::from_static("identity"),
+                    ));
+                }
+
                 res.insert_header((
                     header::CONTENT_RANGE,
                     format!("bytes {}-{}/{}", offset, offset + length - 1, self.md.len()),
@@ -626,7 +663,7 @@ impl Service<ServiceRequest> for NamedFileService {
     type Error = Error;
     type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;

-    actix_service::always_ready!();
+    dev::always_ready!();

     fn call(&self, req: ServiceRequest) -> Self::Future {
         let (req, _) = req.into_parts();
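The partial-content fix above pins `Content-Encoding: identity` on a 206 response only when the client advertised `Accept-Encoding`, so downstream compression middleware leaves the ranged body untouched. A hedged, standalone sketch of the same guard outside of `NamedFile` (function name is illustrative):

```rust
use actix_web::http::header::{self, HeaderValue};
use actix_web::{HttpRequest, HttpResponseBuilder};

// Pin the encoding of a partial-content response so compression middleware
// does not rewrite the ranged body.
fn pin_identity_for_partial_content(req: &HttpRequest, res: &mut HttpResponseBuilder) {
    if req.headers().contains_key(header::ACCEPT_ENCODING) {
        res.insert_header((header::CONTENT_ENCODING, HeaderValue::from_static("identity")));
    }
}
```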
@@ -1,5 +1,5 @@
 use std::{
-    path::{Path, PathBuf},
+    path::{Component, Path, PathBuf},
     str::FromStr,
 };

@@ -26,8 +26,23 @@ impl PathBufWrap {
     pub fn parse_path(path: &str, hidden_files: bool) -> Result<Self, UriSegmentError> {
         let mut buf = PathBuf::new();
+
+        // equivalent to `path.split('/').count()`
+        let mut segment_count = path.matches('/').count() + 1;
+
+        // we can decode the whole path here (instead of per-segment decoding)
+        // because we will reject `%2F` in paths using `segment_count`.
+        let path = percent_encoding::percent_decode_str(path)
+            .decode_utf8()
+            .map_err(|_| UriSegmentError::NotValidUtf8)?;
+
+        // disallow decoding `%2F` into `/`
+        if segment_count != path.matches('/').count() + 1 {
+            return Err(UriSegmentError::BadChar('/'));
+        }
+
         for segment in path.split('/') {
             if segment == ".." {
+                segment_count -= 1;
                 buf.pop();
             } else if !hidden_files && segment.starts_with('.') {
                 return Err(UriSegmentError::BadStart('.'));
@@ -40,14 +55,27 @@ impl PathBufWrap {
             } else if segment.ends_with('<') {
                 return Err(UriSegmentError::BadEnd('<'));
             } else if segment.is_empty() {
+                segment_count -= 1;
                 continue;
             } else if cfg!(windows) && segment.contains('\\') {
                 return Err(UriSegmentError::BadChar('\\'));
+            } else if cfg!(windows) && segment.contains(':') {
+                return Err(UriSegmentError::BadChar(':'));
             } else {
                 buf.push(segment)
             }
         }
+
+        // make sure we agree with stdlib parser
+        for (i, component) in buf.components().enumerate() {
+            assert!(
+                matches!(component, Component::Normal(_)),
+                "component `{:?}` is not normal",
+                component
+            );
+            assert!(i < segment_count);
+        }
+
         Ok(PathBufWrap(buf))
     }
 }
@@ -63,14 +91,12 @@ impl FromRequest for PathBufWrap {
     type Future = Ready<Result<Self, Self::Error>>;

     fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
-        ready(req.match_info().path().parse())
+        ready(req.match_info().unprocessed().parse())
     }
 }

 #[cfg(test)]
 mod tests {
-    use std::iter::FromIterator;

     use super::*;

     #[test]
@@ -137,4 +163,26 @@ mod tests {
             PathBuf::from_iter(vec!["etc/passwd"])
         );
     }
+
+    #[test]
+    #[cfg_attr(windows, should_panic)]
+    fn windows_drive_traversal() {
+        // detect issues in windows that could lead to path traversal
+        // see <https://github.com/SergioBenitez/Rocket/issues/1949
+
+        assert_eq!(
+            PathBufWrap::parse_path("C:test.txt", false).unwrap().0,
+            PathBuf::from_iter(vec!["C:test.txt"])
+        );
+
+        assert_eq!(
+            PathBufWrap::parse_path("C:../whatever", false).unwrap().0,
+            PathBuf::from_iter(vec!["C:../whatever"])
+        );
+
+        assert_eq!(
+            PathBufWrap::parse_path(":test.txt", false).unwrap().0,
+            PathBuf::from_iter(vec![":test.txt"])
+        );
+    }
 }
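The path parser now percent-decodes the whole path once and rejects any request where decoding would introduce a new `/` separator (i.e. an encoded `%2F`). An illustrative reimplementation of just that check using the `percent-encoding` crate directly; `PathBufWrap` itself is crate-internal, so the helper below is an assumption-labelled sketch, not the crate API:

```rust
fn rejects_encoded_slash(raw: &str) -> bool {
    let segment_count = raw.matches('/').count() + 1;
    let decoded = percent_encoding::percent_decode_str(raw)
        .decode_utf8()
        .expect("valid UTF-8 after decoding");
    // decoding must not introduce new `/` separators
    segment_count != decoded.matches('/').count() + 1
}

fn main() {
    assert!(rejects_encoded_slash("/test%2Ftest.binary"));
    assert!(!rejects_encoded_slash("/test/test.binary"));
}
```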
@@ -1,4 +1,36 @@
-use derive_more::{Display, Error};
+use std::fmt;
+
+use derive_more::Error;
+
+/// Copy of `http_range::HttpRangeParseError`.
+#[derive(Debug, Clone)]
+enum HttpRangeParseError {
+    InvalidRange,
+    NoOverlap,
+}
+
+impl From<http_range::HttpRangeParseError> for HttpRangeParseError {
+    fn from(err: http_range::HttpRangeParseError) -> Self {
+        match err {
+            http_range::HttpRangeParseError::InvalidRange => Self::InvalidRange,
+            http_range::HttpRangeParseError::NoOverlap => Self::NoOverlap,
+        }
+    }
+}
+
+#[derive(Debug, Clone, Error)]
+#[non_exhaustive]
+pub struct ParseRangeErr(#[error(not(source))] HttpRangeParseError);
+
+impl fmt::Display for ParseRangeErr {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str("invalid Range header: ")?;
+        f.write_str(match self.0 {
+            HttpRangeParseError::InvalidRange => "invalid syntax",
+            HttpRangeParseError::NoOverlap => "range starts after end of content",
+        })
+    }
+}

 /// HTTP Range header representation.
 #[derive(Debug, Clone, Copy)]
@@ -10,26 +42,22 @@ pub struct HttpRange {
     pub length: u64,
 }

-#[derive(Debug, Clone, Display, Error)]
-#[display(fmt = "Parse HTTP Range failed")]
-pub struct ParseRangeErr(#[error(not(source))] ());

 impl HttpRange {
     /// Parses Range HTTP header string as per RFC 2616.
     ///
     /// `header` is HTTP Range header (e.g. `bytes=bytes=0-9`).
     /// `size` is full size of response (file).
     pub fn parse(header: &str, size: u64) -> Result<Vec<HttpRange>, ParseRangeErr> {
-        match http_range::HttpRange::parse(header, size) {
-            Ok(ranges) => Ok(ranges
+        let ranges =
+            http_range::HttpRange::parse(header, size).map_err(|err| ParseRangeErr(err.into()))?;
+
+        Ok(ranges
             .iter()
             .map(|range| HttpRange {
                 start: range.start,
                 length: range.length,
             })
-            .collect()),
-            Err(_) => Err(ParseRangeErr(())),
-        }
+            .collect())
     }
 }

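A usage sketch for the parser above, assuming `HttpRange` is re-exported at the crate root as `actix_files::HttpRange`:

```rust
use actix_files::HttpRange;

fn main() {
    // a single inclusive range over a 100-byte resource
    let ranges = HttpRange::parse("bytes=10-20", 100).unwrap();
    assert_eq!(ranges[0].start, 10);
    assert_eq!(ranges[0].length, 11);

    // a range that starts past the end of the content is rejected
    assert!(HttpRange::parse("bytes=500-600", 100).is_err());
}
```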
@@ -1,8 +1,8 @@
 use std::{fmt, io, ops::Deref, path::PathBuf, rc::Rc};

-use actix_service::Service;
 use actix_web::{
-    dev::{ServiceRequest, ServiceResponse},
+    body::BoxBody,
+    dev::{self, Service, ServiceRequest, ServiceResponse},
     error::Error,
     guard::Guard,
     http::{header, Method},
@@ -23,7 +23,7 @@ impl Deref for FilesService {
     type Target = FilesServiceInner;

     fn deref(&self) -> &Self::Target {
-        &*self.0
+        &self.0
     }
 }

@@ -62,11 +62,7 @@ impl FilesService {
         }
     }

-    fn serve_named_file(
-        &self,
-        req: ServiceRequest,
-        mut named_file: NamedFile,
-    ) -> ServiceResponse {
+    fn serve_named_file(&self, req: ServiceRequest, mut named_file: NamedFile) -> ServiceResponse {
         if let Some(ref mime_override) = self.mime_override {
             let new_disposition = mime_override(&named_file.content_type.type_());
             named_file.content_disposition.disposition = new_disposition;
@@ -83,7 +79,7 @@ impl FilesService {

         let (req, _) = req.into_parts();

-        (self.renderer)(&dir, &req).unwrap_or_else(|e| ServiceResponse::from_err(e, req))
+        (self.renderer)(&dir, &req).unwrap_or_else(|err| ServiceResponse::from_err(err, req))
     }
 }

@@ -94,16 +90,16 @@ impl fmt::Debug for FilesService {
 }

 impl Service<ServiceRequest> for FilesService {
-    type Response = ServiceResponse;
+    type Response = ServiceResponse<BoxBody>;
     type Error = Error;
     type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;

-    actix_service::always_ready!();
+    dev::always_ready!();

     fn call(&self, req: ServiceRequest) -> Self::Future {
         let is_method_valid = if let Some(guard) = &self.guards {
             // execute user defined guards
-            (**guard).check(req.head())
+            (**guard).check(&req.guard_ctx())
         } else {
             // default behavior
             matches!(*req.method(), Method::HEAD | Method::GET)
@@ -114,32 +110,30 @@ impl Service<ServiceRequest> for FilesService {
         Box::pin(async move {
             if !is_method_valid {
                 return Ok(req.into_response(
-                    actix_web::HttpResponse::MethodNotAllowed()
+                    HttpResponse::MethodNotAllowed()
                         .insert_header(header::ContentType(mime::TEXT_PLAIN_UTF_8))
                         .body("Request did not meet this resource's requirements."),
                 ));
             }

-            let real_path =
-                match PathBufWrap::parse_path(req.match_info().path(), this.hidden_files) {
+            let path_on_disk =
+                match PathBufWrap::parse_path(req.match_info().unprocessed(), this.hidden_files) {
                     Ok(item) => item,
-                    Err(e) => return Ok(req.error_response(e)),
+                    Err(err) => return Ok(req.error_response(err)),
                 };

             if let Some(filter) = &this.path_filter {
-                if !filter(real_path.as_ref(), req.head()) {
+                if !filter(path_on_disk.as_ref(), req.head()) {
                     if let Some(ref default) = this.default {
                         return default.call(req).await;
                     } else {
-                        return Ok(
-                            req.into_response(actix_web::HttpResponse::NotFound().finish())
-                        );
+                        return Ok(req.into_response(HttpResponse::NotFound().finish()));
                     }
                 }
             }

             // full file path
-            let path = this.directory.join(&real_path);
+            let path = this.directory.join(&path_on_disk);
             if let Err(err) = path.canonicalize() {
                 return this.handle_err(err, req).await;
             }
@@ -168,7 +162,7 @@ impl Service<ServiceRequest> for FilesService {
                 }
             }
             None if this.show_index => Ok(this.show_index(req, path)),
-            _ => Ok(ServiceResponse::from_err(
+            None => Ok(ServiceResponse::from_err(
                 FilesError::IsDirectory,
                 req.into_parts().0,
             )),
@@ -177,8 +171,7 @@ impl Service<ServiceRequest> for FilesService {
             match NamedFile::open_async(&path).await {
                 Ok(mut named_file) => {
                     if let Some(ref mime_override) = this.mime_override {
-                        let new_disposition =
-                            mime_override(&named_file.content_type.type_());
+                        let new_disposition = mime_override(&named_file.content_type.type_());
                         named_file.content_disposition.disposition = new_disposition;
                     }
                     named_file.flags = this.file_flags;
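As the guard-check change above shows, `FilesService` only answers `GET` and `HEAD` by default unless a custom guard is registered. A hedged example of attaching a guard to a `Files` service (paths and host name are illustrative):

```rust
use actix_files::Files;
use actix_web::{guard::Host, App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        // only serve static files for requests addressed to this host
        App::new().service(Files::new("/static", "./static").guard(Host("assets.example.com")))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```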
@@ -1,11 +1,11 @@
-use actix_files::Files;
+use actix_files::{Files, NamedFile};
 use actix_web::{
     http::{
         header::{self, HeaderValue},
         StatusCode,
     },
     test::{self, TestRequest},
-    App,
+    web, App,
 };

 #[actix_web::test]
@@ -19,13 +19,12 @@ async fn test_utf8_file_contents() {
     assert_eq!(res.status(), StatusCode::OK);
     assert_eq!(
         res.headers().get(header::CONTENT_TYPE),
-        Some(&HeaderValue::from_static("text/plain")),
+        Some(&HeaderValue::from_static("text/plain; charset=utf-8")),
     );

-    // prefer UTF-8 encoding
+    // disable UTF-8 attribute
     let srv =
-        test::init_service(App::new().service(Files::new("/", "./tests").prefer_utf8(true)))
-            .await;
+        test::init_service(App::new().service(Files::new("/", "./tests").prefer_utf8(false))).await;

     let req = TestRequest::with_uri("/utf8.txt").to_request();
     let res = test::call_service(&srv, req).await;
@@ -33,6 +32,34 @@ async fn test_utf8_file_contents() {
     assert_eq!(res.status(), StatusCode::OK);
     assert_eq!(
         res.headers().get(header::CONTENT_TYPE),
-        Some(&HeaderValue::from_static("text/plain; charset=utf-8")),
+        Some(&HeaderValue::from_static("text/plain")),
+    );
+}
+
+#[actix_web::test]
+async fn partial_range_response_encoding() {
+    let srv = test::init_service(App::new().default_service(web::to(|| async {
+        NamedFile::open_async("./tests/test.binary").await.unwrap()
+    })))
+    .await;
+
+    // range request without accept-encoding returns no content-encoding header
+    let req = TestRequest::with_uri("/")
+        .append_header((header::RANGE, "bytes=10-20"))
+        .to_request();
+    let res = test::call_service(&srv, req).await;
+    assert_eq!(res.status(), StatusCode::PARTIAL_CONTENT);
+    assert!(!res.headers().contains_key(header::CONTENT_ENCODING));
+
+    // range request with accept-encoding returns a content-encoding header
+    let req = TestRequest::with_uri("/")
+        .append_header((header::RANGE, "bytes=10-20"))
+        .append_header((header::ACCEPT_ENCODING, "identity"))
+        .to_request();
+    let res = test::call_service(&srv, req).await;
+    assert_eq!(res.status(), StatusCode::PARTIAL_CONTENT);
+    assert_eq!(
+        res.headers().get(header::CONTENT_ENCODING).unwrap(),
+        "identity"
     );
 }
@@ -12,9 +12,7 @@ async fn test_guard_filter() {
     let srv = test::init_service(
         App::new()
             .service(Files::new("/", "./tests/fixtures/guards/first").guard(Host("first.com")))
-            .service(
-                Files::new("/", "./tests/fixtures/guards/second").guard(Host("second.com")),
-            ),
+            .service(Files::new("/", "./tests/fixtures/guards/second").guard(Host("second.com"))),
     )
     .await;

@@ -9,8 +9,7 @@ use actix_web::{
 async fn test_directory_traversal_prevention() {
     let srv = test::init_service(App::new().service(Files::new("/", "./tests"))).await;

-    let req =
-        TestRequest::with_uri("/../../../../../../../../../../../etc/passwd").to_request();
+    let req = TestRequest::with_uri("/../../../../../../../../../../../etc/passwd").to_request();
     let res = test::call_service(&srv, req).await;
     assert_eq!(res.status(), StatusCode::NOT_FOUND);

@@ -1,63 +1,103 @@
 # Changes

-## Unreleased - 2021-xx-xx
+## Unreleased
+
+- Minimum supported Rust version (MSRV) is now 1.72.
+
+## 3.2.0
+
+- Minimum supported Rust version (MSRV) is now 1.68 due to transitive `time` dependency.
+
+## 3.1.0
+
+- Minimum supported Rust version (MSRV) is now 1.59.
+
+## 3.0.0
+
+- `TestServer::stop` is now async and will wait for the server and system to shutdown. [#2442]
+- Added `TestServer::client_headers` method. [#2097]
+- Update `actix-server` dependency to `2`.
+- Update `actix-tls` dependency to `3`.
+- Update `bytes` to `1.0`. [#1813]
+- Minimum supported Rust version (MSRV) is now 1.57.
+
+[#2442]: https://github.com/actix/actix-web/pull/2442
+[#2097]: https://github.com/actix/actix-web/pull/2097
+[#1813]: https://github.com/actix/actix-web/pull/1813
+
+<details>
+<summary>3.0.0 Pre-Releases</summary>
+
+## 3.0.0-beta.13
+
+- No significant changes since `3.0.0-beta.12`.
+
+## 3.0.0-beta.12
+
+- No significant changes since `3.0.0-beta.11`.
+
+## 3.0.0-beta.11
+
+- Minimum supported Rust version (MSRV) is now 1.54.
+
+## 3.0.0-beta.10

-## 3.0.0-beta.10 - 2021-12-27
 - Update `actix-server` to `2.0.0-rc.2`. [#2550]

 [#2550]: https://github.com/actix/actix-web/pull/2550

+## 3.0.0-beta.9
+
-## 3.0.0-beta.9 - 2021-12-11
 - No significant changes since `3.0.0-beta.8`.

+## 3.0.0-beta.8
+
-## 3.0.0-beta.8 - 2021-11-30
 - Update `actix-tls` to `3.0.0-rc.1`. [#2474]

 [#2474]: https://github.com/actix/actix-web/pull/2474

+## 3.0.0-beta.7
+
-## 3.0.0-beta.7 - 2021-11-22
 - Fix compatibility with experimental `io-uring` feature of `actix-rt`. [#2408]

 [#2408]: https://github.com/actix/actix-web/pull/2408

+## 3.0.0-beta.6
+
-## 3.0.0-beta.6 - 2021-11-15
 - `TestServer::stop` is now async and will wait for the server and system to shutdown. [#2442]
 - Update `actix-server` to `2.0.0-beta.9`. [#2442]
 - Minimum supported Rust version (MSRV) is now 1.52.

 [#2442]: https://github.com/actix/actix-web/pull/2442

+## 3.0.0-beta.5
+
-## 3.0.0-beta.5 - 2021-09-09
 - Minimum supported Rust version (MSRV) is now 1.51.

+## 3.0.0-beta.4
+
-## 3.0.0-beta.4 - 2021-04-02
 - Added `TestServer::client_headers` method. [#2097]

 [#2097]: https://github.com/actix/actix-web/pull/2097

+## 3.0.0-beta.3
+
-## 3.0.0-beta.3 - 2021-03-09
 - No notable changes.

+## 3.0.0-beta.2
+
-## 3.0.0-beta.2 - 2021-02-10
 - No notable changes.

+## 3.0.0-beta.1
+
-## 3.0.0-beta.1 - 2021-01-07
 - Update `bytes` to `1.0`. [#1813]

 [#1813]: https://github.com/actix/actix-web/pull/1813

+</details>
+
+## 2.1.0
+
-## 2.1.0 - 2020-11-25
 - Add ability to set address for `TestServer`. [#1645]
 - Upgrade `base64` to `0.13`.
 - Upgrade `serde_urlencoded` to `0.7`. [#1773]
@@ -65,12 +105,12 @@
 [#1773]: https://github.com/actix/actix-web/pull/1773
 [#1645]: https://github.com/actix/actix-web/pull/1645

+## 2.0.0
+
-## 2.0.0 - 2020-09-11
 - Update actix-codec and actix-utils dependencies.

+## 2.0.0-alpha.1
+
-## 2.0.0-alpha.1 - 2020-05-23
 - Update the `time` dependency to 0.2.7
 - Update `actix-connect` dependency to 2.0.0-alpha.2
 - Make `test_server` `async` fn.
@@ -79,56 +119,57 @@
 - Update `base64` dependency to 0.12
 - Update `env_logger` dependency to 0.7

-## 1.0.0 - 2019-12-13
+## 1.0.0

 - Replaced `TestServer::start()` with `test_server()`

+## 1.0.0-alpha.3
+
-## 1.0.0-alpha.3 - 2019-12-07
 - Migrate to `std::future`

+## 0.2.5
+
-## 0.2.5 - 2019-09-17
 - Update serde_urlencoded to "0.6.1"
 - Increase TestServerRuntime timeouts from 500ms to 3000ms
 - Do not override current `System`

+## 0.2.4
+
-## 0.2.4 - 2019-07-18
 - Update actix-server to 0.6

+## 0.2.3
+
-## 0.2.3 - 2019-07-16
 - Add `delete`, `options`, `patch` methods to `TestServerRunner`

+## 0.2.2
+
-## 0.2.2 - 2019-06-16
 - Add .put() and .sput() methods

+## 0.2.1
+
-## 0.2.1 - 2019-06-05
 - Add license files

+## 0.2.0
+
-## 0.2.0 - 2019-05-12
 - Update awc and actix-http deps

+## 0.1.1
+
-## 0.1.1 - 2019-04-24
 - Always make new connection for http client

+## 0.1.0
+
-## 0.1.0 - 2019-04-16
 - No changes

+## 0.1.0-alpha.3
+
-## 0.1.0-alpha.3 - 2019-04-02
 - Request functions accept path #743

+## 0.1.0-alpha.2
+
-## 0.1.0-alpha.2 - 2019-03-29
 - Added TestServerRuntime::load_body() method
 - Update actix-http and awc libraries

+## 0.1.0-alpha.1
+
-## 0.1.0-alpha.1 - 2019-03-28
 - Initial impl
@@ -1,11 +1,11 @@
 [package]
 name = "actix-http-test"
-version = "3.0.0-beta.10"
+version = "3.2.0"
 authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
 description = "Various helpers for Actix applications to use during testing"
 keywords = ["http", "web", "framework", "async", "futures"]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 categories = [
     "network-programming",
     "asynchronous",
@@ -13,14 +13,22 @@ categories = [
     "web-programming::websocket",
 ]
 license = "MIT OR Apache-2.0"
-edition = "2018"
+edition = "2021"

 [package.metadata.docs.rs]
 features = []

-[lib]
-name = "actix_http_test"
-path = "src/lib.rs"
+[package.metadata.cargo_check_external_types]
+allowed_external_types = [
+    "actix_codec::*",
+    "actix_http::*",
+    "actix_server::*",
+    "awc::*",
+    "bytes::*",
+    "futures_core::*",
+    "http::*",
+    "tokio::*",
+]

 [features]
 default = []
@@ -29,27 +37,28 @@ default = []
 openssl = ["tls-openssl", "awc/openssl"]

 [dependencies]
-actix-service = "2.0.0"
+actix-service = "2"
-actix-codec = "0.4.1"
+actix-codec = "0.5"
-actix-tls = "3.0.0"
+actix-tls = "3"
-actix-utils = "3.0.0"
+actix-utils = "3"
 actix-rt = "2.2"
-actix-server = "2.0.0-rc.2"
+actix-server = "2"
-awc = { version = "3.0.0-beta.15", default-features = false }
+awc = { version = "3", default-features = false }

-base64 = "0.13"
 bytes = "1"
-futures-core = { version = "0.3.7", default-features = false }
+futures-core = { version = "0.3.17", default-features = false }
-http = "0.2.5"
+http = "0.2.7"
 log = "0.4"
-socket2 = "0.4"
+socket2 = "0.5"
-serde = "1.0"
+serde = "1"
-serde_json = "1.0"
+serde_json = "1"
 slab = "0.4"
 serde_urlencoded = "0.7"
-tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
+tls-openssl = { version = "0.10.55", package = "openssl", optional = true }
-tokio = { version = "1.8", features = ["sync"] }
+tokio = { version = "1.24.2", features = ["sync"] }

 [dev-dependencies]
-actix-web = { version = "4.0.0-beta.16", default-features = false, features = ["cookies"] }
+actix-http = "3"
-actix-http = "3.0.0-beta.17"
+
+[lints]
+workspace = true
@@ -1,17 +1,20 @@
-# actix-http-test
+# `actix-http-test`

-> Various helpers for Actix applications to use during testing.
+<!-- prettier-ignore-start -->

 [](https://crates.io/crates/actix-http-test)
-[](https://docs.rs/actix-http-test/3.0.0-beta.10)
+[](https://docs.rs/actix-http-test/3.2.0)
-[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
 <br>
-[](https://deps.rs/crate/actix-http-test/3.0.0-beta.10)
+[](https://deps.rs/crate/actix-http-test/3.2.0)
 [](https://crates.io/crates/actix-http-test)
 [](https://discord.gg/NWpN5mmg3x)

-## Documentation & Resources
+<!-- prettier-ignore-end -->

-- [API Documentation](https://docs.rs/actix-http-test)
-- Minimum Supported Rust Version (MSRV): 1.52
+<!-- cargo-rdme start -->
+
+Various helpers for Actix applications to use during testing.
+
+<!-- cargo-rdme end -->
|
@ -1,9 +1,8 @@
|
|||||||
//! Various helpers for Actix applications to use during testing.
|
//! Various helpers for Actix applications to use during testing.
|
||||||
|
|
||||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
|
||||||
#![warn(future_incompatible)]
|
|
||||||
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
|
||||||
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
|
||||||
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
|
||||||
#[cfg(feature = "openssl")]
|
#[cfg(feature = "openssl")]
|
||||||
extern crate tls_openssl as openssl;
|
extern crate tls_openssl as openssl;
|
||||||
@ -29,27 +28,31 @@ use tokio::sync::mpsc;
|
|||||||
/// for HTTP applications.
|
/// for HTTP applications.
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
/// ```no_run
|
///
|
||||||
/// use actix_http::HttpService;
|
/// ```
|
||||||
|
/// use actix_http::{HttpService, Response, Error, StatusCode};
|
||||||
/// use actix_http_test::test_server;
|
/// use actix_http_test::test_server;
|
||||||
/// use actix_web::{web, App, HttpResponse, Error};
|
/// use actix_service::{fn_service, map_config, ServiceFactoryExt as _};
|
||||||
///
|
///
|
||||||
/// async fn my_handler() -> Result<HttpResponse, Error> {
|
/// #[actix_rt::test]
|
||||||
/// Ok(HttpResponse::Ok().into())
|
/// # async fn hidden_test() {}
|
||||||
/// }
|
|
||||||
///
|
|
||||||
/// #[actix_web::test]
|
|
||||||
/// async fn test_example() {
|
/// async fn test_example() {
|
||||||
/// let mut srv = TestServer::start(||
|
/// let srv = test_server(|| {
|
||||||
/// HttpService::new(
|
/// HttpService::build()
|
||||||
/// App::new().service(web::resource("/").to(my_handler))
|
/// .h1(fn_service(|req| async move {
|
||||||
/// )
|
/// Ok::<_, Error>(Response::ok())
|
||||||
/// );
|
/// }))
|
||||||
|
/// .tcp()
|
||||||
|
/// .map_err(|_| ())
|
||||||
|
/// })
|
||||||
|
/// .await;
|
||||||
///
|
///
|
||||||
/// let req = srv.get("/");
|
/// let req = srv.get("/");
|
||||||
/// let response = req.send().await.unwrap();
|
/// let response = req.send().await.unwrap();
|
||||||
/// assert!(response.status().is_success());
|
///
|
||||||
|
/// assert_eq!(response.status(), StatusCode::OK);
|
||||||
/// }
|
/// }
|
||||||
|
/// # actix_rt::System::new().block_on(test_example());
|
||||||
/// ```
|
/// ```
|
||||||
pub async fn test_server<F: ServerServiceFactory<TcpStream>>(factory: F) -> TestServer {
|
pub async fn test_server<F: ServerServiceFactory<TcpStream>>(factory: F) -> TestServer {
|
||||||
let tcp = net::TcpListener::bind("127.0.0.1:0").unwrap();
|
let tcp = net::TcpListener::bind("127.0.0.1:0").unwrap();
|
||||||
@@ -87,6 +90,7 @@ pub async fn test_server_with_addr<F: ServerServiceFactory<TcpStream>>(
 
         // notify TestServer that server and system have shut down
         // all thread managed resources should be dropped at this point
+        #[allow(clippy::let_underscore_future)]
         let _ = thread_stop_tx.send(());
     });
 
@@ -102,7 +106,7 @@ pub async fn test_server_with_addr<F: ServerServiceFactory<TcpStream>>(
             builder.set_verify(SslVerifyMode::NONE);
             let _ = builder
                 .set_alpn_protos(b"\x02h2\x08http/1.1")
-                .map_err(|e| log::error!("Can not set alpn protocol: {:?}", e));
+                .map_err(|err| log::error!("Can not set ALPN protocol: {err}"));
 
             Connector::new()
                 .conn_lifetime(Duration::from_secs(0))
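The `b"\x02h2\x08http/1.1"` value passed to `set_alpn_protos` above is a wire-format ALPN list: each protocol name is preceded by a single byte giving its length. A minimal standalone sketch (not part of the diff) illustrating that framing:

```rust
fn main() {
    // The ALPN protocol list used above: 0x02 "h2", then 0x08 "http/1.1".
    // Each entry is one length byte followed by that many name bytes.
    let alpn: &[u8] = b"\x02h2\x08http/1.1";

    assert_eq!(alpn[0] as usize, "h2".len());
    assert_eq!(&alpn[1..3], b"h2");
    assert_eq!(alpn[3] as usize, "http/1.1".len());
    assert_eq!(&alpn[4..], b"http/1.1");
    println!("ALPN list parsed as expected");
}
```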
@@ -294,6 +298,7 @@ impl Drop for TestServer {
         // without needing to await anything
 
         // signal server to stop
+        #[allow(clippy::let_underscore_future)]
         let _ = self.server.stop(true);
 
         // signal system to stop
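Both `#[allow(clippy::let_underscore_future)]` attributes introduced in this file guard spots where a future is deliberately bound to `_` and dropped without being awaited, which is exactly what the `clippy::let_underscore_future` lint flags. A minimal sketch of the pattern (the `notify` function is hypothetical, not from this crate):

```rust
async fn notify() {
    // imagine this sends a shutdown signal somewhere
}

fn main() {
    // Binding the returned future to `_` drops it immediately; it is never
    // polled, so `notify`'s body never runs. Clippy warns about this pattern
    // unless the allow below is present.
    #[allow(clippy::let_underscore_future)]
    let _ = notify();
}
```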
File diff suppressed because it is too large
@@ -1,119 +1,188 @@
 [package]
 name = "actix-http"
-version = "3.0.0-beta.17"
-authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
-description = "HTTP primitives for the Actix ecosystem"
+version = "3.10.0"
+authors = [
+    "Nikolay Kim <fafhrd91@gmail.com>",
+    "Rob Ede <robjtede@icloud.com>",
+]
+description = "HTTP types and services for the Actix ecosystem"
 keywords = ["actix", "http", "framework", "async", "futures"]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 categories = [
     "network-programming",
     "asynchronous",
     "web-programming::http-server",
     "web-programming::websocket",
 ]
-license = "MIT OR Apache-2.0"
-edition = "2018"
+license.workspace = true
+edition.workspace = true
+rust-version.workspace = true
 
 [package.metadata.docs.rs]
-# features that docs.rs will build with
-features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd"]
+rustdoc-args = ["--cfg", "docsrs"]
+features = [
+    "http2",
+    "ws",
+    "openssl",
+    "rustls-0_20",
+    "rustls-0_21",
+    "rustls-0_22",
+    "rustls-0_23",
+    "compress-brotli",
+    "compress-gzip",
+    "compress-zstd",
+]
 
-[lib]
-name = "actix_http"
-path = "src/lib.rs"
+[package.metadata.cargo_check_external_types]
+allowed_external_types = [
+    "actix_codec::*",
+    "actix_service::*",
+    "actix_tls::*",
+    "actix_utils::*",
+    "bytes::*",
+    "bytestring::*",
+    "encoding_rs::*",
+    "futures_core::*",
+    "h2::*",
+    "http::*",
+    "httparse::*",
+    "language_tags::*",
+    "mime::*",
+    "openssl::*",
+    "rustls::*",
+    "tokio_util::*",
+    "tokio::*",
+]
 
 [features]
 default = []
 
-# openssl
-openssl = ["actix-tls/accept", "actix-tls/openssl"]
+# HTTP/2 protocol support
+http2 = ["dep:h2"]
 
-# rustls support
-rustls = ["actix-tls/accept", "actix-tls/rustls"]
+# WebSocket protocol implementation
+ws = [
+    "dep:local-channel",
+    "dep:base64",
+    "dep:rand",
+    "dep:sha1",
+]
 
-# enable compression support
-compress-brotli = ["brotli2", "__compress"]
-compress-gzip = ["flate2", "__compress"]
-compress-zstd = ["zstd", "__compress"]
+# TLS via OpenSSL
+openssl = ["__tls", "actix-tls/accept", "actix-tls/openssl"]
 
-# Internal (PRIVATE!) features used to aid testing and cheking feature status.
-# Don't rely on these whatsoever. They may disappear at anytime.
+# TLS via Rustls v0.20
+rustls = ["__tls", "rustls-0_20"]
+
+# TLS via Rustls v0.20
+rustls-0_20 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_20"]
+
+# TLS via Rustls v0.21
+rustls-0_21 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_21"]
+
+# TLS via Rustls v0.22
+rustls-0_22 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_22"]
+
+# TLS via Rustls v0.23
+rustls-0_23 = ["__tls", "actix-tls/accept", "actix-tls/rustls-0_23"]
+
+# Compression codecs
+compress-brotli = ["__compress", "dep:brotli"]
+compress-gzip = ["__compress", "dep:flate2"]
+compress-zstd = ["__compress", "dep:zstd"]
+
+# Internal (PRIVATE!) features used to aid testing and checking feature status.
+# Don't rely on these whatsoever. They are semver-exempt and may disappear at anytime.
 __compress = []
 
+# Internal (PRIVATE!) features used to aid checking feature status.
+# Don't rely on these whatsoever. They may disappear at anytime.
+__tls = []
+
 [dependencies]
-actix-service = "2.0.0"
-actix-codec = "0.4.1"
-actix-utils = "3.0.0"
+actix-service = "2"
+actix-codec = "0.5"
+actix-utils = "3"
 actix-rt = { version = "2.2", default-features = false }
 
-ahash = "0.7"
-base64 = "0.13"
-bitflags = "1.2"
+bitflags = "2"
 bytes = "1"
 bytestring = "1"
-derive_more = "0.99.5"
+derive_more = { version = "2", features = ["as_ref", "deref", "deref_mut", "display", "error", "from"] }
 encoding_rs = "0.8"
-futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] }
-h2 = "0.3.9"
-http = "0.2.5"
+foldhash = "0.1"
+futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
+http = "0.2.7"
 httparse = "1.5.1"
 httpdate = "1.0.1"
 itoa = "1"
 language-tags = "0.3"
-local-channel = "0.1"
-log = "0.4"
-mime = "0.3"
+mime = "0.3.4"
 percent-encoding = "2.1"
 pin-project-lite = "0.2"
-rand = "0.8"
-sha-1 = "0.10"
 smallvec = "1.6.1"
+tokio = { version = "1.24.2", features = [] }
+tokio-util = { version = "0.7", features = ["io", "codec"] }
+tracing = { version = "0.1.30", default-features = false, features = ["log"] }
 
-# tls
-actix-tls = { version = "3.0.0", default-features = false, optional = true }
+# http2
+h2 = { version = "0.3.26", optional = true }
 
-# compression
-brotli2 = { version="0.3.2", optional = true }
+# websockets
+local-channel = { version = "0.1", optional = true }
+base64 = { version = "0.22", optional = true }
+rand = { version = "0.9", optional = true }
+sha1 = { version = "0.10", optional = true }
+
+# openssl/rustls
+actix-tls = { version = "3.4", default-features = false, optional = true }
+
+# compress-*
+brotli = { version = "7", optional = true }
 flate2 = { version = "1.0.13", optional = true }
-zstd = { version = "0.9", optional = true }
+zstd = { version = "0.13", optional = true }
 
 [dev-dependencies]
-actix-http-test = { version = "3.0.0-beta.10", features = ["openssl"] }
-actix-server = "2.0.0-rc.2"
-actix-tls = { version = "3.0.0", features = ["openssl"] }
-actix-web = "4.0.0-beta.16"
+actix-http-test = { version = "3", features = ["openssl"] }
+actix-server = "2"
+actix-tls = { version = "3.4", features = ["openssl", "rustls-0_23-webpki-roots"] }
+actix-web = "4"
 
 async-stream = "0.3"
-criterion = { version = "0.3", features = ["html_reports"] }
-env_logger = "0.9"
-futures-util = { version = "0.3.7", default-features = false, features = ["alloc"] }
-rcgen = "0.8"
+criterion = { version = "0.5", features = ["html_reports"] }
+divan = "0.1.8"
+env_logger = "0.11"
+futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
+memchr = "2.4"
+once_cell = "1.9"
+rcgen = "0.13"
 regex = "1.3"
-rustls-pemfile = "0.2"
-serde = { version = "1.0", features = ["derive"] }
+rustversion = "1"
+rustls-pemfile = "2"
+serde = { version = "1", features = ["derive"] }
 serde_json = "1.0"
 static_assertions = "1"
-tls-openssl = { package = "openssl", version = "0.10.9" }
-tls-rustls = { package = "rustls", version = "0.20.0" }
-tokio = { version = "1.8", features = ["net", "rt", "macros"] }
+tls-openssl = { package = "openssl", version = "0.10.55" }
+tls-rustls_023 = { package = "rustls", version = "0.23" }
+tokio = { version = "1.24.2", features = ["net", "rt", "macros"] }
+
+[lints]
+workspace = true
 
 [[example]]
 name = "ws"
-required-features = ["rustls"]
+required-features = ["ws", "rustls-0_23"]
+
+[[example]]
+name = "tls_rustls"
+required-features = ["http2", "rustls-0_23"]
 
 [[bench]]
-name = "write-camel-case"
+name = "response-body-compression"
 harness = false
+required-features = ["compress-brotli", "compress-gzip", "compress-zstd"]
 
 [[bench]]
-name = "status-line"
-harness = false
-
-[[bench]]
-name = "uninit-headers"
-harness = false
-
-[[bench]]
-name = "quality-value"
+name = "date-formatting"
 harness = false
@@ -1,22 +1,21 @@
-# actix-http
+# `actix-http`
 
-> HTTP primitives for the Actix ecosystem.
+> HTTP types and services for the Actix ecosystem.
 
+<!-- prettier-ignore-start -->
+
 [](https://crates.io/crates/actix-http)
-[](https://docs.rs/actix-http/3.0.0-beta.17)
-[](https://blog.rust-lang.org/2021/05/06/Rust-1.52.0.html)
+[](https://docs.rs/actix-http/3.10.0)
 <br />
-[](https://deps.rs/crate/actix-http/3.0.0-beta.17)
+[](https://deps.rs/crate/actix-http/3.10.0)
 [](https://crates.io/crates/actix-http)
 [](https://discord.gg/NWpN5mmg3x)
 
-## Documentation & Resources
+<!-- prettier-ignore-end -->
 
-- [API Documentation](https://docs.rs/actix-http)
-- Minimum Supported Rust Version (MSRV): 1.52
-
-## Example
+## Examples
 
 ```rust
 use std::{env, io};
@@ -25,7 +24,7 @@ use actix_http::{HttpService, Response};
 use actix_server::Server;
 use futures_util::future;
 use http::header::HeaderValue;
-use log::info;
+use tracing::info;
 
 #[actix_rt::main]
 async fn main() -> io::Result<()> {
@@ -49,18 +48,3 @@ async fn main() -> io::Result<()> {
         .await
 }
 ```
-
-## License
-
-This project is licensed under either of
-
-- Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0))
-- MIT license ([LICENSE-MIT](LICENSE-MIT) or [http://opensource.org/licenses/MIT](http://opensource.org/licenses/MIT))
-
-at your option.
-
-## Code of Conduct
-
-Contribution to the actix-http crate is organized under the terms of the
-Contributor Covenant, the maintainer of actix-http, @fafhrd91, promises to
-intervene to uphold that code of conduct.
actix-http/benches/date-formatting.rs (new file)
@@ -0,0 +1,20 @@
+use std::time::SystemTime;
+
+use actix_http::header::HttpDate;
+use divan::{black_box, AllocProfiler, Bencher};
+
+#[global_allocator]
+static ALLOC: AllocProfiler = AllocProfiler::system();
+
+#[divan::bench]
+fn date_formatting(b: Bencher<'_, '_>) {
+    let now = SystemTime::now();
+
+    b.bench(|| {
+        black_box(HttpDate::from(black_box(now)).to_string());
+    })
+}
+
+fn main() {
+    divan::main();
+}
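As a usage note on the new benchmark above, here is a minimal, hedged sketch of the operation it measures: converting a `SystemTime` into actix-http's `HttpDate` and formatting it as a string (the exact output depends on the current time).

```rust
use std::time::SystemTime;

use actix_http::header::HttpDate;

fn main() {
    // HttpDate implements From<SystemTime> and Display; formatting yields an
    // HTTP-date string along the lines of "Sun, 06 Nov 1994 08:49:37 GMT".
    let date = HttpDate::from(SystemTime::now());
    println!("{date}");
}
```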
@ -1,90 +0,0 @@
|
|||||||
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
|
|
||||||
|
|
||||||
const CODES: &[u16] = &[0, 1000, 201, 800, 550];
|
|
||||||
|
|
||||||
fn bench_quality_display_impls(c: &mut Criterion) {
|
|
||||||
let mut group = c.benchmark_group("quality value display impls");
|
|
||||||
|
|
||||||
for i in CODES.iter() {
|
|
||||||
group.bench_with_input(BenchmarkId::new("New (fast?)", i), i, |b, &i| {
|
|
||||||
b.iter(|| _new::Quality(i).to_string())
|
|
||||||
});
|
|
||||||
|
|
||||||
group.bench_with_input(BenchmarkId::new("Naive", i), i, |b, &i| {
|
|
||||||
b.iter(|| _naive::Quality(i).to_string())
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
group.finish();
|
|
||||||
}
|
|
||||||
|
|
||||||
criterion_group!(benches, bench_quality_display_impls);
|
|
||||||
criterion_main!(benches);
|
|
||||||
|
|
||||||
mod _new {
|
|
||||||
use std::fmt;
|
|
||||||
|
|
||||||
pub struct Quality(pub(crate) u16);
|
|
||||||
|
|
||||||
impl fmt::Display for Quality {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
match self.0 {
|
|
||||||
0 => f.write_str("0"),
|
|
||||||
1000 => f.write_str("1"),
|
|
||||||
|
|
||||||
// some number in the range 1–999
|
|
||||||
x => {
|
|
||||||
f.write_str("0.")?;
|
|
||||||
|
|
||||||
// this implementation avoids string allocation otherwise required
|
|
||||||
// for `.trim_end_matches('0')`
|
|
||||||
|
|
||||||
if x < 10 {
|
|
||||||
f.write_str("00")?;
|
|
||||||
// 0 is handled so it's not possible to have a trailing 0, we can just return
|
|
||||||
itoa::fmt(f, x)
|
|
||||||
} else if x < 100 {
|
|
||||||
f.write_str("0")?;
|
|
||||||
if x % 10 == 0 {
|
|
||||||
// trailing 0, divide by 10 and write
|
|
||||||
itoa::fmt(f, x / 10)
|
|
||||||
} else {
|
|
||||||
itoa::fmt(f, x)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// x is in range 101–999
|
|
||||||
|
|
||||||
if x % 100 == 0 {
|
|
||||||
// two trailing 0s, divide by 100 and write
|
|
||||||
itoa::fmt(f, x / 100)
|
|
||||||
} else if x % 10 == 0 {
|
|
||||||
// one trailing 0, divide by 10 and write
|
|
||||||
itoa::fmt(f, x / 10)
|
|
||||||
} else {
|
|
||||||
itoa::fmt(f, x)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mod _naive {
|
|
||||||
use std::fmt;
|
|
||||||
|
|
||||||
pub struct Quality(pub(crate) u16);
|
|
||||||
|
|
||||||
impl fmt::Display for Quality {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
match self.0 {
|
|
||||||
0 => f.write_str("0"),
|
|
||||||
1000 => f.write_str("1"),
|
|
||||||
|
|
||||||
x => {
|
|
||||||
write!(f, "{}", format!("{:03}", x).trim_end_matches('0'))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
88
actix-http/benches/response-body-compression.rs
Normal file
88
actix-http/benches/response-body-compression.rs
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
use std::convert::Infallible;
|
||||||
|
|
||||||
|
use actix_http::{encoding::Encoder, ContentEncoding, Request, Response, StatusCode};
|
||||||
|
use actix_service::{fn_service, Service as _};
|
||||||
|
use criterion::{black_box, criterion_group, criterion_main, Criterion};
|
||||||
|
|
||||||
|
static BODY: &[u8] = include_bytes!("../Cargo.toml");
|
||||||
|
|
||||||
|
fn compression_responses(c: &mut Criterion) {
|
||||||
|
let mut group = c.benchmark_group("compression responses");
|
||||||
|
|
||||||
|
group.bench_function("identity", |b| {
|
||||||
|
let rt = actix_rt::Runtime::new().unwrap();
|
||||||
|
|
||||||
|
let identity_svc = fn_service(|_: Request| async move {
|
||||||
|
let mut res = Response::with_body(StatusCode::OK, ());
|
||||||
|
let body = black_box(Encoder::response(
|
||||||
|
ContentEncoding::Identity,
|
||||||
|
res.head_mut(),
|
||||||
|
BODY,
|
||||||
|
));
|
||||||
|
Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
|
||||||
|
});
|
||||||
|
|
||||||
|
b.iter(|| {
|
||||||
|
rt.block_on(identity_svc.call(Request::new())).unwrap();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
group.bench_function("gzip", |b| {
|
||||||
|
let rt = actix_rt::Runtime::new().unwrap();
|
||||||
|
|
||||||
|
let identity_svc = fn_service(|_: Request| async move {
|
||||||
|
let mut res = Response::with_body(StatusCode::OK, ());
|
||||||
|
let body = black_box(Encoder::response(
|
||||||
|
ContentEncoding::Gzip,
|
||||||
|
res.head_mut(),
|
||||||
|
BODY,
|
||||||
|
));
|
||||||
|
Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
|
||||||
|
});
|
||||||
|
|
||||||
|
b.iter(|| {
|
||||||
|
rt.block_on(identity_svc.call(Request::new())).unwrap();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
group.bench_function("br", |b| {
|
||||||
|
let rt = actix_rt::Runtime::new().unwrap();
|
||||||
|
|
||||||
|
let identity_svc = fn_service(|_: Request| async move {
|
||||||
|
let mut res = Response::with_body(StatusCode::OK, ());
|
||||||
|
let body = black_box(Encoder::response(
|
||||||
|
ContentEncoding::Brotli,
|
||||||
|
res.head_mut(),
|
||||||
|
BODY,
|
||||||
|
));
|
||||||
|
Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
|
||||||
|
});
|
||||||
|
|
||||||
|
b.iter(|| {
|
||||||
|
rt.block_on(identity_svc.call(Request::new())).unwrap();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
group.bench_function("zstd", |b| {
|
||||||
|
let rt = actix_rt::Runtime::new().unwrap();
|
||||||
|
|
||||||
|
let identity_svc = fn_service(|_: Request| async move {
|
||||||
|
let mut res = Response::with_body(StatusCode::OK, ());
|
||||||
|
let body = black_box(Encoder::response(
|
||||||
|
ContentEncoding::Zstd,
|
||||||
|
res.head_mut(),
|
||||||
|
BODY,
|
||||||
|
));
|
||||||
|
Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
|
||||||
|
});
|
||||||
|
|
||||||
|
b.iter(|| {
|
||||||
|
rt.block_on(identity_svc.call(Request::new())).unwrap();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
group.finish();
|
||||||
|
}
|
||||||
|
|
||||||
|
criterion_group!(benches, compression_responses);
|
||||||
|
criterion_main!(benches);
|
@ -1,214 +0,0 @@
|
|||||||
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
|
|
||||||
|
|
||||||
use bytes::BytesMut;
|
|
||||||
use http::Version;
|
|
||||||
|
|
||||||
const CODES: &[u16] = &[201, 303, 404, 515];
|
|
||||||
|
|
||||||
fn bench_write_status_line_11(c: &mut Criterion) {
|
|
||||||
let mut group = c.benchmark_group("write_status_line v1.1");
|
|
||||||
|
|
||||||
let version = Version::HTTP_11;
|
|
||||||
|
|
||||||
for i in CODES.iter() {
|
|
||||||
group.bench_with_input(BenchmarkId::new("Original (unsafe)", i), i, |b, &i| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut b = BytesMut::with_capacity(35);
|
|
||||||
_original::write_status_line(version, i, &mut b);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
group.bench_with_input(BenchmarkId::new("New (safe)", i), i, |b, &i| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut b = BytesMut::with_capacity(35);
|
|
||||||
_new::write_status_line(version, i, &mut b);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
group.bench_with_input(BenchmarkId::new("Naive", i), i, |b, &i| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut b = BytesMut::with_capacity(35);
|
|
||||||
_naive::write_status_line(version, i, &mut b);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
group.finish();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_write_status_line_10(c: &mut Criterion) {
|
|
||||||
let mut group = c.benchmark_group("write_status_line v1.0");
|
|
||||||
|
|
||||||
let version = Version::HTTP_10;
|
|
||||||
|
|
||||||
for i in CODES.iter() {
|
|
||||||
group.bench_with_input(BenchmarkId::new("Original (unsafe)", i), i, |b, &i| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut b = BytesMut::with_capacity(35);
|
|
||||||
_original::write_status_line(version, i, &mut b);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
group.bench_with_input(BenchmarkId::new("New (safe)", i), i, |b, &i| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut b = BytesMut::with_capacity(35);
|
|
||||||
_new::write_status_line(version, i, &mut b);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
group.bench_with_input(BenchmarkId::new("Naive", i), i, |b, &i| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut b = BytesMut::with_capacity(35);
|
|
||||||
_naive::write_status_line(version, i, &mut b);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
group.finish();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_write_status_line_09(c: &mut Criterion) {
|
|
||||||
let mut group = c.benchmark_group("write_status_line v0.9");
|
|
||||||
|
|
||||||
let version = Version::HTTP_09;
|
|
||||||
|
|
||||||
for i in CODES.iter() {
|
|
||||||
group.bench_with_input(BenchmarkId::new("Original (unsafe)", i), i, |b, &i| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut b = BytesMut::with_capacity(35);
|
|
||||||
_original::write_status_line(version, i, &mut b);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
group.bench_with_input(BenchmarkId::new("New (safe)", i), i, |b, &i| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut b = BytesMut::with_capacity(35);
|
|
||||||
_new::write_status_line(version, i, &mut b);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
group.bench_with_input(BenchmarkId::new("Naive", i), i, |b, &i| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut b = BytesMut::with_capacity(35);
|
|
||||||
_naive::write_status_line(version, i, &mut b);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
group.finish();
|
|
||||||
}
|
|
||||||
|
|
||||||
criterion_group!(
|
|
||||||
benches,
|
|
||||||
bench_write_status_line_11,
|
|
||||||
bench_write_status_line_10,
|
|
||||||
bench_write_status_line_09
|
|
||||||
);
|
|
||||||
criterion_main!(benches);
|
|
||||||
|
|
||||||
mod _naive {
|
|
||||||
use bytes::{BufMut, BytesMut};
|
|
||||||
use http::Version;
|
|
||||||
|
|
||||||
pub(crate) fn write_status_line(version: Version, n: u16, bytes: &mut BytesMut) {
|
|
||||||
match version {
|
|
||||||
Version::HTTP_11 => bytes.put_slice(b"HTTP/1.1 "),
|
|
||||||
Version::HTTP_10 => bytes.put_slice(b"HTTP/1.0 "),
|
|
||||||
Version::HTTP_09 => bytes.put_slice(b"HTTP/0.9 "),
|
|
||||||
_ => {
|
|
||||||
// other HTTP version handlers do not use this method
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bytes.put_slice(n.to_string().as_bytes());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mod _new {
|
|
||||||
use bytes::{BufMut, BytesMut};
|
|
||||||
use http::Version;
|
|
||||||
|
|
||||||
const DIGITS_START: u8 = b'0';
|
|
||||||
|
|
||||||
pub(crate) fn write_status_line(version: Version, n: u16, bytes: &mut BytesMut) {
|
|
||||||
match version {
|
|
||||||
Version::HTTP_11 => bytes.put_slice(b"HTTP/1.1 "),
|
|
||||||
Version::HTTP_10 => bytes.put_slice(b"HTTP/1.0 "),
|
|
||||||
Version::HTTP_09 => bytes.put_slice(b"HTTP/0.9 "),
|
|
||||||
_ => {
|
|
||||||
// other HTTP version handlers do not use this method
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let d100 = (n / 100) as u8;
|
|
||||||
let d10 = ((n / 10) % 10) as u8;
|
|
||||||
let d1 = (n % 10) as u8;
|
|
||||||
|
|
||||||
bytes.put_u8(DIGITS_START + d100);
|
|
||||||
bytes.put_u8(DIGITS_START + d10);
|
|
||||||
bytes.put_u8(DIGITS_START + d1);
|
|
||||||
|
|
||||||
bytes.put_u8(b' ');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mod _original {
|
|
||||||
use std::ptr;
|
|
||||||
|
|
||||||
use bytes::{BufMut, BytesMut};
|
|
||||||
use http::Version;
|
|
||||||
|
|
||||||
const DEC_DIGITS_LUT: &[u8] = b"0001020304050607080910111213141516171819\
|
|
||||||
2021222324252627282930313233343536373839\
|
|
||||||
4041424344454647484950515253545556575859\
|
|
||||||
6061626364656667686970717273747576777879\
|
|
||||||
8081828384858687888990919293949596979899";
|
|
||||||
|
|
||||||
pub(crate) const STATUS_LINE_BUF_SIZE: usize = 13;
|
|
||||||
|
|
||||||
pub(crate) fn write_status_line(version: Version, mut n: u16, bytes: &mut BytesMut) {
|
|
||||||
let mut buf: [u8; STATUS_LINE_BUF_SIZE] = *b"HTTP/1.1 ";
|
|
||||||
|
|
||||||
match version {
|
|
||||||
Version::HTTP_2 => buf[5] = b'2',
|
|
||||||
Version::HTTP_10 => buf[7] = b'0',
|
|
||||||
Version::HTTP_09 => {
|
|
||||||
buf[5] = b'0';
|
|
||||||
buf[7] = b'9';
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut curr: isize = 12;
|
|
||||||
let buf_ptr = buf.as_mut_ptr();
|
|
||||||
let lut_ptr = DEC_DIGITS_LUT.as_ptr();
|
|
||||||
let four = n > 999;
|
|
||||||
|
|
||||||
// decode 2 more chars, if > 2 chars
|
|
||||||
let d1 = (n % 100) << 1;
|
|
||||||
n /= 100;
|
|
||||||
curr -= 2;
|
|
||||||
unsafe {
|
|
||||||
ptr::copy_nonoverlapping(lut_ptr.offset(d1 as isize), buf_ptr.offset(curr), 2);
|
|
||||||
}
|
|
||||||
|
|
||||||
// decode last 1 or 2 chars
|
|
||||||
if n < 10 {
|
|
||||||
curr -= 1;
|
|
||||||
unsafe {
|
|
||||||
*buf_ptr.offset(curr) = (n as u8) + b'0';
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
let d1 = n << 1;
|
|
||||||
curr -= 2;
|
|
||||||
unsafe {
|
|
||||||
ptr::copy_nonoverlapping(lut_ptr.offset(d1 as isize), buf_ptr.offset(curr), 2);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bytes.put_slice(&buf);
|
|
||||||
if four {
|
|
||||||
bytes.put_u8(b' ');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,134 +0,0 @@
|
|||||||
use criterion::{criterion_group, criterion_main, Criterion};
|
|
||||||
|
|
||||||
use bytes::BytesMut;
|
|
||||||
|
|
||||||
// A Miri run detects UB, seen on this playground:
|
|
||||||
// https://play.rust-lang.org/?version=stable&mode=debug&edition=2018&gist=f5d9aa166aa48df8dca05fce2b6c3915
|
|
||||||
|
|
||||||
fn bench_header_parsing(c: &mut Criterion) {
|
|
||||||
c.bench_function("Original (Unsound) [short]", |b| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut buf = BytesMut::from(REQ_SHORT);
|
|
||||||
_original::parse_headers(&mut buf);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
c.bench_function("New (safe) [short]", |b| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut buf = BytesMut::from(REQ_SHORT);
|
|
||||||
_new::parse_headers(&mut buf);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
c.bench_function("Original (Unsound) [realistic]", |b| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut buf = BytesMut::from(REQ);
|
|
||||||
_original::parse_headers(&mut buf);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
c.bench_function("New (safe) [realistic]", |b| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut buf = BytesMut::from(REQ);
|
|
||||||
_new::parse_headers(&mut buf);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
criterion_group!(benches, bench_header_parsing);
|
|
||||||
criterion_main!(benches);
|
|
||||||
|
|
||||||
const MAX_HEADERS: usize = 96;
|
|
||||||
|
|
||||||
const EMPTY_HEADER_ARRAY: [httparse::Header<'static>; MAX_HEADERS] =
|
|
||||||
[httparse::EMPTY_HEADER; MAX_HEADERS];
|
|
||||||
|
|
||||||
#[derive(Clone, Copy)]
|
|
||||||
struct HeaderIndex {
|
|
||||||
name: (usize, usize),
|
|
||||||
value: (usize, usize),
|
|
||||||
}
|
|
||||||
|
|
||||||
const EMPTY_HEADER_INDEX: HeaderIndex = HeaderIndex {
|
|
||||||
name: (0, 0),
|
|
||||||
value: (0, 0),
|
|
||||||
};
|
|
||||||
|
|
||||||
const EMPTY_HEADER_INDEX_ARRAY: [HeaderIndex; MAX_HEADERS] = [EMPTY_HEADER_INDEX; MAX_HEADERS];
|
|
||||||
|
|
||||||
impl HeaderIndex {
|
|
||||||
fn record(bytes: &[u8], headers: &[httparse::Header<'_>], indices: &mut [HeaderIndex]) {
|
|
||||||
let bytes_ptr = bytes.as_ptr() as usize;
|
|
||||||
for (header, indices) in headers.iter().zip(indices.iter_mut()) {
|
|
||||||
let name_start = header.name.as_ptr() as usize - bytes_ptr;
|
|
||||||
let name_end = name_start + header.name.len();
|
|
||||||
indices.name = (name_start, name_end);
|
|
||||||
let value_start = header.value.as_ptr() as usize - bytes_ptr;
|
|
||||||
let value_end = value_start + header.value.len();
|
|
||||||
indices.value = (value_start, value_end);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// test cases taken from:
|
|
||||||
// https://github.com/seanmonstar/httparse/blob/master/benches/parse.rs
|
|
||||||
|
|
||||||
const REQ_SHORT: &[u8] = b"\
|
|
||||||
GET / HTTP/1.0\r\n\
|
|
||||||
Host: example.com\r\n\
|
|
||||||
Cookie: session=60; user_id=1\r\n\r\n";
|
|
||||||
|
|
||||||
const REQ: &[u8] = b"\
|
|
||||||
GET /wp-content/uploads/2010/03/hello-kitty-darth-vader-pink.jpg HTTP/1.1\r\n\
|
|
||||||
Host: www.kittyhell.com\r\n\
|
|
||||||
User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; ja-JP-mac; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 Pathtraq/0.9\r\n\
|
|
||||||
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n\
|
|
||||||
Accept-Language: ja,en-us;q=0.7,en;q=0.3\r\n\
|
|
||||||
Accept-Encoding: gzip,deflate\r\n\
|
|
||||||
Accept-Charset: Shift_JIS,utf-8;q=0.7,*;q=0.7\r\n\
|
|
||||||
Keep-Alive: 115\r\n\
|
|
||||||
Connection: keep-alive\r\n\
|
|
||||||
Cookie: wp_ozh_wsa_visits=2; wp_ozh_wsa_visit_lasttime=xxxxxxxxxx; __utma=xxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.x; __utmz=xxxxxxxxx.xxxxxxxxxx.x.x.utmccn=(referral)|utmcsr=reader.livedoor.com|utmcct=/reader/|utmcmd=referral|padding=under256\r\n\r\n";
|
|
||||||
|
|
||||||
mod _new {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
pub fn parse_headers(src: &mut BytesMut) -> usize {
|
|
||||||
let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;
|
|
||||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;
|
|
||||||
|
|
||||||
let mut req = httparse::Request::new(&mut parsed);
|
|
||||||
match req.parse(src).unwrap() {
|
|
||||||
httparse::Status::Complete(_len) => {
|
|
||||||
HeaderIndex::record(src, req.headers, &mut headers);
|
|
||||||
req.headers.len()
|
|
||||||
}
|
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mod _original {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
use std::mem::MaybeUninit;
|
|
||||||
|
|
||||||
pub fn parse_headers(src: &mut BytesMut) -> usize {
|
|
||||||
#![allow(clippy::uninit_assumed_init)]
|
|
||||||
|
|
||||||
let mut headers: [HeaderIndex; MAX_HEADERS] =
|
|
||||||
unsafe { MaybeUninit::uninit().assume_init() };
|
|
||||||
|
|
||||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] =
|
|
||||||
unsafe { MaybeUninit::uninit().assume_init() };
|
|
||||||
|
|
||||||
let mut req = httparse::Request::new(&mut parsed);
|
|
||||||
match req.parse(src).unwrap() {
|
|
||||||
httparse::Status::Complete(_len) => {
|
|
||||||
HeaderIndex::record(src, req.headers, &mut headers);
|
|
||||||
req.headers.len()
|
|
||||||
}
|
|
||||||
_ => unreachable!(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,93 +0,0 @@
|
|||||||
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
|
|
||||||
|
|
||||||
fn bench_write_camel_case(c: &mut Criterion) {
|
|
||||||
let mut group = c.benchmark_group("write_camel_case");
|
|
||||||
|
|
||||||
let names = ["connection", "Transfer-Encoding", "transfer-encoding"];
|
|
||||||
|
|
||||||
for &i in &names {
|
|
||||||
let bts = i.as_bytes();
|
|
||||||
|
|
||||||
group.bench_with_input(BenchmarkId::new("Original", i), bts, |b, bts| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut buf = black_box([0; 24]);
|
|
||||||
_original::write_camel_case(black_box(bts), &mut buf)
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
group.bench_with_input(BenchmarkId::new("New", i), bts, |b, bts| {
|
|
||||||
b.iter(|| {
|
|
||||||
let mut buf = black_box([0; 24]);
|
|
||||||
let len = black_box(bts.len());
|
|
||||||
_new::write_camel_case(black_box(bts), buf.as_mut_ptr(), len)
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
group.finish();
|
|
||||||
}
|
|
||||||
|
|
||||||
criterion_group!(benches, bench_write_camel_case);
|
|
||||||
criterion_main!(benches);
|
|
||||||
|
|
||||||
mod _new {
|
|
||||||
pub fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
|
|
||||||
// first copy entire (potentially wrong) slice to output
|
|
||||||
let buffer = unsafe {
|
|
||||||
std::ptr::copy_nonoverlapping(value.as_ptr(), buf, len);
|
|
||||||
std::slice::from_raw_parts_mut(buf, len)
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut iter = value.iter();
|
|
||||||
|
|
||||||
// first character should be uppercase
|
|
||||||
if let Some(c @ b'a'..=b'z') = iter.next() {
|
|
||||||
buffer[0] = c & 0b1101_1111;
|
|
||||||
}
|
|
||||||
|
|
||||||
// track 1 ahead of the current position since that's the location being assigned to
|
|
||||||
let mut index = 2;
|
|
||||||
|
|
||||||
// remaining characters after hyphens should also be uppercase
|
|
||||||
while let Some(&c) = iter.next() {
|
|
||||||
if c == b'-' {
|
|
||||||
// advance iter by one and uppercase if needed
|
|
||||||
if let Some(c @ b'a'..=b'z') = iter.next() {
|
|
||||||
buffer[index] = c & 0b1101_1111;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
index += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mod _original {
|
|
||||||
pub fn write_camel_case(value: &[u8], buffer: &mut [u8]) {
|
|
||||||
let mut index = 0;
|
|
||||||
let key = value;
|
|
||||||
let mut key_iter = key.iter();
|
|
||||||
|
|
||||||
if let Some(c) = key_iter.next() {
|
|
||||||
if *c >= b'a' && *c <= b'z' {
|
|
||||||
buffer[index] = *c ^ b' ';
|
|
||||||
index += 1;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
while let Some(c) = key_iter.next() {
|
|
||||||
buffer[index] = *c;
|
|
||||||
index += 1;
|
|
||||||
if *c == b'-' {
|
|
||||||
if let Some(c) = key_iter.next() {
|
|
||||||
if *c >= b'a' && *c <= b'z' {
|
|
||||||
buffer[index] = *c ^ b' ';
|
|
||||||
index += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,10 +1,10 @@
 use actix_http::HttpService;
 use actix_server::Server;
 use actix_service::map_config;
-use actix_web::{dev::AppConfig, get, App};
+use actix_web::{dev::AppConfig, get, App, Responder};
 
 #[get("/")]
-async fn index() -> &'static str {
+async fn index() -> impl Responder {
     "Hello, world. From Actix Web!"
 }
 
@@ -18,7 +18,8 @@ async fn main() -> std::io::Result<()> {
             HttpService::build()
                 // pass the app to service builder
                 // map_config is used to map App's configuration to ServiceBuilder
-                .finish(map_config(app, |_| AppConfig::default()))
+                // h1 will configure server to only use HTTP/1.1
+                .h1(map_config(app, |_| AppConfig::default()))
                 .tcp()
         })?
         .run()
27
actix-http/examples/bench.rs
Normal file
27
actix-http/examples/bench.rs
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
use std::{convert::Infallible, io, time::Duration};
|
||||||
|
|
||||||
|
use actix_http::{HttpService, Request, Response, StatusCode};
|
||||||
|
use actix_server::Server;
|
||||||
|
use once_cell::sync::Lazy;
|
||||||
|
|
||||||
|
static STR: Lazy<String> = Lazy::new(|| "HELLO WORLD ".repeat(20));
|
||||||
|
|
||||||
|
#[actix_rt::main]
|
||||||
|
async fn main() -> io::Result<()> {
|
||||||
|
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
||||||
|
|
||||||
|
Server::build()
|
||||||
|
.bind("dispatcher-benchmark", ("127.0.0.1", 8080), || {
|
||||||
|
HttpService::build()
|
||||||
|
.client_request_timeout(Duration::from_secs(1))
|
||||||
|
.finish(|_: Request| async move {
|
||||||
|
let mut res = Response::build(StatusCode::OK);
|
||||||
|
Ok::<_, Infallible>(res.body(&**STR))
|
||||||
|
})
|
||||||
|
.tcp()
|
||||||
|
})?
|
||||||
|
// limiting number of workers so that bench client is not sharing as many resources
|
||||||
|
.workers(4)
|
||||||
|
.run()
|
||||||
|
.await
|
||||||
|
}
|
@ -1,10 +1,11 @@
|
|||||||
use std::io;
|
use std::{io, time::Duration};
|
||||||
|
|
||||||
use actix_http::{Error, HttpService, Request, Response, StatusCode};
|
use actix_http::{Error, HttpService, Request, Response, StatusCode};
|
||||||
use actix_server::Server;
|
use actix_server::Server;
|
||||||
use bytes::BytesMut;
|
use bytes::BytesMut;
|
||||||
use futures_util::StreamExt as _;
|
use futures_util::StreamExt as _;
|
||||||
use http::header::HeaderValue;
|
use http::header::HeaderValue;
|
||||||
|
use tracing::info;
|
||||||
|
|
||||||
#[actix_rt::main]
|
#[actix_rt::main]
|
||||||
async fn main() -> io::Result<()> {
|
async fn main() -> io::Result<()> {
|
||||||
@ -13,23 +14,24 @@ async fn main() -> io::Result<()> {
|
|||||||
Server::build()
|
Server::build()
|
||||||
.bind("echo", ("127.0.0.1", 8080), || {
|
.bind("echo", ("127.0.0.1", 8080), || {
|
||||||
HttpService::build()
|
HttpService::build()
|
||||||
.client_timeout(1000)
|
.client_request_timeout(Duration::from_secs(1))
|
||||||
.client_disconnect(1000)
|
.client_disconnect_timeout(Duration::from_secs(1))
|
||||||
|
// handles HTTP/1.1 and HTTP/2
|
||||||
.finish(|mut req: Request| async move {
|
.finish(|mut req: Request| async move {
|
||||||
let mut body = BytesMut::new();
|
let mut body = BytesMut::new();
|
||||||
while let Some(item) = req.payload().next().await {
|
while let Some(item) = req.payload().next().await {
|
||||||
body.extend_from_slice(&item?);
|
body.extend_from_slice(&item?);
|
||||||
}
|
}
|
||||||
|
|
||||||
log::info!("request body: {:?}", body);
|
info!("request body: {body:?}");
|
||||||
|
|
||||||
Ok::<_, Error>(
|
let res = Response::build(StatusCode::OK)
|
||||||
Response::build(StatusCode::OK)
|
|
||||||
.insert_header(("x-head", HeaderValue::from_static("dummy value!")))
|
.insert_header(("x-head", HeaderValue::from_static("dummy value!")))
|
||||||
.body(body),
|
.body(body);
|
||||||
)
|
|
||||||
|
Ok::<_, Error>(res)
|
||||||
})
|
})
|
||||||
.tcp()
|
.tcp() // No TLS
|
||||||
})?
|
})?
|
||||||
.run()
|
.run()
|
||||||
.await
|
.await
|
||||||
|
@ -1,32 +1,34 @@
|
|||||||
use std::io;
|
use std::io;
|
||||||
|
|
||||||
use actix_http::{
|
use actix_http::{
|
||||||
body::MessageBody, header::HeaderValue, Error, HttpService, Request, Response, StatusCode,
|
body::{BodyStream, MessageBody},
|
||||||
|
header, Error, HttpMessage, HttpService, Request, Response, StatusCode,
|
||||||
};
|
};
|
||||||
use actix_server::Server;
|
|
||||||
use bytes::BytesMut;
|
|
||||||
use futures_util::StreamExt as _;
|
|
||||||
|
|
||||||
async fn handle_request(mut req: Request) -> Result<Response<impl MessageBody>, Error> {
|
async fn handle_request(mut req: Request) -> Result<Response<impl MessageBody>, Error> {
|
||||||
let mut body = BytesMut::new();
|
let mut res = Response::build(StatusCode::OK);
|
||||||
while let Some(item) = req.payload().next().await {
|
|
||||||
body.extend_from_slice(&item?)
|
if let Some(ct) = req.headers().get(header::CONTENT_TYPE) {
|
||||||
|
res.insert_header((header::CONTENT_TYPE, ct));
|
||||||
}
|
}
|
||||||
|
|
||||||
log::info!("request body: {:?}", body);
|
// echo request payload stream as (chunked) response body
|
||||||
|
let res = res.message_body(BodyStream::new(req.payload().take()))?;
|
||||||
|
|
||||||
Ok(Response::build(StatusCode::OK)
|
Ok(res)
|
||||||
.insert_header(("x-head", HeaderValue::from_static("dummy value!")))
|
|
||||||
.body(body))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::main]
|
#[actix_rt::main]
|
||||||
async fn main() -> io::Result<()> {
|
async fn main() -> io::Result<()> {
|
||||||
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
||||||
|
|
||||||
Server::build()
|
actix_server::Server::build()
|
||||||
.bind("echo", ("127.0.0.1", 8080), || {
|
.bind("echo", ("127.0.0.1", 8080), || {
|
||||||
HttpService::build().finish(handle_request).tcp()
|
HttpService::build()
|
||||||
|
// handles HTTP/1.1 only
|
||||||
|
.h1(handle_request)
|
||||||
|
// No TLS
|
||||||
|
.tcp()
|
||||||
})?
|
})?
|
||||||
.run()
|
.run()
|
||||||
.await
|
.await
|
||||||
|
34
actix-http/examples/h2c-detect.rs
Normal file
34
actix-http/examples/h2c-detect.rs
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
//! An example that supports automatic selection of plaintext h1/h2c connections.
|
||||||
|
//!
|
||||||
|
//! Notably, both the following commands will work.
|
||||||
|
//! ```console
|
||||||
|
//! $ curl --http1.1 'http://localhost:8080/'
|
||||||
|
//! $ curl --http2-prior-knowledge 'http://localhost:8080/'
|
||||||
|
//! ```
|
||||||
|
|
||||||
|
use std::{convert::Infallible, io};
|
||||||
|
|
||||||
|
use actix_http::{body::BodyStream, HttpService, Request, Response, StatusCode};
|
||||||
|
use actix_server::Server;
|
||||||
|
|
||||||
|
#[tokio::main(flavor = "current_thread")]
|
||||||
|
async fn main() -> io::Result<()> {
|
||||||
|
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
||||||
|
|
||||||
|
Server::build()
|
||||||
|
.bind("h2c-detect", ("127.0.0.1", 8080), || {
|
||||||
|
HttpService::build()
|
||||||
|
.finish(|_req: Request| async move {
|
||||||
|
Ok::<_, Infallible>(Response::build(StatusCode::OK).body(BodyStream::new(
|
||||||
|
futures_util::stream::iter([
|
||||||
|
Ok::<_, String>("123".into()),
|
||||||
|
Err("wertyuikmnbvcxdfty6t".to_owned()),
|
||||||
|
]),
|
||||||
|
)))
|
||||||
|
})
|
||||||
|
.tcp_auto_h2c()
|
||||||
|
})?
|
||||||
|
.workers(2)
|
||||||
|
.run()
|
||||||
|
.await
|
||||||
|
}
|
25
actix-http/examples/h2spec.rs
Normal file
25
actix-http/examples/h2spec.rs
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
use std::{convert::Infallible, io};
|
||||||
|
|
||||||
|
use actix_http::{HttpService, Request, Response, StatusCode};
|
||||||
|
use actix_server::Server;
|
||||||
|
use once_cell::sync::Lazy;
|
||||||
|
|
||||||
|
static STR: Lazy<String> = Lazy::new(|| "HELLO WORLD ".repeat(100));
|
||||||
|
|
||||||
|
#[actix_rt::main]
|
||||||
|
async fn main() -> io::Result<()> {
|
||||||
|
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
||||||
|
|
||||||
|
Server::build()
|
||||||
|
.bind("h2spec", ("127.0.0.1", 8080), || {
|
||||||
|
HttpService::build()
|
||||||
|
.h2(|_: Request| async move {
|
||||||
|
let mut res = Response::build(StatusCode::OK);
|
||||||
|
Ok::<_, Infallible>(res.body(&**STR))
|
||||||
|
})
|
||||||
|
.tcp()
|
||||||
|
})?
|
||||||
|
.workers(4)
|
||||||
|
.run()
|
||||||
|
.await
|
||||||
|
}
|
@@ -1,9 +1,8 @@
-use std::{convert::Infallible, io};
+use std::{convert::Infallible, io, time::Duration};
 
-use actix_http::{
-    header::HeaderValue, HttpMessage, HttpService, Request, Response, StatusCode,
-};
+use actix_http::{header::HeaderValue, HttpService, Request, Response, StatusCode};
 use actix_server::Server;
+use tracing::info;
 
 #[actix_rt::main]
 async fn main() -> io::Result<()> {
@@ -12,22 +11,19 @@ async fn main() -> io::Result<()> {
     Server::build()
         .bind("hello-world", ("127.0.0.1", 8080), || {
             HttpService::build()
-                .client_timeout(1000)
-                .client_disconnect(1000)
+                .client_request_timeout(Duration::from_secs(1))
+                .client_disconnect_timeout(Duration::from_secs(1))
                 .on_connect_ext(|_, ext| {
                     ext.insert(42u32);
                 })
                 .finish(|req: Request| async move {
-                    log::info!("{:?}", req);
+                    info!("{req:?}");
 
                     let mut res = Response::build(StatusCode::OK);
                     res.insert_header(("x-head", HeaderValue::from_static("dummy value!")));
 
-                    let forty_two = req.extensions().get::<u32>().unwrap().to_string();
-                    res.insert_header((
-                        "x-forty-two",
-                        HeaderValue::from_str(&forty_two).unwrap(),
-                    ));
+                    let forty_two = req.conn_data::<u32>().unwrap().to_string();
+                    res.insert_header(("x-forty-two", HeaderValue::from_str(&forty_two).unwrap()));
 
                     Ok::<_, Infallible>(res.body("Hello world!"))
                 })
@ -12,6 +12,7 @@ use actix_http::{body::BodyStream, HttpService, Response};
|
|||||||
use actix_server::Server;
|
use actix_server::Server;
|
||||||
use async_stream::stream;
|
use async_stream::stream;
|
||||||
use bytes::Bytes;
|
use bytes::Bytes;
|
||||||
|
use tracing::info;
|
||||||
|
|
||||||
#[actix_rt::main]
|
#[actix_rt::main]
|
||||||
async fn main() -> io::Result<()> {
|
async fn main() -> io::Result<()> {
|
||||||
@ -21,16 +22,16 @@ async fn main() -> io::Result<()> {
|
|||||||
.bind("streaming-error", ("127.0.0.1", 8080), || {
|
.bind("streaming-error", ("127.0.0.1", 8080), || {
|
||||||
HttpService::build()
|
HttpService::build()
|
||||||
.finish(|req| async move {
|
.finish(|req| async move {
|
||||||
log::info!("{:?}", req);
|
info!("{req:?}");
|
||||||
let res = Response::ok();
|
let res = Response::ok();
|
||||||
|
|
||||||
Ok::<_, Infallible>(res.set_body(BodyStream::new(stream! {
|
Ok::<_, Infallible>(res.set_body(BodyStream::new(stream! {
|
||||||
yield Ok(Bytes::from("123"));
|
yield Ok(Bytes::from("123"));
|
||||||
yield Ok(Bytes::from("456"));
|
yield Ok(Bytes::from("456"));
|
||||||
|
|
||||||
actix_rt::time::sleep(Duration::from_millis(1000)).await;
|
actix_rt::time::sleep(Duration::from_secs(1)).await;
|
||||||
|
|
||||||
yield Err(io::Error::new(io::ErrorKind::Other, ""));
|
yield Err(io::Error::new(io::ErrorKind::Other, "abc"));
|
||||||
})))
|
})))
|
||||||
})
|
})
|
||||||
.tcp()
|
.tcp()
|
||||||
|
76
actix-http/examples/tls_rustls.rs
Normal file
76
actix-http/examples/tls_rustls.rs
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
//! Demonstrates TLS configuration (via Rustls) for HTTP/1.1 and HTTP/2 connections.
|
||||||
|
//!
|
||||||
|
//! Test using cURL:
|
||||||
|
//!
|
||||||
|
//! ```console
|
||||||
|
//! $ curl --insecure https://127.0.0.1:8443
|
||||||
|
//! Hello World!
|
||||||
|
//! Protocol: HTTP/2.0
|
||||||
|
//!
|
||||||
|
//! $ curl --insecure --http1.1 https://127.0.0.1:8443
|
||||||
|
//! Hello World!
|
||||||
|
//! Protocol: HTTP/1.1
|
||||||
|
//! ```
|
||||||
|
|
||||||
|
extern crate tls_rustls_023 as rustls;
|
||||||
|
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_http::{Error, HttpService, Request, Response};
|
||||||
|
use actix_utils::future::ok;
|
||||||
|
|
||||||
|
#[actix_rt::main]
|
||||||
|
async fn main() -> io::Result<()> {
|
||||||
|
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
||||||
|
|
||||||
|
tracing::info!("starting HTTP server at https://127.0.0.1:8443");
|
||||||
|
|
||||||
|
actix_server::Server::build()
|
||||||
|
.bind("echo", ("127.0.0.1", 8443), || {
|
||||||
|
HttpService::build()
|
||||||
|
.finish(|req: Request| {
|
||||||
|
let body = format!(
|
||||||
|
"Hello World!\n\
|
||||||
|
Protocol: {:?}",
|
||||||
|
req.head().version
|
||||||
|
);
|
||||||
|
ok::<_, Error>(Response::ok().set_body(body))
|
||||||
|
})
|
||||||
|
.rustls_0_23(rustls_config())
|
||||||
|
})?
|
||||||
|
.run()
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rustls_config() -> rustls::ServerConfig {
|
||||||
|
let rcgen::CertifiedKey { cert, key_pair } =
|
||||||
|
rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
|
||||||
|
let cert_file = cert.pem();
|
||||||
|
let key_file = key_pair.serialize_pem();
|
||||||
|
|
||||||
|
let cert_file = &mut io::BufReader::new(cert_file.as_bytes());
|
||||||
|
let key_file = &mut io::BufReader::new(key_file.as_bytes());
|
||||||
|
|
||||||
|
let cert_chain = rustls_pemfile::certs(cert_file)
|
||||||
|
.collect::<Result<Vec<_>, _>>()
|
||||||
|
.unwrap();
|
||||||
|
let mut keys = rustls_pemfile::pkcs8_private_keys(key_file)
|
||||||
|
.collect::<Result<Vec<_>, _>>()
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let mut config = rustls::ServerConfig::builder()
|
||||||
|
.with_no_client_auth()
|
||||||
|
.with_single_cert(
|
||||||
|
cert_chain,
|
||||||
|
rustls::pki_types::PrivateKeyDer::Pkcs8(keys.remove(0)),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
const H1_ALPN: &[u8] = b"http/1.1";
|
||||||
|
const H2_ALPN: &[u8] = b"h2";
|
||||||
|
|
||||||
|
config.alpn_protocols.push(H2_ALPN.to_vec());
|
||||||
|
config.alpn_protocols.push(H1_ALPN.to_vec());
|
||||||
|
|
||||||
|
config
|
||||||
|
}
|
@ -1,7 +1,7 @@
|
|||||||
//! Sets up a WebSocket server over TCP and TLS.
|
//! Sets up a WebSocket server over TCP and TLS.
|
||||||
//! Sends a heartbeat message every 4 seconds but does not respond to any incoming frames.
|
//! Sends a heartbeat message every 4 seconds but does not respond to any incoming frames.
|
||||||
|
|
||||||
extern crate tls_rustls as rustls;
|
extern crate tls_rustls_023 as rustls;
|
||||||
|
|
||||||
use std::{
|
use std::{
|
||||||
io,
|
io,
|
||||||
@ -10,13 +10,13 @@ use std::{
|
|||||||
time::Duration,
|
time::Duration,
|
||||||
};
|
};
|
||||||
|
|
||||||
use actix_codec::Encoder;
|
|
||||||
use actix_http::{body::BodyStream, error::Error, ws, HttpService, Request, Response};
|
use actix_http::{body::BodyStream, error::Error, ws, HttpService, Request, Response};
|
||||||
use actix_rt::time::{interval, Interval};
|
use actix_rt::time::{interval, Interval};
|
||||||
use actix_server::Server;
|
use actix_server::Server;
|
||||||
use bytes::{Bytes, BytesMut};
|
use bytes::{Bytes, BytesMut};
|
||||||
use bytestring::ByteString;
|
use bytestring::ByteString;
|
||||||
use futures_core::{ready, Stream};
|
use futures_core::{ready, Stream};
|
||||||
|
use tokio_util::codec::Encoder;
|
||||||
|
|
||||||
#[actix_rt::main]
|
#[actix_rt::main]
|
||||||
async fn main() -> io::Result<()> {
|
async fn main() -> io::Result<()> {
|
||||||
@ -27,20 +27,22 @@ async fn main() -> io::Result<()> {
|
|||||||
HttpService::build().h1(handler).tcp()
|
HttpService::build().h1(handler).tcp()
|
||||||
})?
|
})?
|
||||||
.bind("tls", ("127.0.0.1", 8443), || {
|
.bind("tls", ("127.0.0.1", 8443), || {
|
||||||
HttpService::build().finish(handler).rustls(tls_config())
|
HttpService::build()
|
||||||
|
.finish(handler)
|
||||||
|
.rustls_0_23(tls_config())
|
||||||
})?
|
})?
|
||||||
.run()
|
.run()
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn handler(req: Request) -> Result<Response<BodyStream<Heartbeat>>, Error> {
|
async fn handler(req: Request) -> Result<Response<BodyStream<Heartbeat>>, Error> {
|
||||||
log::info!("handshaking");
|
tracing::info!("handshaking");
|
||||||
let mut res = ws::handshake(req.head())?;
|
let mut res = ws::handshake(req.head())?;
|
||||||
|
|
||||||
// handshake will always fail under HTTP/2
|
// handshake will always fail under HTTP/2
|
||||||
|
|
||||||
log::info!("responding");
|
tracing::info!("responding");
|
||||||
Ok(res.message_body(BodyStream::new(Heartbeat::new(ws::Codec::new())))?)
|
res.message_body(BodyStream::new(Heartbeat::new(ws::Codec::new())))
|
||||||
}
|
}
|
||||||
|
|
||||||
struct Heartbeat {
|
struct Heartbeat {
|
||||||
@ -61,7 +63,7 @@ impl Stream for Heartbeat {
|
|||||||
type Item = Result<Bytes, Error>;
|
type Item = Result<Bytes, Error>;
|
||||||
|
|
||||||
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||||
log::trace!("poll");
|
tracing::trace!("poll");
|
||||||
|
|
||||||
ready!(self.as_mut().interval.poll_tick(cx));
|
ready!(self.as_mut().interval.poll_tick(cx));
|
||||||
|
|
||||||
@ -82,27 +84,27 @@ impl Stream for Heartbeat {
|
|||||||
fn tls_config() -> rustls::ServerConfig {
|
fn tls_config() -> rustls::ServerConfig {
|
||||||
use std::io::BufReader;
|
use std::io::BufReader;
|
||||||
|
|
||||||
use rustls::{Certificate, PrivateKey};
|
|
||||||
use rustls_pemfile::{certs, pkcs8_private_keys};
|
use rustls_pemfile::{certs, pkcs8_private_keys};
|
||||||
|
|
||||||
let cert = rcgen::generate_simple_self_signed(vec!["localhost".to_owned()]).unwrap();
|
let rcgen::CertifiedKey { cert, key_pair } =
|
||||||
let cert_file = cert.serialize_pem().unwrap();
|
rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
|
||||||
let key_file = cert.serialize_private_key_pem();
|
let cert_file = cert.pem();
|
||||||
|
let key_file = key_pair.serialize_pem();
|
||||||
|
|
||||||
let cert_file = &mut BufReader::new(cert_file.as_bytes());
|
let cert_file = &mut BufReader::new(cert_file.as_bytes());
|
||||||
let key_file = &mut BufReader::new(key_file.as_bytes());
|
let key_file = &mut BufReader::new(key_file.as_bytes());
|
||||||
|
|
||||||
let cert_chain = certs(cert_file)
|
let cert_chain = certs(cert_file).collect::<Result<Vec<_>, _>>().unwrap();
|
||||||
.unwrap()
|
let mut keys = pkcs8_private_keys(key_file)
|
||||||
.into_iter()
|
.collect::<Result<Vec<_>, _>>()
|
||||||
.map(Certificate)
|
.unwrap();
|
||||||
.collect();
|
|
||||||
let mut keys = pkcs8_private_keys(key_file).unwrap();
|
|
||||||
|
|
||||||
let mut config = rustls::ServerConfig::builder()
|
let mut config = rustls::ServerConfig::builder()
|
||||||
.with_safe_defaults()
|
|
||||||
.with_no_client_auth()
|
.with_no_client_auth()
|
||||||
.with_single_cert(cert_chain, PrivateKey(keys.remove(0)))
|
.with_single_cert(
|
||||||
|
cert_chain,
|
||||||
|
rustls::pki_types::PrivateKeyDer::Pkcs8(keys.remove(0)),
|
||||||
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
config.alpn_protocols.push(b"http/1.1".to_vec());
|
config.alpn_protocols.push(b"http/1.1".to_vec());
|
||||||
|
@@ -47,9 +47,8 @@ where
 
     /// Attempts to pull out the next value of the underlying [`Stream`].
     ///
-    /// Empty values are skipped to prevent [`BodyStream`]'s transmission being
-    /// ended on a zero-length chunk, but rather proceed until the underlying
-    /// [`Stream`] ends.
+    /// Empty values are skipped to prevent [`BodyStream`]'s transmission being ended on a
+    /// zero-length chunk, but rather proceed until the underlying [`Stream`] ends.
     fn poll_next(
         mut self: Pin<&mut Self>,
         cx: &mut Context<'_>,
@ -80,7 +79,7 @@ mod tests {
|
|||||||
use futures_core::ready;
|
use futures_core::ready;
|
||||||
use futures_util::{stream, FutureExt as _};
|
use futures_util::{stream, FutureExt as _};
|
||||||
use pin_project_lite::pin_project;
|
use pin_project_lite::pin_project;
|
||||||
use static_assertions::{assert_impl_all, assert_not_impl_all};
|
use static_assertions::{assert_impl_all, assert_not_impl_any};
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::body::to_bytes;
|
use crate::body::to_bytes;
|
||||||
@ -91,10 +90,10 @@ mod tests {
|
|||||||
assert_impl_all!(BodyStream<stream::Empty<Result<Bytes, Infallible>>>: MessageBody);
|
assert_impl_all!(BodyStream<stream::Empty<Result<Bytes, Infallible>>>: MessageBody);
|
||||||
assert_impl_all!(BodyStream<stream::Repeat<Result<Bytes, Infallible>>>: MessageBody);
|
assert_impl_all!(BodyStream<stream::Repeat<Result<Bytes, Infallible>>>: MessageBody);
|
||||||
|
|
||||||
assert_not_impl_all!(BodyStream<stream::Empty<Bytes>>: MessageBody);
|
assert_not_impl_any!(BodyStream<stream::Empty<Bytes>>: MessageBody);
|
||||||
assert_not_impl_all!(BodyStream<stream::Repeat<Bytes>>: MessageBody);
|
assert_not_impl_any!(BodyStream<stream::Repeat<Bytes>>: MessageBody);
|
||||||
// crate::Error is not Clone
|
// crate::Error is not Clone
|
||||||
assert_not_impl_all!(BodyStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
|
assert_not_impl_any!(BodyStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn skips_empty_chunks() {
|
async fn skips_empty_chunks() {
|
||||||
@ -132,7 +131,7 @@ mod tests {
|
|||||||
assert_eq!(to_bytes(body).await.ok(), Some(Bytes::from("12")));
|
assert_eq!(to_bytes(body).await.ok(), Some(Bytes::from("12")));
|
||||||
}
|
}
|
||||||
#[derive(Debug, Display, Error)]
|
#[derive(Debug, Display, Error)]
|
||||||
#[display(fmt = "stream error")]
|
#[display("stream error")]
|
||||||
struct StreamErr;
|
struct StreamErr;
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
|
@@ -31,7 +31,7 @@ impl fmt::Debug for BoxBodyInner {
 }

 impl BoxBody {
-    /// Same as `MessageBody::boxed`.
+    /// Boxes body type, erasing type information.
     ///
     /// If the body type to wrap is unknown or generic it is better to use [`MessageBody::boxed`] to
     /// avoid double boxing.

@@ -77,12 +77,8 @@ impl MessageBody for BoxBody {
         cx: &mut Context<'_>,
     ) -> Poll<Option<Result<Bytes, Self::Error>>> {
         match &mut self.0 {
-            BoxBodyInner::None(body) => {
-                Pin::new(body).poll_next(cx).map_err(|err| match err {})
-            }
-            BoxBodyInner::Bytes(body) => {
-                Pin::new(body).poll_next(cx).map_err(|err| match err {})
-            }
+            BoxBodyInner::None(body) => Pin::new(body).poll_next(cx).map_err(|err| match err {}),
+            BoxBodyInner::Bytes(body) => Pin::new(body).poll_next(cx).map_err(|err| match err {}),
             BoxBodyInner::Stream(body) => Pin::new(body).poll_next(cx),
         }
     }

@@ -104,15 +100,13 @@ impl MessageBody for BoxBody {

 #[cfg(test)]
 mod tests {
-    use static_assertions::{assert_impl_all, assert_not_impl_all};
+    use static_assertions::{assert_impl_all, assert_not_impl_any};

     use super::*;
     use crate::body::to_bytes;

-    assert_impl_all!(BoxBody: MessageBody, fmt::Debug, Unpin);
-    assert_not_impl_all!(BoxBody: Send, Sync, Unpin);
+    assert_impl_all!(BoxBody: fmt::Debug, MessageBody, Unpin);
+    assert_not_impl_any!(BoxBody: Send, Sync);

     #[actix_rt::test]
     async fn nested_boxed_body() {
@@ -10,6 +10,17 @@ use super::{BodySize, BoxBody, MessageBody};
 use crate::Error;

 pin_project! {
+    /// An "either" type specialized for body types.
+    ///
+    /// It is common, in middleware especially, to conditionally return an inner service's unknown/
+    /// generic body `B` type or return early with a new response. This type's "right" variant
+    /// defaults to `BoxBody` since error responses are the common case.
+    ///
+    /// For example, middleware will often have `type Response = ServiceResponse<EitherBody<B>>`.
+    /// This means that the inner service's response body type maps to the `Left` variant and the
+    /// middleware's own error responses use the default `Right` variant of `BoxBody`. Of course,
+    /// there's no reason it couldn't use `EitherBody<B, String>` instead if its alternative
+    /// responses have a known type.
     #[project = EitherBodyProj]
     #[derive(Debug, Clone)]
     pub enum EitherBody<L, R = BoxBody> {

@@ -22,7 +33,10 @@ pin_project! {
 }

 impl<L> EitherBody<L, BoxBody> {
-    /// Creates new `EitherBody` using left variant and boxed right variant.
+    /// Creates new `EitherBody` left variant with a boxed right variant.
+    ///
+    /// If the expected `R` type will be inferred and is not `BoxBody` then use the
+    /// [`left`](Self::left) constructor instead.
     #[inline]
     pub fn new(body: L) -> Self {
         Self::Left { body }
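To make the middleware pattern described in the new `EitherBody` docs concrete, here is a minimal, hedged sketch; only the `new` constructor shown in this diff is used, and the `actix_http::body` import path is assumed:

use actix_http::body::EitherBody;

// The inner service's body type `B` maps to the `Left` variant; the middleware's
// own early/error responses would use the defaulted `Right = BoxBody` variant.
fn wrap_inner_body<B>(body: B) -> EitherBody<B> {
    // `new` constructs the left variant with the right variant defaulted to `BoxBody`.
    EitherBody::new(body)
}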
@@ -14,8 +14,44 @@ use pin_project_lite::pin_project;

 use super::{BodySize, BoxBody};

-/// An interface types that can converted to bytes and used as response bodies.
-// TODO: examples
+/// An interface for types that can be used as a response body.
+///
+/// It is not usually necessary to create custom body types, this trait is already [implemented for
+/// a large number of sensible body types](#foreign-impls) including:
+/// - Empty body: `()`
+/// - Text-based: `String`, `&'static str`, [`ByteString`](https://docs.rs/bytestring/1).
+/// - Byte-based: `Bytes`, `BytesMut`, `Vec<u8>`, `&'static [u8]`;
+/// - Streams: [`BodyStream`](super::BodyStream), [`SizedStream`](super::SizedStream)
+///
+/// # Examples
+/// ```
+/// # use std::convert::Infallible;
+/// # use std::task::{Poll, Context};
+/// # use std::pin::Pin;
+/// # use bytes::Bytes;
+/// # use actix_http::body::{BodySize, MessageBody};
+/// struct Repeat {
+///     chunk: String,
+///     n_times: usize,
+/// }
+///
+/// impl MessageBody for Repeat {
+///     type Error = Infallible;
+///
+///     fn size(&self) -> BodySize {
+///         BodySize::Sized((self.chunk.len() * self.n_times) as u64)
+///     }
+///
+///     fn poll_next(
+///         self: Pin<&mut Self>,
+///         _cx: &mut Context<'_>,
+///     ) -> Poll<Option<Result<Bytes, Self::Error>>> {
+///         let payload_string = self.chunk.repeat(self.n_times);
+///         let payload_bytes = Bytes::from(payload_string);
+///         Poll::Ready(Some(Ok(payload_bytes)))
+///     }
+/// }
+/// ```
 pub trait MessageBody {
     /// The type of error that will be returned if streaming body fails.
     ///
@@ -29,7 +65,22 @@ pub trait MessageBody {
     fn size(&self) -> BodySize;

     /// Attempt to pull out the next chunk of body bytes.
-    // TODO: expand documentation
+    ///
+    /// # Return Value
+    /// Similar to the `Stream` interface, there are several possible return values, each indicating
+    /// a distinct state:
+    /// - `Poll::Pending` means that this body's next chunk is not ready yet. Implementations must
+    ///   ensure that the current task will be notified when the next chunk may be ready.
+    /// - `Poll::Ready(Some(val))` means that the body has successfully produced a chunk, `val`,
+    ///   and may produce further values on subsequent `poll_next` calls.
+    /// - `Poll::Ready(None)` means that the body is complete, and `poll_next` should not be
+    ///   invoked again.
+    ///
+    /// # Panics
+    /// Once a body is complete (i.e., `poll_next` returned `Ready(None)`), calling its `poll_next`
+    /// method again may panic, block forever, or cause other kinds of problems; this trait places
+    /// no requirements on the effects of such a call. However, as the `poll_next` method is not
+    /// marked unsafe, Rust’s usual rules apply: calls must never cause UB, regardless of its state.
     fn poll_next(
         self: Pin<&mut Self>,
         cx: &mut Context<'_>,

@@ -37,7 +88,7 @@ pub trait MessageBody {

     /// Try to convert into the complete chunk of body bytes.
     ///
-    /// Implement this method if the entire body can be trivially extracted. This is useful for
+    /// Override this method if the complete body can be trivially extracted. This is useful for
     /// optimizations where `poll_next` calls can be avoided.
     ///
     /// Body types with [`BodySize::None`] are allowed to return empty `Bytes`. Although, if calling

@@ -54,7 +105,11 @@ pub trait MessageBody {
         Err(self)
     }

-    /// Converts this body into `BoxBody`.
+    /// Wraps this body into a `BoxBody`.
+    ///
+    /// No-op when called on a `BoxBody`, meaning there is no risk of double boxing when calling
+    /// this on a generic `MessageBody`. Prefer this over [`BoxBody::new`] when a boxed body
+    /// is required.
     #[inline]
     fn boxed(self) -> BoxBody
     where
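A small usage sketch of the `boxed` behaviour documented above (import paths assumed; `&'static str` is one of the foreign impls listed earlier in this diff):

use actix_http::body::{BodySize, BoxBody, MessageBody as _};

fn demo_boxing() {
    // Boxing a concrete body erases its type; calling `boxed` again on a `BoxBody`
    // is a no-op, so there is no double boxing.
    let body: BoxBody = "hello".boxed();
    let body: BoxBody = body.boxed();
    assert_eq!(body.size(), BodySize::Sized(5));
}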
@@ -65,8 +120,28 @@ pub trait MessageBody {
 }

 mod foreign_impls {
+    use std::{borrow::Cow, ops::DerefMut};
+
     use super::*;

+    impl<B> MessageBody for &mut B
+    where
+        B: MessageBody + Unpin + ?Sized,
+    {
+        type Error = B::Error;
+
+        fn size(&self) -> BodySize {
+            (**self).size()
+        }
+
+        fn poll_next(
+            mut self: Pin<&mut Self>,
+            cx: &mut Context<'_>,
+        ) -> Poll<Option<Result<Bytes, Self::Error>>> {
+            Pin::new(&mut **self).poll_next(cx)
+        }
+    }
+
     impl MessageBody for Infallible {
         type Error = Infallible;

@@ -124,8 +199,9 @@ mod foreign_impls {
         }
     }

-    impl<B> MessageBody for Pin<Box<B>>
+    impl<T, B> MessageBody for Pin<T>
     where
+        T: DerefMut<Target = B> + Unpin,
         B: MessageBody + ?Sized,
     {
         type Error = B::Error;

@@ -248,6 +324,39 @@ mod foreign_impls {
         }
     }

+    impl MessageBody for Cow<'static, [u8]> {
+        type Error = Infallible;
+
+        #[inline]
+        fn size(&self) -> BodySize {
+            BodySize::Sized(self.len() as u64)
+        }
+
+        #[inline]
+        fn poll_next(
+            self: Pin<&mut Self>,
+            _cx: &mut Context<'_>,
+        ) -> Poll<Option<Result<Bytes, Self::Error>>> {
+            if self.is_empty() {
+                Poll::Ready(None)
+            } else {
+                let bytes = match mem::take(self.get_mut()) {
+                    Cow::Borrowed(b) => Bytes::from_static(b),
+                    Cow::Owned(b) => Bytes::from(b),
+                };
+                Poll::Ready(Some(Ok(bytes)))
+            }
+        }
+
+        #[inline]
+        fn try_into_bytes(self) -> Result<Bytes, Self> {
+            match self {
+                Cow::Borrowed(b) => Ok(Bytes::from_static(b)),
+                Cow::Owned(b) => Ok(Bytes::from(b)),
+            }
+        }
+    }
+
     impl MessageBody for &'static str {
         type Error = Infallible;
@@ -303,6 +412,39 @@ mod foreign_impls {
         }
     }

+    impl MessageBody for Cow<'static, str> {
+        type Error = Infallible;
+
+        #[inline]
+        fn size(&self) -> BodySize {
+            BodySize::Sized(self.len() as u64)
+        }
+
+        #[inline]
+        fn poll_next(
+            self: Pin<&mut Self>,
+            _cx: &mut Context<'_>,
+        ) -> Poll<Option<Result<Bytes, Self::Error>>> {
+            if self.is_empty() {
+                Poll::Ready(None)
+            } else {
+                let bytes = match mem::take(self.get_mut()) {
+                    Cow::Borrowed(s) => Bytes::from_static(s.as_bytes()),
+                    Cow::Owned(s) => Bytes::from(s.into_bytes()),
+                };
+                Poll::Ready(Some(Ok(bytes)))
+            }
+        }
+
+        #[inline]
+        fn try_into_bytes(self) -> Result<Bytes, Self> {
+            match self {
+                Cow::Borrowed(s) => Ok(Bytes::from_static(s.as_bytes())),
+                Cow::Owned(s) => Ok(Bytes::from(s.into_bytes())),
+            }
+        }
+    }
+
     impl MessageBody for bytestring::ByteString {
         type Error = Infallible;
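With the two `Cow` impls added above, borrowed or owned cows can be used directly as bodies; a brief sketch (the `actix_http::body` import path is assumed):

use std::borrow::Cow;

use actix_http::body::{BodySize, MessageBody as _};

fn demo_cow_bodies() {
    let text: Cow<'static, str> = Cow::Borrowed("abc");
    let bytes: Cow<'static, [u8]> = Cow::Owned(vec![1, 2, 3]);

    // Both report their length as the body size, per the impls above.
    assert_eq!(text.size(), BodySize::Sized(3));
    assert_eq!(bytes.size(), BodySize::Sized(3));
}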
@@ -389,7 +531,7 @@ where
 mod tests {
     use actix_rt::pin;
     use actix_utils::future::poll_fn;
-    use bytes::{Bytes, BytesMut};
+    use futures_util::stream;

     use super::*;
     use crate::body::{self, EitherBody};

@@ -412,6 +554,7 @@ mod tests {
         };
     }

+    #[allow(unused_allocation)] // triggered by `Box::new(()).size()`
     #[actix_rt::test]
     async fn boxing_equivalence() {
         assert_eq!(().size(), BodySize::Sized(0));

@@ -426,6 +569,35 @@ mod tests {
         assert_poll_next_none!(pl);
     }

+    #[actix_rt::test]
+    async fn mut_equivalence() {
+        assert_eq!(().size(), BodySize::Sized(0));
+        assert_eq!(().size(), (&(&mut ())).size());
+
+        let pl = &mut ();
+        pin!(pl);
+        assert_poll_next_none!(pl);
+
+        let pl = &mut Box::new(());
+        pin!(pl);
+        assert_poll_next_none!(pl);
+
+        let mut body = body::SizedStream::new(
+            8,
+            stream::iter([
+                Ok::<_, std::io::Error>(Bytes::from("1234")),
+                Ok(Bytes::from("5678")),
+            ]),
+        );
+        let body = &mut body;
+        assert_eq!(body.size(), BodySize::Sized(8));
+        pin!(body);
+        assert_poll_next!(body, Bytes::from_static(b"1234"));
+        assert_poll_next!(body, Bytes::from_static(b"5678"));
+        assert_poll_next_none!(body);
+    }
+
+    #[allow(clippy::let_unit_value)]
     #[actix_rt::test]
     async fn test_unit() {
         let pl = ();

@@ -551,4 +723,18 @@ mod tests {
         let not_body = resp_body.downcast_ref::<()>();
         assert!(not_body.is_none());
     }

+    #[actix_rt::test]
+    async fn non_owning_to_bytes() {
+        let mut body = BoxBody::new(());
+        let bytes = body::to_bytes(&mut body).await.unwrap();
+        assert_eq!(bytes, Bytes::new());
+
+        let mut body = body::BodyStream::new(stream::iter([
+            Ok::<_, std::io::Error>(Bytes::from("1234")),
+            Ok(Bytes::from("5678")),
+        ]));
+        let bytes = body::to_bytes(&mut body).await.unwrap();
+        assert_eq!(bytes, Bytes::from_static(b"12345678"));
+    }
 }
@@ -1,4 +1,9 @@
 //! Traits and structures to aid consuming and writing HTTP payloads.
+//!
+//! "Body" and "payload" are used somewhat interchangeably in this documentation.
+
+// Though the spec kinda reads like "payload" is the possibly-transfer-encoded part of the message
+// and the "body" is the intended possibly-decoded version of that.

 mod body_stream;
 mod boxed;

@@ -9,12 +14,14 @@ mod size;
 mod sized_stream;
 mod utils;

-pub use self::body_stream::BodyStream;
-pub use self::boxed::BoxBody;
-pub use self::either::EitherBody;
-pub use self::message_body::MessageBody;
 pub(crate) use self::message_body::MessageBodyMapErr;
-pub use self::none::None;
-pub use self::size::BodySize;
-pub use self::sized_stream::SizedStream;
-pub use self::utils::to_bytes;
+pub use self::{
+    body_stream::BodyStream,
+    boxed::BoxBody,
+    either::EitherBody,
+    message_body::MessageBody,
+    none::None,
+    size::BodySize,
+    sized_stream::SizedStream,
+    utils::{to_bytes, to_bytes_limited, BodyLimitExceeded},
+};
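The grouped re-export keeps the public paths flat, so downstream imports stay the same apart from the two newly exposed items; a hedged sketch of such an import:

use actix_http::body::{
    to_bytes, to_bytes_limited, BodyLimitExceeded, BodyStream, BoxBody, EitherBody, MessageBody,
    SizedStream,
};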
@@ -10,9 +10,12 @@ use super::{BodySize, MessageBody};

 /// Body type for responses that forbid payloads.
 ///
-/// Distinct from an empty response which would contain a Content-Length header.
-///
+/// This is distinct from an "empty" response which _would_ contain a `Content-Length` header.
 /// For an "empty" body, use `()` or `Bytes::new()`.
+///
+/// For example, the HTTP spec forbids a payload to be sent with a `204 No Content` response.
+/// In this case, the payload (or lack thereof) is implicit from the status code, so a
+/// `Content-Length` header is not required.
 #[derive(Debug, Clone, Copy, Default)]
 #[non_exhaustive]
 pub struct None;
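A quick illustration of the distinction drawn above between a forbidden payload and an empty body; `body::None::new()` appears in the `to_bytes` docs of this change set, and the `()` size is taken from its message-body tests (import paths assumed):

use actix_http::body::{self, BodySize, MessageBody as _};

fn none_vs_empty() {
    // `None` means "no payload at all", e.g. for a `204 No Content` response...
    assert_eq!(body::None::new().size(), BodySize::None);

    // ...whereas `()` is an *empty* body that still implies `Content-Length: 0`.
    assert_eq!(().size(), BodySize::Sized(0));
}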
@@ -44,7 +44,7 @@ where

     #[inline]
     fn size(&self) -> BodySize {
-        BodySize::Sized(self.size as u64)
+        BodySize::Sized(self.size)
     }

     /// Attempts to pull out the next value of the underlying [`Stream`].

@@ -76,7 +76,7 @@ mod tests {
     use actix_rt::pin;
     use actix_utils::future::poll_fn;
     use futures_util::stream;
-    use static_assertions::{assert_impl_all, assert_not_impl_all};
+    use static_assertions::{assert_impl_all, assert_not_impl_any};

     use super::*;
     use crate::body::to_bytes;

@@ -87,10 +87,10 @@ mod tests {
     assert_impl_all!(SizedStream<stream::Empty<Result<Bytes, Infallible>>>: MessageBody);
     assert_impl_all!(SizedStream<stream::Repeat<Result<Bytes, Infallible>>>: MessageBody);

-    assert_not_impl_all!(SizedStream<stream::Empty<Bytes>>: MessageBody);
-    assert_not_impl_all!(SizedStream<stream::Repeat<Bytes>>: MessageBody);
+    assert_not_impl_any!(SizedStream<stream::Empty<Bytes>>: MessageBody);
+    assert_not_impl_any!(SizedStream<stream::Repeat<Bytes>>: MessageBody);
     // crate::Error is not Clone
-    assert_not_impl_all!(SizedStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);
+    assert_not_impl_any!(SizedStream<stream::Repeat<Result<Bytes, crate::Error>>>: MessageBody);

     #[actix_rt::test]
     async fn skips_empty_chunks() {
@@ -3,75 +3,196 @@ use std::task::Poll;
 use actix_rt::pin;
 use actix_utils::future::poll_fn;
 use bytes::{Bytes, BytesMut};
+use derive_more::{Display, Error};
 use futures_core::ready;

 use super::{BodySize, MessageBody};

-/// Collects the body produced by a `MessageBody` implementation into `Bytes`.
+/// Collects all the bytes produced by `body`.
 ///
 /// Any errors produced by the body stream are returned immediately.
 ///
+/// Consider using [`to_bytes_limited`] instead to protect against memory exhaustion.
+///
 /// # Examples
+///
 /// ```
 /// use actix_http::body::{self, to_bytes};
 /// use bytes::Bytes;
 ///
-/// # async fn test_to_bytes() {
+/// # actix_rt::System::new().block_on(async {
 /// let body = body::None::new();
 /// let bytes = to_bytes(body).await.unwrap();
 /// assert!(bytes.is_empty());
 ///
 /// let body = Bytes::from_static(b"123");
 /// let bytes = to_bytes(body).await.unwrap();
-/// assert_eq!(bytes, b"123"[..]);
-/// # }
+/// assert_eq!(bytes, "123");
+/// # });
 /// ```
 pub async fn to_bytes<B: MessageBody>(body: B) -> Result<Bytes, B::Error> {
+    to_bytes_limited(body, usize::MAX)
+        .await
+        .expect("body should never yield more than usize::MAX bytes")
+}
+
+/// Error type returned from [`to_bytes_limited`] when body produced exceeds limit.
+#[derive(Debug, Display, Error)]
+#[display("limit exceeded while collecting body bytes")]
+#[non_exhaustive]
+pub struct BodyLimitExceeded;
+
+/// Collects the bytes produced by `body`, up to `limit` bytes.
+///
+/// If a chunk read from `poll_next` causes the total number of bytes read to exceed `limit`, an
+/// `Err(BodyLimitExceeded)` is returned.
+///
+/// Any errors produced by the body stream are returned immediately as `Ok(Err(B::Error))`.
+///
+/// # Examples
+///
+/// ```
+/// use actix_http::body::{self, to_bytes_limited};
+/// use bytes::Bytes;
+///
+/// # actix_rt::System::new().block_on(async {
+/// let body = body::None::new();
+/// let bytes = to_bytes_limited(body, 10).await.unwrap().unwrap();
+/// assert!(bytes.is_empty());
+///
+/// let body = Bytes::from_static(b"123");
+/// let bytes = to_bytes_limited(body, 10).await.unwrap().unwrap();
+/// assert_eq!(bytes, "123");
+///
+/// let body = Bytes::from_static(b"123");
+/// assert!(to_bytes_limited(body, 2).await.is_err());
+/// # });
+/// ```
+pub async fn to_bytes_limited<B: MessageBody>(
+    body: B,
+    limit: usize,
+) -> Result<Result<Bytes, B::Error>, BodyLimitExceeded> {
+    /// Sensible default (32kB) for initial, bounded allocation when collecting body bytes.
+    const INITIAL_ALLOC_BYTES: usize = 32 * 1024;
+
     let cap = match body.size() {
-        BodySize::None | BodySize::Sized(0) => return Ok(Bytes::new()),
-        BodySize::Sized(size) => size as usize,
-        // good enough first guess for chunk size
-        BodySize::Stream => 32_768,
+        BodySize::None | BodySize::Sized(0) => return Ok(Ok(Bytes::new())),
+        BodySize::Sized(size) if size as usize > limit => return Err(BodyLimitExceeded),
+        BodySize::Sized(size) => (size as usize).min(INITIAL_ALLOC_BYTES),
+        BodySize::Stream => INITIAL_ALLOC_BYTES,
     };

+    let mut exceeded_limit = false;
     let mut buf = BytesMut::with_capacity(cap);

     pin!(body);

-    poll_fn(|cx| loop {
+    match poll_fn(|cx| loop {
         let body = body.as_mut();

         match ready!(body.poll_next(cx)) {
-            Some(Ok(bytes)) => buf.extend_from_slice(&*bytes),
+            Some(Ok(bytes)) => {
+                // if limit is exceeded...
+                if buf.len() + bytes.len() > limit {
+                    // ...set flag to true and break out of poll_fn
+                    exceeded_limit = true;
+                    return Poll::Ready(Ok(()));
+                }
+
+                buf.extend_from_slice(&bytes)
+            }
             None => return Poll::Ready(Ok(())),
             Some(Err(err)) => return Poll::Ready(Err(err)),
         }
     })
-    .await?;
-
-    Ok(buf.freeze())
+    .await
+    {
+        // propagate error returned from body poll
+        Err(err) => Ok(Err(err)),
+
+        // limit was exceeded while reading body
+        Ok(()) if exceeded_limit => Err(BodyLimitExceeded),
+
+        // otherwise return body buffer
+        Ok(()) => Ok(Ok(buf.freeze())),
+    }
 }

 #[cfg(test)]
-mod test {
+mod tests {
+    use std::io;
+
     use futures_util::{stream, StreamExt as _};

     use super::*;
-    use crate::{body::BodyStream, Error};
+    use crate::{
+        body::{BodyStream, SizedStream},
+        Error,
+    };

     #[actix_rt::test]
-    async fn test_to_bytes() {
+    async fn to_bytes_complete() {
         let bytes = to_bytes(()).await.unwrap();
         assert!(bytes.is_empty());

         let body = Bytes::from_static(b"123");
         let bytes = to_bytes(body).await.unwrap();
         assert_eq!(bytes, b"123"[..]);
+    }
+
+    #[actix_rt::test]
+    async fn to_bytes_streams() {
         let stream = stream::iter(vec![Bytes::from_static(b"123"), Bytes::from_static(b"abc")])
             .map(Ok::<_, Error>);
         let body = BodyStream::new(stream);
         let bytes = to_bytes(body).await.unwrap();
         assert_eq!(bytes, b"123abc"[..]);
     }

+    #[actix_rt::test]
+    async fn to_bytes_limited_complete() {
+        let bytes = to_bytes_limited((), 0).await.unwrap().unwrap();
+        assert!(bytes.is_empty());
+
+        let bytes = to_bytes_limited((), 1).await.unwrap().unwrap();
+        assert!(bytes.is_empty());
+
+        assert!(to_bytes_limited(Bytes::from_static(b"12"), 0)
+            .await
+            .is_err());
+        assert!(to_bytes_limited(Bytes::from_static(b"12"), 1)
+            .await
+            .is_err());
+        assert!(to_bytes_limited(Bytes::from_static(b"12"), 2).await.is_ok());
+        assert!(to_bytes_limited(Bytes::from_static(b"12"), 3).await.is_ok());
+    }
+
+    #[actix_rt::test]
+    async fn to_bytes_limited_streams() {
+        // hinting a larger body fails
+        let body = SizedStream::new(8, stream::empty().map(Ok::<_, Error>));
+        assert!(to_bytes_limited(body, 3).await.is_err());
+
+        // hinting a smaller body is okay
+        let body = SizedStream::new(3, stream::empty().map(Ok::<_, Error>));
+        assert!(to_bytes_limited(body, 3).await.unwrap().unwrap().is_empty());
+
+        // hinting a smaller body then returning a larger one fails
+        let stream = stream::iter(vec![Bytes::from_static(b"1234")]).map(Ok::<_, Error>);
+        let body = SizedStream::new(3, stream);
+        assert!(to_bytes_limited(body, 3).await.is_err());
+
+        let stream = stream::iter(vec![Bytes::from_static(b"123"), Bytes::from_static(b"abc")])
+            .map(Ok::<_, Error>);
+        let body = BodyStream::new(stream);
+        assert!(to_bytes_limited(body, 3).await.is_err());
+    }
+
+    #[actix_rt::test]
+    async fn to_body_limit_error() {
+        let err_stream = stream::once(async { Err(io::Error::new(io::ErrorKind::Other, "")) });
+        let body = SizedStream::new(8, err_stream);
+        // not too big, but propagates error from body stream
+        assert!(to_bytes_limited(body, 10).await.unwrap().is_err());
+    }
 }
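Because `to_bytes_limited` returns a nested result (outer `Err` when the limit is hit, inner `Err` for the body's own error), callers generally match on both layers; a hedged sketch with an arbitrary 64 kB cap:

use actix_http::body::{to_bytes_limited, BodyLimitExceeded, MessageBody};
use bytes::Bytes;

async fn collect_capped<B: MessageBody>(body: B) -> Option<Bytes> {
    match to_bytes_limited(body, 64 * 1024).await {
        // body fit within the cap and streamed without error
        Ok(Ok(bytes)) => Some(bytes),
        // the body itself failed while streaming
        Ok(Err(_body_err)) => None,
        // more than 64 kB was produced
        Err(BodyLimitExceeded) => None,
    }
}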
@@ -1,25 +1,22 @@
-use std::{fmt, marker::PhantomData, net, rc::Rc};
+use std::{fmt, marker::PhantomData, net, rc::Rc, time::Duration};

 use actix_codec::Framed;
 use actix_service::{IntoServiceFactory, Service, ServiceFactory};

 use crate::{
     body::{BoxBody, MessageBody},
-    config::{KeepAlive, ServiceConfig},
     h1::{self, ExpectHandler, H1Service, UpgradeHandler},
-    h2::H2Service,
     service::HttpService,
-    ConnectCallback, Extensions, Request, Response,
+    ConnectCallback, Extensions, KeepAlive, Request, Response, ServiceConfig,
 };

-/// A HTTP service builder
+/// An HTTP service builder.
 ///
-/// This type can be used to construct an instance of [`HttpService`] through a
-/// builder-like pattern.
+/// This type can construct an instance of [`HttpService`] through a builder-like pattern.
 pub struct HttpServiceBuilder<T, S, X = ExpectHandler, U = UpgradeHandler> {
     keep_alive: KeepAlive,
-    client_timeout: u64,
-    client_disconnect: u64,
+    client_request_timeout: Duration,
+    client_disconnect_timeout: Duration,
     secure: bool,
     local_addr: Option<net::SocketAddr>,
     expect: X,

@@ -28,22 +25,23 @@ pub struct HttpServiceBuilder<T, S, X = ExpectHandler, U = UpgradeHandler> {
     _phantom: PhantomData<S>,
 }

-impl<T, S> HttpServiceBuilder<T, S, ExpectHandler, UpgradeHandler>
+impl<T, S> Default for HttpServiceBuilder<T, S, ExpectHandler, UpgradeHandler>
 where
     S: ServiceFactory<Request, Config = ()>,
     S::Error: Into<Response<BoxBody>> + 'static,
     S::InitError: fmt::Debug,
     <S::Service as Service<Request>>::Future: 'static,
 {
-    /// Create instance of `ServiceConfigBuilder`
-    #[allow(clippy::new_without_default)]
-    pub fn new() -> Self {
+    fn default() -> Self {
         HttpServiceBuilder {
-            keep_alive: KeepAlive::Timeout(5),
-            client_timeout: 5000,
-            client_disconnect: 0,
+            // ServiceConfig parts (make sure defaults match)
+            keep_alive: KeepAlive::default(),
+            client_request_timeout: Duration::from_secs(5),
+            client_disconnect_timeout: Duration::ZERO,
             secure: false,
             local_addr: None,

+            // dispatcher parts
             expect: ExpectHandler,
             upgrade: None,
             on_connect_ext: None,

@@ -65,9 +63,11 @@ where
     U::Error: fmt::Display,
     U::InitError: fmt::Debug,
 {
-    /// Set server keep-alive setting.
+    /// Set connection keep-alive setting.
     ///
-    /// By default keep alive is set to a 5 seconds.
+    /// Applies to HTTP/1.1 keep-alive and HTTP/2 ping-pong.
+    ///
+    /// By default keep-alive is 5 seconds.
     pub fn keep_alive<W: Into<KeepAlive>>(mut self, val: W) -> Self {
         self.keep_alive = val.into();
         self

@@ -85,33 +85,45 @@ where
         self
     }

-    /// Set server client timeout in milliseconds for first request.
+    /// Set client request timeout (for first request).
     ///
-    /// Defines a timeout for reading client request header. If a client does not transmit
-    /// the entire set headers within this time, the request is terminated with
-    /// the 408 (Request Time-out) error.
+    /// Defines a timeout for reading client request header. If the client does not transmit the
+    /// request head within this duration, the connection is terminated with a `408 Request Timeout`
+    /// response error.
     ///
-    /// To disable timeout set value to 0.
+    /// A duration of zero disables the timeout.
     ///
-    /// By default client timeout is set to 5000 milliseconds.
-    pub fn client_timeout(mut self, val: u64) -> Self {
-        self.client_timeout = val;
+    /// By default, the client timeout is 5 seconds.
+    pub fn client_request_timeout(mut self, dur: Duration) -> Self {
+        self.client_request_timeout = dur;
         self
     }

-    /// Set server connection disconnect timeout in milliseconds.
+    #[doc(hidden)]
+    #[deprecated(since = "3.0.0", note = "Renamed to `client_request_timeout`.")]
+    pub fn client_timeout(self, dur: Duration) -> Self {
+        self.client_request_timeout(dur)
+    }
+
+    /// Set client connection disconnect timeout.
     ///
     /// Defines a timeout for disconnect connection. If a disconnect procedure does not complete
     /// within this time, the request get dropped. This timeout affects secure connections.
     ///
-    /// To disable timeout set value to 0.
+    /// A duration of zero disables the timeout.
     ///
-    /// By default disconnect timeout is set to 0.
-    pub fn client_disconnect(mut self, val: u64) -> Self {
-        self.client_disconnect = val;
+    /// By default, the disconnect timeout is disabled.
+    pub fn client_disconnect_timeout(mut self, dur: Duration) -> Self {
+        self.client_disconnect_timeout = dur;
         self
     }

+    #[doc(hidden)]
+    #[deprecated(since = "3.0.0", note = "Renamed to `client_disconnect_timeout`.")]
+    pub fn client_disconnect(self, dur: Duration) -> Self {
+        self.client_disconnect_timeout(dur)
+    }
+
     /// Provide service for `EXPECT: 100-Continue` support.
     ///
     /// Service get called with request that contains `EXPECT` header.

@@ -126,8 +138,8 @@ where
     {
         HttpServiceBuilder {
             keep_alive: self.keep_alive,
-            client_timeout: self.client_timeout,
-            client_disconnect: self.client_disconnect,
+            client_request_timeout: self.client_request_timeout,
+            client_disconnect_timeout: self.client_disconnect_timeout,
             secure: self.secure,
             local_addr: self.local_addr,
             expect: expect.into_factory(),

@@ -150,8 +162,8 @@ where
     {
         HttpServiceBuilder {
             keep_alive: self.keep_alive,
-            client_timeout: self.client_timeout,
-            client_disconnect: self.client_disconnect,
+            client_request_timeout: self.client_request_timeout,
+            client_disconnect_timeout: self.client_disconnect_timeout,
             secure: self.secure,
             local_addr: self.local_addr,
             expect: self.expect,

@@ -174,7 +186,7 @@ where
         self
     }

-    /// Finish service configuration and create a HTTP Service for HTTP/1 protocol.
+    /// Finish service configuration and create a service for the HTTP/1 protocol.
     pub fn h1<F, B>(self, service: F) -> H1Service<T, S, B, X, U>
     where
         B: MessageBody,

@@ -185,8 +197,8 @@ where
     {
         let cfg = ServiceConfig::new(
             self.keep_alive,
-            self.client_timeout,
-            self.client_disconnect,
+            self.client_request_timeout,
+            self.client_disconnect_timeout,
             self.secure,
             self.local_addr,
         );

@@ -197,8 +209,9 @@ where
             .on_connect_ext(self.on_connect_ext)
     }

-    /// Finish service configuration and create a HTTP service for HTTP/2 protocol.
-    pub fn h2<F, B>(self, service: F) -> H2Service<T, S, B>
+    /// Finish service configuration and create a service for the HTTP/2 protocol.
+    #[cfg(feature = "http2")]
+    pub fn h2<F, B>(self, service: F) -> crate::h2::H2Service<T, S, B>
     where
         F: IntoServiceFactory<S, Request>,
         S::Error: Into<Response<BoxBody>> + 'static,

@@ -209,13 +222,14 @@ where
     {
         let cfg = ServiceConfig::new(
             self.keep_alive,
-            self.client_timeout,
-            self.client_disconnect,
+            self.client_request_timeout,
+            self.client_disconnect_timeout,
             self.secure,
             self.local_addr,
         );

-        H2Service::with_config(cfg, service.into_factory()).on_connect_ext(self.on_connect_ext)
+        crate::h2::H2Service::with_config(cfg, service.into_factory())
+            .on_connect_ext(self.on_connect_ext)
     }

     /// Finish service configuration and create `HttpService` instance.

@@ -230,8 +244,8 @@ where
     {
         let cfg = ServiceConfig::new(
             self.keep_alive,
-            self.client_timeout,
-            self.client_disconnect,
+            self.client_request_timeout,
+            self.client_disconnect_timeout,
             self.secure,
             self.local_addr,
         );
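For orientation, the renamed `Duration`-based setters slot into the usual builder flow roughly as below. This mirrors the crate's hello-world style example; `HttpService::build()`, `actix_server::Server`, `Response::build`, and `.tcp()` come from the wider crate API and are assumed here rather than shown in this diff:

use std::time::Duration;

use actix_http::{Error, HttpService, KeepAlive, Request, Response, StatusCode};

#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    actix_server::Server::build()
        .bind("hello-world", ("127.0.0.1", 8080), || {
            HttpService::build()
                // keep-alive, request-head, and disconnect timeouts now take `Duration`s
                .keep_alive(KeepAlive::Timeout(Duration::from_secs(5)))
                .client_request_timeout(Duration::from_secs(5))
                .client_disconnect_timeout(Duration::ZERO)
                .finish(|_req: Request| async {
                    Ok::<_, Error>(Response::build(StatusCode::OK).body("Hello!"))
                })
                .tcp()
        })?
        .run()
        .await
}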
@@ -1,106 +1,59 @@
 use std::{
-    cell::Cell,
-    fmt::{self, Write},
     net,
     rc::Rc,
-    time::{Duration, SystemTime},
+    time::{Duration, Instant},
 };

-use actix_rt::{
-    task::JoinHandle,
-    time::{interval, sleep_until, Instant, Sleep},
-};
 use bytes::BytesMut;

-/// "Sun, 06 Nov 1994 08:49:37 GMT".len()
-pub(crate) const DATE_VALUE_LENGTH: usize = 29;
+use crate::{date::DateService, KeepAlive};

-#[derive(Debug, PartialEq, Clone, Copy)]
-/// Server keep-alive setting
-pub enum KeepAlive {
-    /// Keep alive in seconds
-    Timeout(usize),
-
-    /// Rely on OS to shutdown tcp connection
-    Os,
-
-    /// Disabled
-    Disabled,
-}
-
-impl From<usize> for KeepAlive {
-    fn from(keepalive: usize) -> Self {
-        KeepAlive::Timeout(keepalive)
-    }
-}
-
-impl From<Option<usize>> for KeepAlive {
-    fn from(keepalive: Option<usize>) -> Self {
-        if let Some(keepalive) = keepalive {
-            KeepAlive::Timeout(keepalive)
-        } else {
-            KeepAlive::Disabled
-        }
-    }
-}
-
-/// Http service configuration
+/// HTTP service configuration.
+#[derive(Debug, Clone)]
 pub struct ServiceConfig(Rc<Inner>);

+#[derive(Debug)]
 struct Inner {
-    keep_alive: Option<Duration>,
-    client_timeout: u64,
-    client_disconnect: u64,
-    ka_enabled: bool,
+    keep_alive: KeepAlive,
+    client_request_timeout: Duration,
+    client_disconnect_timeout: Duration,
     secure: bool,
     local_addr: Option<std::net::SocketAddr>,
     date_service: DateService,
 }

-impl Clone for ServiceConfig {
-    fn clone(&self) -> Self {
-        ServiceConfig(self.0.clone())
-    }
-}
-
 impl Default for ServiceConfig {
     fn default() -> Self {
-        Self::new(KeepAlive::Timeout(5), 0, 0, false, None)
+        Self::new(
+            KeepAlive::default(),
+            Duration::from_secs(5),
+            Duration::ZERO,
+            false,
+            None,
+        )
     }
 }

 impl ServiceConfig {
-    /// Create instance of `ServiceConfig`
+    /// Create instance of `ServiceConfig`.
     pub fn new(
         keep_alive: KeepAlive,
-        client_timeout: u64,
-        client_disconnect: u64,
+        client_request_timeout: Duration,
+        client_disconnect_timeout: Duration,
         secure: bool,
         local_addr: Option<net::SocketAddr>,
     ) -> ServiceConfig {
-        let (keep_alive, ka_enabled) = match keep_alive {
-            KeepAlive::Timeout(val) => (val as u64, true),
-            KeepAlive::Os => (0, true),
-            KeepAlive::Disabled => (0, false),
-        };
-        let keep_alive = if ka_enabled && keep_alive > 0 {
-            Some(Duration::from_secs(keep_alive))
-        } else {
-            None
-        };
-
         ServiceConfig(Rc::new(Inner {
-            keep_alive,
-            ka_enabled,
-            client_timeout,
-            client_disconnect,
+            keep_alive: keep_alive.normalize(),
+            client_request_timeout,
+            client_disconnect_timeout,
             secure,
             local_addr,
             date_service: DateService::new(),
         }))
     }

-    /// Returns true if connection is secure (HTTPS)
+    /// Returns `true` if connection is secure (i.e., using TLS / HTTPS).
     #[inline]
     pub fn secure(&self) -> bool {
         self.0.secure
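Mirroring the new constructor signature above, building a `ServiceConfig` by hand now looks like this (crate-root re-exports of `KeepAlive` and `ServiceConfig` are assumed, as used elsewhere in this change set):

use std::time::Duration;

use actix_http::{KeepAlive, ServiceConfig};

fn make_config() -> ServiceConfig {
    ServiceConfig::new(
        KeepAlive::Timeout(Duration::from_secs(5)), // connection keep-alive
        Duration::from_secs(5),                     // client_request_timeout
        Duration::ZERO,                             // client_disconnect_timeout (disabled)
        false,                                      // secure
        None,                                       // local_addr
    )
}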
@@ -114,235 +67,97 @@ impl ServiceConfig {
         self.0.local_addr
     }

-    /// Keep alive duration if configured.
+    /// Connection keep-alive setting.
     #[inline]
-    pub fn keep_alive(&self) -> Option<Duration> {
+    pub fn keep_alive(&self) -> KeepAlive {
         self.0.keep_alive
     }

-    /// Return state of connection keep-alive functionality
-    #[inline]
-    pub fn keep_alive_enabled(&self) -> bool {
-        self.0.ka_enabled
-    }
-
-    /// Client timeout for first request.
-    #[inline]
-    pub fn client_timer(&self) -> Option<Sleep> {
-        let delay_time = self.0.client_timeout;
-        if delay_time != 0 {
-            Some(sleep_until(self.now() + Duration::from_millis(delay_time)))
-        } else {
-            None
-        }
-    }
-
-    /// Client timeout for first request.
-    pub fn client_timer_expire(&self) -> Option<Instant> {
-        let delay = self.0.client_timeout;
-        if delay != 0 {
-            Some(self.now() + Duration::from_millis(delay))
-        } else {
-            None
-        }
-    }
-
-    /// Client disconnect timer
-    pub fn client_disconnect_timer(&self) -> Option<Instant> {
-        let delay = self.0.client_disconnect;
-        if delay != 0 {
-            Some(self.now() + Duration::from_millis(delay))
-        } else {
-            None
-        }
-    }
-
-    /// Return keep-alive timer delay is configured.
-    #[inline]
-    pub fn keep_alive_timer(&self) -> Option<Sleep> {
-        self.keep_alive().map(|ka| sleep_until(self.now() + ka))
-    }
-
-    /// Keep-alive expire time
-    pub fn keep_alive_expire(&self) -> Option<Instant> {
-        self.keep_alive().map(|ka| self.now() + ka)
-    }
-
-    #[inline]
+    /// Creates a time object representing the deadline for this connection's keep-alive period, if
+    /// enabled.
+    ///
+    /// When [`KeepAlive::Os`] or [`KeepAlive::Disabled`] is set, this will return `None`.
+    pub fn keep_alive_deadline(&self) -> Option<Instant> {
+        match self.keep_alive() {
+            KeepAlive::Timeout(dur) => Some(self.now() + dur),
+            KeepAlive::Os => None,
+            KeepAlive::Disabled => None,
+        }
+    }
+
+    /// Creates a time object representing the deadline for the client to finish sending the head of
+    /// its first request.
+    ///
+    /// Returns `None` if this `ServiceConfig was` constructed with `client_request_timeout: 0`.
+    pub fn client_request_deadline(&self) -> Option<Instant> {
+        let timeout = self.0.client_request_timeout;
+        (timeout != Duration::ZERO).then(|| self.now() + timeout)
+    }
+
+    /// Creates a time object representing the deadline for the client to disconnect.
+    pub fn client_disconnect_deadline(&self) -> Option<Instant> {
+        let timeout = self.0.client_disconnect_timeout;
+        (timeout != Duration::ZERO).then(|| self.now() + timeout)
+    }
+
     pub(crate) fn now(&self) -> Instant {
         self.0.date_service.now()
     }

+    /// Writes date header to `dst` buffer.
+    ///
+    /// Low-level method that utilizes the built-in efficient date service, requiring fewer syscalls
+    /// than normal. Note that a CRLF (`\r\n`) is included in what is written.
     #[doc(hidden)]
-    pub fn set_date(&self, dst: &mut BytesMut) {
-        let mut buf: [u8; 39] = [0; 39];
-        buf[..6].copy_from_slice(b"date: ");
+    pub fn write_date_header(&self, dst: &mut BytesMut, camel_case: bool) {
+        let mut buf: [u8; 37] = [0; 37];
+
+        buf[..6].copy_from_slice(if camel_case { b"Date: " } else { b"date: " });
+
         self.0
             .date_service
-            .set_date(|date| buf[6..35].copy_from_slice(&date.bytes));
-        buf[35..].copy_from_slice(b"\r\n\r\n");
+            .with_date(|date| buf[6..35].copy_from_slice(&date.bytes));
+
+        buf[35..].copy_from_slice(b"\r\n");
         dst.extend_from_slice(&buf);
     }

-    pub(crate) fn set_date_header(&self, dst: &mut BytesMut) {
+    #[allow(unused)] // used with `http2` feature flag
+    pub(crate) fn write_date_header_value(&self, dst: &mut BytesMut) {
         self.0
             .date_service
-            .set_date(|date| dst.extend_from_slice(&date.bytes));
+            .with_date(|date| dst.extend_from_slice(&date.bytes));
     }
 }

-#[derive(Copy, Clone)]
-struct Date {
-    bytes: [u8; DATE_VALUE_LENGTH],
-    pos: usize,
-}
-
-impl Date {
-    fn new() -> Date {
-        let mut date = Date {
-            bytes: [0; DATE_VALUE_LENGTH],
-            pos: 0,
-        };
-        date.update();
-        date
-    }
-
-    fn update(&mut self) {
-        self.pos = 0;
-        write!(self, "{}", httpdate::fmt_http_date(SystemTime::now())).unwrap();
-    }
-}
-
-impl fmt::Write for Date {
-    fn write_str(&mut self, s: &str) -> fmt::Result {
-        let len = s.len();
-        self.bytes[self.pos..self.pos + len].copy_from_slice(s.as_bytes());
-        self.pos += len;
-        Ok(())
-    }
-}
-
-/// Service for update Date and Instant periodically at 500 millis interval.
-struct DateService {
-    current: Rc<Cell<(Date, Instant)>>,
-    handle: JoinHandle<()>,
-}
-
-impl Drop for DateService {
-    fn drop(&mut self) {
-        // stop the timer update async task on drop.
-        self.handle.abort();
-    }
-}
-
-impl DateService {
-    fn new() -> Self {
-        // shared date and timer for DateService and update async task.
-        let current = Rc::new(Cell::new((Date::new(), Instant::now())));
-        let current_clone = Rc::clone(&current);
-        // spawn an async task sleep for 500 milli and update current date/timer in a loop.
-        // handle is used to stop the task on DateService drop.
-        let handle = actix_rt::spawn(async move {
-            #[cfg(test)]
-            let _notify = notify_on_drop::NotifyOnDrop::new();
-
-            let mut interval = interval(Duration::from_millis(500));
-            loop {
-                let now = interval.tick().await;
-                let date = Date::new();
-                current_clone.set((date, now));
-            }
-        });
-
-        DateService { current, handle }
-    }
-
-    fn now(&self) -> Instant {
-        self.current.get().1
-    }
-
-    fn set_date<F: FnMut(&Date)>(&self, mut f: F) {
-        f(&self.current.get().0);
-    }
-}
-
-// TODO: move to a util module for testing all spawn handle drop style tasks.
-/// Test Module for checking the drop state of certain async tasks that are spawned
-/// with `actix_rt::spawn`
-///
-/// The target task must explicitly generate `NotifyOnDrop` when spawn the task
-#[cfg(test)]
-mod notify_on_drop {
-    use std::cell::RefCell;
-
-    thread_local! {
-        static NOTIFY_DROPPED: RefCell<Option<bool>> = RefCell::new(None);
-    }
-
-    /// Check if the spawned task is dropped.
-    ///
-    /// # Panics
-    /// Panics when there was no `NotifyOnDrop` instance on current thread.
-    pub(crate) fn is_dropped() -> bool {
-        NOTIFY_DROPPED.with(|bool| {
-            bool.borrow()
-                .expect("No NotifyOnDrop existed on current thread")
-        })
-    }
-
-    pub(crate) struct NotifyOnDrop;
-
-    impl NotifyOnDrop {
-        /// # Panic:
-        ///
-        /// When construct multiple instances on any given thread.
-        pub(crate) fn new() -> Self {
-            NOTIFY_DROPPED.with(|bool| {
-                let mut bool = bool.borrow_mut();
-                if bool.is_some() {
-                    panic!("NotifyOnDrop existed on current thread");
-                } else {
-                    *bool = Some(false);
-                }
-            });
-
-            NotifyOnDrop
-        }
-    }
-
-    impl Drop for NotifyOnDrop {
-        fn drop(&mut self) {
-            NOTIFY_DROPPED.with(|bool| {
-                if let Some(b) = bool.borrow_mut().as_mut() {
-                    *b = true;
-                }
-            });
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests {
-    use super::*;
-
-    use actix_rt::{task::yield_now, time::sleep};
+    use actix_rt::{
+        task::yield_now,
+        time::{sleep, sleep_until},
+    };
+    use memchr::memmem;
+
+    use super::*;
+    use crate::{date::DATE_VALUE_LENGTH, notify_on_drop};

     #[actix_rt::test]
     async fn test_date_service_update() {
-        let settings = ServiceConfig::new(KeepAlive::Os, 0, 0, false, None);
+        let settings =
+            ServiceConfig::new(KeepAlive::Os, Duration::ZERO, Duration::ZERO, false, None);

         yield_now().await;

         let mut buf1 = BytesMut::with_capacity(DATE_VALUE_LENGTH + 10);
-        settings.set_date(&mut buf1);
+        settings.write_date_header(&mut buf1, false);
         let now1 = settings.now();

-        sleep_until(Instant::now() + Duration::from_secs(2)).await;
+        sleep_until((Instant::now() + Duration::from_secs(2)).into()).await;
         yield_now().await;

         let now2 = settings.now();
         let mut buf2 = BytesMut::with_capacity(DATE_VALUE_LENGTH + 10);
-        settings.set_date(&mut buf2);
+        settings.write_date_header(&mut buf2, false);

         assert_ne!(now1, now2);
@ -395,11 +210,27 @@ mod tests {
|
|||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn test_date() {
|
async fn test_date() {
|
||||||
let settings = ServiceConfig::new(KeepAlive::Os, 0, 0, false, None);
|
let settings = ServiceConfig::default();
|
||||||
|
|
||||||
let mut buf1 = BytesMut::with_capacity(DATE_VALUE_LENGTH + 10);
|
let mut buf1 = BytesMut::with_capacity(DATE_VALUE_LENGTH + 10);
|
||||||
settings.set_date(&mut buf1);
|
settings.write_date_header(&mut buf1, false);
|
||||||
|
|
||||||
let mut buf2 = BytesMut::with_capacity(DATE_VALUE_LENGTH + 10);
|
let mut buf2 = BytesMut::with_capacity(DATE_VALUE_LENGTH + 10);
|
||||||
settings.set_date(&mut buf2);
|
settings.write_date_header(&mut buf2, false);
|
||||||
|
|
||||||
assert_eq!(buf1, buf2);
|
assert_eq!(buf1, buf2);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn test_date_camel_case() {
|
||||||
|
let settings = ServiceConfig::default();
|
||||||
|
|
||||||
|
let mut buf = BytesMut::with_capacity(DATE_VALUE_LENGTH + 10);
|
||||||
|
settings.write_date_header(&mut buf, false);
|
||||||
|
assert!(memmem::find(&buf, b"date:").is_some());
|
||||||
|
|
||||||
|
let mut buf = BytesMut::with_capacity(DATE_VALUE_LENGTH + 10);
|
||||||
|
settings.write_date_header(&mut buf, true);
|
||||||
|
assert!(memmem::find(&buf, b"Date:").is_some());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
actix-http/src/date.rs (new file, 92 lines)
@@ -0,0 +1,92 @@
+use std::{
+    cell::Cell,
+    fmt::{self, Write},
+    rc::Rc,
+    time::{Duration, Instant, SystemTime},
+};
+
+use actix_rt::{task::JoinHandle, time::interval};
+
+/// "Thu, 01 Jan 1970 00:00:00 GMT".len()
+pub(crate) const DATE_VALUE_LENGTH: usize = 29;
+
+#[derive(Clone, Copy)]
+pub(crate) struct Date {
+    pub(crate) bytes: [u8; DATE_VALUE_LENGTH],
+    pos: usize,
+}
+
+impl Date {
+    fn new() -> Date {
+        let mut date = Date {
+            bytes: [0; DATE_VALUE_LENGTH],
+            pos: 0,
+        };
+        date.update();
+        date
+    }
+
+    fn update(&mut self) {
+        self.pos = 0;
+        write!(self, "{}", httpdate::HttpDate::from(SystemTime::now())).unwrap();
+    }
+}
+
+impl fmt::Write for Date {
+    fn write_str(&mut self, s: &str) -> fmt::Result {
+        let len = s.len();
+        self.bytes[self.pos..self.pos + len].copy_from_slice(s.as_bytes());
+        self.pos += len;
+        Ok(())
+    }
+}
+
+/// Service for update Date and Instant periodically at 500 millis interval.
+pub(crate) struct DateService {
+    current: Rc<Cell<(Date, Instant)>>,
+    handle: JoinHandle<()>,
+}
+
+impl DateService {
+    pub(crate) fn new() -> Self {
+        // shared date and timer for DateService and update async task.
+        let current = Rc::new(Cell::new((Date::new(), Instant::now())));
+        let current_clone = Rc::clone(&current);
+        // spawn an async task sleep for 500 millis and update current date/timer in a loop.
+        // handle is used to stop the task on DateService drop.
+        let handle = actix_rt::spawn(async move {
+            #[cfg(test)]
+            let _notify = crate::notify_on_drop::NotifyOnDrop::new();
+
+            let mut interval = interval(Duration::from_millis(500));
+            loop {
+                let now = interval.tick().await;
+                let date = Date::new();
+                current_clone.set((date, now.into_std()));
+            }
+        });
+
+        DateService { current, handle }
+    }
+
+    pub(crate) fn now(&self) -> Instant {
+        self.current.get().1
+    }
+
+    pub(crate) fn with_date<F: FnMut(&Date)>(&self, mut f: F) {
+        f(&self.current.get().0);
+    }
+}
+
+impl fmt::Debug for DateService {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("DateService").finish_non_exhaustive()
+    }
+}
+
+impl Drop for DateService {
+    fn drop(&mut self) {
+        // stop the timer update async task on drop.
+        self.handle.abort();
+    }
+}
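
A minimal test sketch (not part of the diff above), assuming the crate-internal items shown here keep these signatures; it uses `notify_on_drop::is_dropped` to verify that dropping `DateService` aborts the spawned update task via `JoinHandle::abort`:

    #[actix_rt::test]
    async fn dropping_date_service_aborts_update_task() {
        let svc = DateService::new(); // spawns the update task, which creates a NotifyOnDrop guard
        actix_rt::task::yield_now().await; // give the spawned task a chance to start
        drop(svc); // Drop impl calls self.handle.abort()

        // yield a few times so the aborted task is actually dropped
        for _ in 0..5 {
            actix_rt::task::yield_now().await;
        }

        assert!(crate::notify_on_drop::is_dropped());
    }
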
@@ -9,20 +9,15 @@ use std::{

 use actix_rt::task::{spawn_blocking, JoinHandle};
 use bytes::Bytes;
-use futures_core::{ready, Stream};
-
-#[cfg(feature = "compress-brotli")]
-use brotli2::write::BrotliDecoder;
-
 #[cfg(feature = "compress-gzip")]
 use flate2::write::{GzDecoder, ZlibDecoder};
+use futures_core::{ready, Stream};
 #[cfg(feature = "compress-zstd")]
 use zstd::stream::write::Decoder as ZstdDecoder;

 use crate::{
     encoding::Writer,
-    error::{BlockingError, PayloadError},
+    error::PayloadError,
     header::{ContentEncoding, HeaderMap, CONTENT_ENCODING},
 };

@@ -47,17 +42,20 @@ where
     pub fn new(stream: S, encoding: ContentEncoding) -> Decoder<S> {
         let decoder = match encoding {
             #[cfg(feature = "compress-brotli")]
-            ContentEncoding::Br => Some(ContentDecoder::Br(Box::new(BrotliDecoder::new(
+            ContentEncoding::Brotli => Some(ContentDecoder::Brotli(Box::new(
+                brotli::DecompressorWriter::new(Writer::new(), 8_096),
+            ))),
+
+            #[cfg(feature = "compress-gzip")]
+            ContentEncoding::Deflate => Some(ContentDecoder::Deflate(Box::new(ZlibDecoder::new(
                 Writer::new(),
             )))),
-            #[cfg(feature = "compress-gzip")]
-            ContentEncoding::Deflate => Some(ContentDecoder::Deflate(Box::new(
-                ZlibDecoder::new(Writer::new()),
-            ))),
             #[cfg(feature = "compress-gzip")]
             ContentEncoding::Gzip => Some(ContentDecoder::Gzip(Box::new(GzDecoder::new(
                 Writer::new(),
             )))),

             #[cfg(feature = "compress-zstd")]
             ContentEncoding::Zstd => Some(ContentDecoder::Zstd(Box::new(
                 ZstdDecoder::new(Writer::new()).expect(

@@ -101,8 +99,12 @@ where

         loop {
             if let Some(ref mut fut) = this.fut {
-                let (chunk, decoder) =
-                    ready!(Pin::new(fut).poll(cx)).map_err(|_| BlockingError)??;
+                let (chunk, decoder) = ready!(Pin::new(fut).poll(cx)).map_err(|_| {
+                    PayloadError::Io(io::Error::new(
+                        io::ErrorKind::Other,
+                        "Blocking task was cancelled unexpectedly",
+                    ))
+                })??;
+
                 *this.decoder = Some(decoder);
                 this.fut.take();

@@ -162,10 +164,13 @@ where
 enum ContentDecoder {
     #[cfg(feature = "compress-gzip")]
     Deflate(Box<ZlibDecoder<Writer>>),

     #[cfg(feature = "compress-gzip")]
     Gzip(Box<GzDecoder<Writer>>),

     #[cfg(feature = "compress-brotli")]
-    Br(Box<BrotliDecoder<Writer>>),
+    Brotli(Box<brotli::DecompressorWriter<Writer>>),

     // We need explicit 'static lifetime here because ZstdDecoder need lifetime
     // argument, and we use `spawn_blocking` in `Decoder::poll_next` that require `FnOnce() -> R + Send + 'static`
     #[cfg(feature = "compress-zstd")]

@@ -176,7 +181,7 @@ impl ContentDecoder {
     fn feed_eof(&mut self) -> io::Result<Option<Bytes>> {
         match self {
             #[cfg(feature = "compress-brotli")]
-            ContentDecoder::Br(ref mut decoder) => match decoder.flush() {
+            ContentDecoder::Brotli(ref mut decoder) => match decoder.flush() {
                 Ok(()) => {
                     let b = decoder.get_mut().take();

@@ -186,7 +191,7 @@ impl ContentDecoder {
                         Ok(None)
                     }
                 }
-                Err(e) => Err(e),
+                Err(err) => Err(err),
             },

             #[cfg(feature = "compress-gzip")]
@@ -200,7 +205,7 @@ impl ContentDecoder {
                         Ok(None)
                     }
                 }
-                Err(e) => Err(e),
+                Err(err) => Err(err),
             },

             #[cfg(feature = "compress-gzip")]
@@ -213,7 +218,7 @@ impl ContentDecoder {
                        Ok(None)
                    }
                }
-                Err(e) => Err(e),
+                Err(err) => Err(err),
             },

             #[cfg(feature = "compress-zstd")]
@@ -226,7 +231,7 @@ impl ContentDecoder {
                        Ok(None)
                    }
                }
-                Err(e) => Err(e),
+                Err(err) => Err(err),
             },
         }
     }

@@ -234,7 +239,7 @@ impl ContentDecoder {
     fn feed_data(&mut self, data: Bytes) -> io::Result<Option<Bytes>> {
         match self {
             #[cfg(feature = "compress-brotli")]
-            ContentDecoder::Br(ref mut decoder) => match decoder.write_all(&data) {
+            ContentDecoder::Brotli(ref mut decoder) => match decoder.write_all(&data) {
                 Ok(_) => {
                     decoder.flush()?;
                     let b = decoder.get_mut().take();

@@ -245,7 +250,7 @@ impl ContentDecoder {
                         Ok(None)
                     }
                 }
-                Err(e) => Err(e),
+                Err(err) => Err(err),
             },

             #[cfg(feature = "compress-gzip")]
@@ -260,7 +265,7 @@ impl ContentDecoder {
                        Ok(None)
                    }
                }
-                Err(e) => Err(e),
+                Err(err) => Err(err),
             },

             #[cfg(feature = "compress-gzip")]
@@ -275,7 +280,7 @@ impl ContentDecoder {
                        Ok(None)
                    }
                }
-                Err(e) => Err(e),
+                Err(err) => Err(err),
             },

             #[cfg(feature = "compress-zstd")]
@@ -290,7 +295,7 @@ impl ContentDecoder {
                        Ok(None)
                    }
                }
-                Err(e) => Err(e),
+                Err(err) => Err(err),
             },
         }
     }
@@ -11,22 +11,17 @@ use std::{
 use actix_rt::task::{spawn_blocking, JoinHandle};
 use bytes::Bytes;
 use derive_more::Display;
-use futures_core::ready;
-use pin_project_lite::pin_project;
-
-#[cfg(feature = "compress-brotli")]
-use brotli2::write::BrotliEncoder;
-
 #[cfg(feature = "compress-gzip")]
 use flate2::write::{GzEncoder, ZlibEncoder};
+use futures_core::ready;
+use pin_project_lite::pin_project;
+use tracing::trace;
 #[cfg(feature = "compress-zstd")]
 use zstd::stream::write::Encoder as ZstdEncoder;

 use super::Writer;
 use crate::{
     body::{self, BodySize, MessageBody},
-    error::BlockingError,
     header::{self, ContentEncoding, HeaderValue, CONTENT_ENCODING},
     ResponseHead, StatusCode,
 };

@@ -55,26 +50,36 @@ impl<B: MessageBody> Encoder<B> {
         }
     }

+    fn empty() -> Self {
+        Encoder {
+            body: EncoderBody::Full { body: Bytes::new() },
+            encoder: None,
+            fut: None,
+            eof: true,
+        }
+    }
+
     pub fn response(encoding: ContentEncoding, head: &mut ResponseHead, body: B) -> Self {
-        let can_encode = !(head.headers().contains_key(&CONTENT_ENCODING)
+        // no need to compress empty bodies
+        match body.size() {
+            BodySize::None => return Self::none(),
+            BodySize::Sized(0) => return Self::empty(),
+            _ => {}
+        }
+
+        let should_encode = !(head.headers().contains_key(&CONTENT_ENCODING)
             || head.status == StatusCode::SWITCHING_PROTOCOLS
             || head.status == StatusCode::NO_CONTENT
-            || encoding == ContentEncoding::Identity
-            || encoding == ContentEncoding::Auto);
-
-        // no need to compress an empty body
-        if matches!(body.size(), BodySize::None) {
-            return Self::none();
-        }
+            || encoding == ContentEncoding::Identity);

         let body = match body.try_into_bytes() {
             Ok(body) => EncoderBody::Full { body },
             Err(body) => EncoderBody::Stream { body },
         };

-        if can_encode {
-            // Modify response body only if encoder is set
-            if let Some(enc) = ContentEncoder::encoder(encoding) {
+        if should_encode {
+            // wrap body only if encoder is feature-enabled
+            if let Some(enc) = ContentEncoder::select(encoding) {
                 update_head(encoding, head);

                 return Encoder {

@@ -169,6 +174,7 @@ where
         cx: &mut Context<'_>,
     ) -> Poll<Option<Result<Bytes, Self::Error>>> {
         let mut this = self.project();

         loop {
             if *this.eof {
                 return Poll::Ready(None);

@@ -176,7 +182,12 @@ where

             if let Some(ref mut fut) = this.fut {
                 let mut encoder = ready!(Pin::new(fut).poll(cx))
-                    .map_err(|_| EncoderError::Blocking(BlockingError))?
+                    .map_err(|_| {
+                        EncoderError::Io(io::Error::new(
+                            io::ErrorKind::Other,
+                            "Blocking task was cancelled unexpectedly",
+                        ))
+                    })?
                     .map_err(EncoderError::Io)?;

                 let chunk = encoder.take();

@@ -252,10 +263,10 @@ where
 }

 fn update_head(encoding: ContentEncoding, head: &mut ResponseHead) {
-    head.headers_mut().insert(
-        header::CONTENT_ENCODING,
-        HeaderValue::from_static(encoding.as_str()),
-    );
+    head.headers_mut()
+        .insert(header::CONTENT_ENCODING, encoding.to_header_value());
+    head.headers_mut()
+        .append(header::VARY, HeaderValue::from_static("accept-encoding"));

     head.no_chunking(false);
 }

@@ -268,7 +279,7 @@ enum ContentEncoder {
     Gzip(GzEncoder<Writer>),

     #[cfg(feature = "compress-brotli")]
-    Br(BrotliEncoder<Writer>),
+    Brotli(Box<brotli::CompressorWriter<Writer>>),

     // Wwe need explicit 'static lifetime here because ZstdEncoder needs a lifetime argument and we
     // use `spawn_blocking` in `Encoder::poll_next` that requires `FnOnce() -> R + Send + 'static`.

@@ -277,7 +288,7 @@ enum ContentEncoder {
 }

 impl ContentEncoder {
-    fn encoder(encoding: ContentEncoding) -> Option<Self> {
+    fn select(encoding: ContentEncoding) -> Option<Self> {
         match encoding {
             #[cfg(feature = "compress-gzip")]
             ContentEncoding::Deflate => Some(ContentEncoder::Deflate(ZlibEncoder::new(

@@ -292,9 +303,7 @@ impl ContentEncoder {
             ))),

             #[cfg(feature = "compress-brotli")]
-            ContentEncoding::Br => {
-                Some(ContentEncoder::Br(BrotliEncoder::new(Writer::new(), 3)))
-            }
+            ContentEncoding::Brotli => Some(ContentEncoder::Brotli(new_brotli_compressor())),

             #[cfg(feature = "compress-zstd")]
             ContentEncoding::Zstd => {

@@ -310,7 +319,7 @@ impl ContentEncoder {
     pub(crate) fn take(&mut self) -> Bytes {
         match *self {
             #[cfg(feature = "compress-brotli")]
-            ContentEncoder::Br(ref mut encoder) => encoder.get_mut().take(),
+            ContentEncoder::Brotli(ref mut encoder) => encoder.get_mut().take(),

             #[cfg(feature = "compress-gzip")]
             ContentEncoder::Deflate(ref mut encoder) => encoder.get_mut().take(),

@@ -326,8 +335,8 @@ impl ContentEncoder {
     fn finish(self) -> Result<Bytes, io::Error> {
         match self {
             #[cfg(feature = "compress-brotli")]
-            ContentEncoder::Br(encoder) => match encoder.finish() {
-                Ok(writer) => Ok(writer.buf.freeze()),
+            ContentEncoder::Brotli(mut encoder) => match encoder.flush() {
+                Ok(()) => Ok(encoder.into_inner().buf.freeze()),
                 Err(err) => Err(err),
             },

@@ -354,7 +363,7 @@ impl ContentEncoder {
     fn write(&mut self, data: &[u8]) -> Result<(), io::Error> {
         match *self {
             #[cfg(feature = "compress-brotli")]
-            ContentEncoder::Br(ref mut encoder) => match encoder.write_all(data) {
+            ContentEncoder::Brotli(ref mut encoder) => match encoder.write_all(data) {
                 Ok(_) => Ok(()),
                 Err(err) => {
                     trace!("Error decoding br encoding: {}", err);

@@ -392,16 +401,25 @@ impl ContentEncoder {
     }
 }

+#[cfg(feature = "compress-brotli")]
+fn new_brotli_compressor() -> Box<brotli::CompressorWriter<Writer>> {
+    Box::new(brotli::CompressorWriter::new(
+        Writer::new(),
+        32 * 1024, // 32 KiB buffer
+        3,         // BROTLI_PARAM_QUALITY
+        22,        // BROTLI_PARAM_LGWIN
+    ))
+}
+
 #[derive(Debug, Display)]
 #[non_exhaustive]
 pub enum EncoderError {
-    #[display(fmt = "body")]
+    /// Wrapped body stream error.
+    #[display("body")]
     Body(Box<dyn StdError>),

-    #[display(fmt = "blocking")]
-    Blocking(BlockingError),
-
-    #[display(fmt = "io")]
+    /// Generic I/O error.
+    #[display("io")]
     Io(io::Error),
 }

@@ -409,7 +427,6 @@ impl StdError for EncoderError {
     fn source(&self) -> Option<&(dyn StdError + 'static)> {
         match self {
             EncoderError::Body(err) => Some(&**err),
-            EncoderError::Blocking(err) => Some(err),
             EncoderError::Io(err) => Some(err),
         }
     }
@@ -7,13 +7,12 @@ use bytes::{Bytes, BytesMut};
 mod decoder;
 mod encoder;

-pub use self::decoder::Decoder;
-pub use self::encoder::Encoder;
+pub use self::{decoder::Decoder, encoder::Encoder};

 /// Special-purpose writer for streaming (de-)compression.
 ///
 /// Pre-allocates 8KiB of capacity.
-pub(self) struct Writer {
+struct Writer {
     buf: BytesMut,
 }

@@ -3,11 +3,10 @@
 use std::{error::Error as StdError, fmt, io, str::Utf8Error, string::FromUtf8Error};

 use derive_more::{Display, Error, From};
+pub use http::{status::InvalidStatusCode, Error as HttpError};
 use http::{uri::InvalidUri, StatusCode};

-use crate::{body::BoxBody, ws, Response};
+use crate::{body::BoxBody, Response};

-pub use http::Error as HttpError;
-
 pub struct Error {
     inner: Box<ErrorInner>,

@@ -51,7 +50,7 @@ impl Error {
         Self::new(Kind::SendResponse)
     }

-    #[allow(unused)] // reserved for future use (TODO: remove allow when being used)
+    #[allow(unused)] // available for future use
     pub(crate) fn new_io() -> Self {
         Self::new(Kind::Io)
     }

@@ -61,6 +60,7 @@ impl Error {
         Self::new(Kind::Encoder)
     }

+    #[allow(unused)] // used with `ws` feature flag
     pub(crate) fn new_ws() -> Self {
         Self::new(Kind::Ws)
     }

@@ -80,35 +80,37 @@ impl From<Error> for Response<BoxBody> {

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Display)]
 pub(crate) enum Kind {
-    #[display(fmt = "error processing HTTP")]
+    #[display("error processing HTTP")]
     Http,

-    #[display(fmt = "error parsing HTTP message")]
+    #[display("error parsing HTTP message")]
     Parse,

-    #[display(fmt = "request payload read error")]
+    #[display("request payload read error")]
     Payload,

-    #[display(fmt = "response body write error")]
+    #[display("response body write error")]
     Body,

-    #[display(fmt = "send response error")]
+    #[display("send response error")]
     SendResponse,

-    #[display(fmt = "error in WebSocket process")]
+    #[display("error in WebSocket process")]
     Ws,

-    #[display(fmt = "connection error")]
+    #[display("connection error")]
     Io,

-    #[display(fmt = "encoder error")]
+    #[display("encoder error")]
     Encoder,
 }

 impl fmt::Debug for Error {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        // TODO: more detail
-        f.write_str("actix_http::Error")
+        f.debug_struct("actix_http::Error")
+            .field("kind", &self.inner.kind)
+            .field("cause", &self.inner.cause)
+            .finish()
     }
 }

@@ -139,14 +141,16 @@ impl From<HttpError> for Error {
     }
 }

-impl From<ws::HandshakeError> for Error {
-    fn from(err: ws::HandshakeError) -> Self {
+#[cfg(feature = "ws")]
+impl From<crate::ws::HandshakeError> for Error {
+    fn from(err: crate::ws::HandshakeError) -> Self {
         Self::new_ws().with_cause(err)
     }
 }

-impl From<ws::ProtocolError> for Error {
-    fn from(err: ws::ProtocolError) -> Self {
+#[cfg(feature = "ws")]
+impl From<crate::ws::ProtocolError> for Error {
+    fn from(err: crate::ws::ProtocolError) -> Self {
         Self::new_ws().with_cause(err)
     }
 }
@@ -156,44 +160,44 @@ impl From<ws::ProtocolError> for Error {
 #[non_exhaustive]
 pub enum ParseError {
     /// An invalid `Method`, such as `GE.T`.
-    #[display(fmt = "Invalid Method specified")]
+    #[display("invalid method specified")]
     Method,

     /// An invalid `Uri`, such as `exam ple.domain`.
-    #[display(fmt = "Uri error: {}", _0)]
+    #[display("URI error: {}", _0)]
     Uri(InvalidUri),

     /// An invalid `HttpVersion`, such as `HTP/1.1`
-    #[display(fmt = "Invalid HTTP version specified")]
+    #[display("invalid HTTP version specified")]
     Version,

     /// An invalid `Header`.
-    #[display(fmt = "Invalid Header provided")]
+    #[display("invalid Header provided")]
     Header,

     /// A message head is too large to be reasonable.
-    #[display(fmt = "Message head is too large")]
+    #[display("message head is too large")]
     TooLarge,

     /// A message reached EOF, but is not complete.
-    #[display(fmt = "Message is incomplete")]
+    #[display("message is incomplete")]
     Incomplete,

     /// An invalid `Status`, such as `1337 ELITE`.
-    #[display(fmt = "Invalid Status provided")]
+    #[display("invalid status provided")]
     Status,

     /// A timeout occurred waiting for an IO event.
     #[allow(dead_code)]
-    #[display(fmt = "Timeout")]
+    #[display("timeout")]
     Timeout,

-    /// An `io::Error` that occurred while trying to read or write to a network stream.
-    #[display(fmt = "IO error: {}", _0)]
+    /// An I/O error that occurred while trying to read or write to a network stream.
+    #[display("I/O error: {}", _0)]
     Io(io::Error),

     /// Parsing a field as string failed.
-    #[display(fmt = "UTF8 error: {}", _0)]
+    #[display("UTF-8 error: {}", _0)]
     Utf8(Utf8Error),
 }

@@ -247,40 +251,33 @@ impl From<ParseError> for Response<BoxBody> {
     }
 }

-/// A set of errors that can occur running blocking tasks in thread pool.
-#[derive(Debug, Display, Error)]
-#[display(fmt = "Blocking thread pool is gone")]
-pub struct BlockingError;
-
 /// A set of errors that can occur during payload parsing.
 #[derive(Debug, Display)]
 #[non_exhaustive]
 pub enum PayloadError {
     /// A payload reached EOF, but is not complete.
-    #[display(
-        fmt = "A payload reached EOF, but is not complete. Inner error: {:?}",
-        _0
-    )]
+    #[display("payload reached EOF before completing: {:?}", _0)]
     Incomplete(Option<io::Error>),

     /// Content encoding stream corruption.
-    #[display(fmt = "Can not decode content-encoding.")]
+    #[display("can not decode content-encoding")]
     EncodingCorrupted,

     /// Payload reached size limit.
-    #[display(fmt = "Payload reached size limit.")]
+    #[display("payload reached size limit")]
     Overflow,

     /// Payload length is unknown.
-    #[display(fmt = "Payload length is unknown.")]
+    #[display("payload length is unknown")]
     UnknownLength,

     /// HTTP/2 payload error.
-    #[display(fmt = "{}", _0)]
-    Http2Payload(h2::Error),
+    #[cfg(feature = "http2")]
+    #[display("{}", _0)]
+    Http2Payload(::h2::Error),

     /// Generic I/O error.
-    #[display(fmt = "{}", _0)]
+    #[display("{}", _0)]
     Io(io::Error),
 }

@@ -288,18 +285,20 @@ impl std::error::Error for PayloadError {
     fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
         match self {
             PayloadError::Incomplete(None) => None,
-            PayloadError::Incomplete(Some(err)) => Some(err as &dyn std::error::Error),
+            PayloadError::Incomplete(Some(err)) => Some(err),
             PayloadError::EncodingCorrupted => None,
             PayloadError::Overflow => None,
             PayloadError::UnknownLength => None,
-            PayloadError::Http2Payload(err) => Some(err as &dyn std::error::Error),
-            PayloadError::Io(err) => Some(err as &dyn std::error::Error),
+            #[cfg(feature = "http2")]
+            PayloadError::Http2Payload(err) => Some(err),
+            PayloadError::Io(err) => Some(err),
         }
     }
 }

-impl From<h2::Error> for PayloadError {
-    fn from(err: h2::Error) -> Self {
+#[cfg(feature = "http2")]
+impl From<::h2::Error> for PayloadError {
+    fn from(err: ::h2::Error) -> Self {
         PayloadError::Http2Payload(err)
     }
 }

@@ -316,15 +315,6 @@ impl From<io::Error> for PayloadError {
     }
 }

-impl From<BlockingError> for PayloadError {
-    fn from(_: BlockingError) -> Self {
-        PayloadError::Io(io::Error::new(
-            io::ErrorKind::Other,
-            "Operation is canceled",
-        ))
-    }
-}
-
 impl From<PayloadError> for Error {
     fn from(err: PayloadError) -> Self {
         Self::new_payload().with_cause(err)
@@ -333,52 +323,61 @@ impl From<PayloadError> for Error {

 /// A set of errors that can occur during dispatching HTTP requests.
 #[derive(Debug, Display, From)]
+#[non_exhaustive]
 pub enum DispatchError {
     /// Service error.
-    #[display(fmt = "Service Error")]
+    #[display("service error")]
     Service(Response<BoxBody>),

     /// Body streaming error.
-    #[display(fmt = "Body error: {}", _0)]
+    #[display("body error: {}", _0)]
     Body(Box<dyn StdError>),

     /// Upgrade service error.
+    #[display("upgrade error")]
     Upgrade,

     /// An `io::Error` that occurred while trying to read or write to a network stream.
-    #[display(fmt = "IO error: {}", _0)]
+    #[display("I/O error: {}", _0)]
     Io(io::Error),

     /// Request parse error.
-    #[display(fmt = "Request parse error: {}", _0)]
+    #[display("request parse error: {}", _0)]
     Parse(ParseError),

     /// HTTP/2 error.
-    #[display(fmt = "{}", _0)]
+    #[display("{}", _0)]
+    #[cfg(feature = "http2")]
     H2(h2::Error),

     /// The first request did not complete within the specified timeout.
-    #[display(fmt = "The first request did not complete within the specified timeout")]
+    #[display("request did not complete within the specified timeout")]
     SlowRequestTimeout,

-    /// Disconnect timeout. Makes sense for ssl streams.
-    #[display(fmt = "Connection shutdown timeout")]
+    /// Disconnect timeout. Makes sense for TLS streams.
+    #[display("connection shutdown timeout")]
     DisconnectTimeout,

+    /// Handler dropped payload before reading EOF.
+    #[display("handler dropped payload before reading EOF")]
+    HandlerDroppedPayload,
+
     /// Internal error.
-    #[display(fmt = "Internal error")]
+    #[display("internal error")]
     InternalError,
 }

 impl StdError for DispatchError {
     fn source(&self) -> Option<&(dyn StdError + 'static)> {
         match self {
-            // TODO: error source extraction?
             DispatchError::Service(_res) => None,
             DispatchError::Body(err) => Some(&**err),
             DispatchError::Io(err) => Some(err),
             DispatchError::Parse(err) => Some(err),

+            #[cfg(feature = "http2")]
             DispatchError::H2(err) => Some(err),

             _ => None,
         }
     }

@@ -386,38 +385,23 @@ impl StdError for DispatchError {

 /// A set of error that can occur during parsing content type.
 #[derive(Debug, Display, Error)]
+#[cfg_attr(test, derive(PartialEq, Eq))]
 #[non_exhaustive]
 pub enum ContentTypeError {
-    /// Can not parse content type
-    #[display(fmt = "Can not parse content type")]
+    /// Can not parse content type.
+    #[display("could not parse content type")]
     ParseError,

-    /// Unknown content encoding
-    #[display(fmt = "Unknown content encoding")]
+    /// Unknown content encoding.
+    #[display("unknown content encoding")]
     UnknownEncoding,
 }

-#[cfg(test)]
-mod content_type_test_impls {
-    use super::*;
-
-    impl std::cmp::PartialEq for ContentTypeError {
-        fn eq(&self, other: &Self) -> bool {
-            match self {
-                Self::ParseError => matches!(other, ContentTypeError::ParseError),
-                Self::UnknownEncoding => {
-                    matches!(other, ContentTypeError::UnknownEncoding)
-                }
-            }
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests {
+    use http::Error as HttpError;
+
     use super::*;
-    use http::{Error as HttpError, StatusCode};
-    use std::io;

     #[test]
     fn test_into_response() {

@@ -435,7 +419,7 @@ mod tests {
         let err: Error = ParseError::Io(orig).into();
         assert_eq!(
             format!("{}", err),
-            "error parsing HTTP message: IO error: other"
+            "error parsing HTTP message: I/O error: other"
         );
     }

@@ -462,7 +446,7 @@ mod tests {
         let err = PayloadError::Incomplete(None);
         assert_eq!(
             err.to_string(),
-            "A payload reached EOF, but is not complete. Inner error: None"
+            "payload reached EOF before completing: None"
         );
     }

@@ -482,7 +466,7 @@ mod tests {
             match ParseError::from($from) {
                 e @ $error => {
                     let desc = format!("{}", e);
-                    assert_eq!(desc, format!("IO error: {}", $from));
+                    assert_eq!(desc, format!("I/O error: {}", $from));
                 }
                 _ => unreachable!("{:?}", $from),
             }
@@ -1,17 +1,38 @@
 use std::{
     any::{Any, TypeId},
+    collections::HashMap,
     fmt,
+    hash::{BuildHasherDefault, Hasher},
 };

-use ahash::AHashMap;
+/// A hasher for `TypeId`s that takes advantage of its known characteristics.
+///
+/// Author of `anymap` crate has done research on the topic:
+/// https://github.com/chris-morgan/anymap/blob/2e9a5704/src/lib.rs#L599
+#[derive(Debug, Default)]
+struct NoOpHasher(u64);
+
+impl Hasher for NoOpHasher {
+    fn write(&mut self, _bytes: &[u8]) {
+        unimplemented!("This NoOpHasher can only handle u64s")
+    }
+
+    fn write_u64(&mut self, i: u64) {
+        self.0 = i;
+    }
+
+    fn finish(&self) -> u64 {
+        self.0
+    }
+}

 /// A type map for request extensions.
 ///
 /// All entries into this map must be owned types (or static references).
 #[derive(Default)]
 pub struct Extensions {
-    /// Use AHasher with a std HashMap with for faster lookups on the small `TypeId` keys.
-    map: AHashMap<TypeId, Box<dyn Any>>,
+    // use no-op hasher with a std HashMap with for faster lookups on the small `TypeId` keys
+    map: HashMap<TypeId, Box<dyn Any>, BuildHasherDefault<NoOpHasher>>,
 }

 impl Extensions {

@@ -19,7 +40,7 @@ impl Extensions {
     #[inline]
     pub fn new() -> Extensions {
         Extensions {
-            map: AHashMap::new(),
+            map: HashMap::default(),
         }
     }

@@ -83,6 +104,46 @@ impl Extensions {
             .and_then(|boxed| boxed.downcast_mut())
     }

+    /// Inserts the given `value` into the extensions if it is not present, then returns a reference
+    /// to the value in the extensions.
+    ///
+    /// ```
+    /// # use actix_http::Extensions;
+    /// let mut map = Extensions::new();
+    /// assert_eq!(map.get::<Vec<u32>>(), None);
+    ///
+    /// map.get_or_insert(Vec::<u32>::new()).push(1);
+    /// assert_eq!(map.get::<Vec<u32>>(), Some(&vec![1]));
+    ///
+    /// map.get_or_insert(Vec::<u32>::new()).push(2);
+    /// assert_eq!(map.get::<Vec<u32>>(), Some(&vec![1,2]));
+    /// ```
+    pub fn get_or_insert<T: 'static>(&mut self, value: T) -> &mut T {
+        self.get_or_insert_with(|| value)
+    }
+
+    /// Inserts a value computed from `f` into the extensions if the given `value` is not present,
+    /// then returns a reference to the value in the extensions.
+    ///
+    /// ```
+    /// # use actix_http::Extensions;
+    /// let mut map = Extensions::new();
+    /// assert_eq!(map.get::<Vec<u32>>(), None);
+    ///
+    /// map.get_or_insert_with(Vec::<u32>::new).push(1);
+    /// assert_eq!(map.get::<Vec<u32>>(), Some(&vec![1]));
+    ///
+    /// map.get_or_insert_with(Vec::<u32>::new).push(2);
+    /// assert_eq!(map.get::<Vec<u32>>(), Some(&vec![1,2]));
+    /// ```
+    pub fn get_or_insert_with<T: 'static, F: FnOnce() -> T>(&mut self, default: F) -> &mut T {
+        self.map
+            .entry(TypeId::of::<T>())
+            .or_insert_with(|| Box::new(default()))
+            .downcast_mut()
+            .expect("extensions map should now contain a T value")
+    }
+
     /// Remove an item from the map of a given type.
     ///
     /// If an item of this type was already stored, it will be returned.
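
A small sketch (not from the diff), assuming the private `NoOpHasher` shown above is in scope; it illustrates the idea behind the change: the hasher relies on `TypeId` hashing itself through `write_u64`, so it can simply keep that value as the finished hash (the byte-slice `write` path is the one that panics):

    use std::{any::TypeId, collections::HashMap, hash::BuildHasherDefault};

    fn demo() {
        // Map keyed by TypeId using the no-op hasher; lookups reuse the id's own value as the hash.
        let mut map: HashMap<TypeId, &'static str, BuildHasherDefault<NoOpHasher>> =
            HashMap::default();
        map.insert(TypeId::of::<u32>(), "u32 entry");
        assert_eq!(map.get(&TypeId::of::<u32>()), Some(&"u32 entry"));
    }
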
@@ -1,6 +1,7 @@
 use std::{io, task::Poll};

 use bytes::{Buf as _, Bytes, BytesMut};
+use tracing::{debug, trace};

 macro_rules! byte (
     ($rdr:ident) => ({

@@ -14,7 +15,7 @@ macro_rules! byte (
     })
 );

-#[derive(Debug, PartialEq, Clone)]
+#[derive(Debug, Clone, PartialEq, Eq)]
 pub(super) enum ChunkedState {
     Size,
     SizeLws,

@@ -70,13 +71,13 @@ impl ChunkedState {

         match size.checked_mul(radix) {
             Some(n) => {
-                *size = n as u64;
+                *size = n;
                 *size += rem as u64;

                 Poll::Ready(Ok(ChunkedState::Size))
             }
             None => {
-                log::debug!("chunk size would overflow u64");
+                debug!("chunk size would overflow u64");
                 Poll::Ready(Err(io::Error::new(
                     io::ErrorKind::InvalidInput,
                     "Invalid chunk size line: Size is too big",

@@ -124,7 +125,7 @@ impl ChunkedState {
         rem: &mut u64,
         buf: &mut Option<Bytes>,
     ) -> Poll<Result<ChunkedState, io::Error>> {
-        log::trace!("Chunked read, remaining={:?}", rem);
+        trace!("Chunked read, remaining={:?}", rem);

         let len = rdr.len() as u64;
         if len == 0 {
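
A standalone sketch (not part of the diff) of the overflow guard used in the chunk-size parser above: each hex digit multiplies the running size by 16, and `checked_mul` turns a would-be wrap-around into an error instead of a silently wrong length (the sketch also checks the addition for good measure):

    fn push_hex_digit(size: u64, digit: u64) -> Option<u64> {
        // None signals that the chunk size line would overflow u64
        size.checked_mul(16)?.checked_add(digit)
    }

    fn main() {
        assert_eq!(push_hex_digit(0x1, 0xF), Some(0x1F));
        assert_eq!(push_hex_digit(u64::MAX / 8, 0x0), None); // would overflow u64
    }
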
@@ -1,9 +1,9 @@
-use std::io;
+use std::{fmt, io};

-use actix_codec::{Decoder, Encoder};
 use bitflags::bitflags;
 use bytes::{Bytes, BytesMut};
 use http::{Method, Version};
+use tokio_util::codec::{Decoder, Encoder};

 use super::{
     decoder::{self, PayloadDecoder, PayloadItem, PayloadType},

@@ -16,9 +16,10 @@ use crate::{
 };

 bitflags! {
+    #[derive(Debug, Clone, Copy)]
     struct Flags: u8 {
         const HEAD = 0b0000_0001;
-        const KEEPALIVE_ENABLED = 0b0000_1000;
+        const KEEP_ALIVE_ENABLED = 0b0000_1000;
         const STREAM = 0b0001_0000;
     }
 }

@@ -38,7 +39,7 @@ struct ClientCodecInner {
     decoder: decoder::MessageDecoder<ResponseHead>,
     payload: Option<PayloadDecoder>,
     version: Version,
-    ctype: ConnectionType,
+    conn_type: ConnectionType,

     // encoder part
     flags: Flags,

@@ -51,23 +52,32 @@ impl Default for ClientCodec {
     }
 }

+impl fmt::Debug for ClientCodec {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("h1::ClientCodec")
+            .field("flags", &self.inner.flags)
+            .finish_non_exhaustive()
+    }
+}
+
 impl ClientCodec {
     /// Create HTTP/1 codec.
     ///
     /// `keepalive_enabled` how response `connection` header get generated.
     pub fn new(config: ServiceConfig) -> Self {
-        let flags = if config.keep_alive_enabled() {
-            Flags::KEEPALIVE_ENABLED
+        let flags = if config.keep_alive().enabled() {
+            Flags::KEEP_ALIVE_ENABLED
         } else {
             Flags::empty()
         };

         ClientCodec {
             inner: ClientCodecInner {
                 config,
                 decoder: decoder::MessageDecoder::default(),
                 payload: None,
                 version: Version::HTTP_11,
-                ctype: ConnectionType::Close,
+                conn_type: ConnectionType::Close,

                 flags,
                 encoder: encoder::MessageEncoder::default(),

@@ -77,12 +87,12 @@ impl ClientCodec {

     /// Check if request is upgrade
     pub fn upgrade(&self) -> bool {
-        self.inner.ctype == ConnectionType::Upgrade
+        self.inner.conn_type == ConnectionType::Upgrade
     }

     /// Check if last response is keep-alive
-    pub fn keepalive(&self) -> bool {
-        self.inner.ctype == ConnectionType::KeepAlive
+    pub fn keep_alive(&self) -> bool {
+        self.inner.conn_type == ConnectionType::KeepAlive
     }

     /// Check last request's message type

@@ -104,8 +114,8 @@ impl ClientCodec {

 impl ClientPayloadCodec {
     /// Check if last response is keep-alive
-    pub fn keepalive(&self) -> bool {
-        self.inner.ctype == ConnectionType::KeepAlive
+    pub fn keep_alive(&self) -> bool {
+        self.inner.conn_type == ConnectionType::KeepAlive
     }

     /// Transform payload codec to a message codec

@@ -119,15 +129,18 @@ impl Decoder for ClientCodec {
     type Error = ParseError;

     fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
-        debug_assert!(!self.inner.payload.is_some(), "Payload decoder is set");
+        debug_assert!(
+            self.inner.payload.is_none(),
+            "Payload decoder should not be set"
+        );
+
         if let Some((req, payload)) = self.inner.decoder.decode(src)? {
-            if let Some(ctype) = req.conn_type() {
+            if let Some(conn_type) = req.conn_type() {
                 // do not use peer's keep-alive
-                self.inner.ctype = if ctype == ConnectionType::KeepAlive {
-                    self.inner.ctype
+                self.inner.conn_type = if conn_type == ConnectionType::KeepAlive {
+                    self.inner.conn_type
                 } else {
-                    ctype
+                    conn_type
                 };
             }

@@ -192,9 +205,9 @@ impl Encoder<Message<(RequestHeadType, BodySize)>> for ClientCodec {
             .set(Flags::HEAD, head.as_ref().method == Method::HEAD);

         // connection status
-        inner.ctype = match head.as_ref().connection_type() {
+        inner.conn_type = match head.as_ref().connection_type() {
             ConnectionType::KeepAlive => {
-                if inner.flags.contains(Flags::KEEPALIVE_ENABLED) {
+                if inner.flags.contains(Flags::KEEP_ALIVE_ENABLED) {
                     ConnectionType::KeepAlive
                 } else {
                     ConnectionType::Close

@@ -211,7 +224,7 @@ impl Encoder<Message<(RequestHeadType, BodySize)>> for ClientCodec {
                 false,
                 inner.version,
                 length,
-                inner.ctype,
+                inner.conn_type,
                 &inner.config,
             )?;
         }
@ -1,22 +1,21 @@
|
|||||||
use std::{fmt, io};
|
use std::{fmt, io};
|
||||||
|
|
||||||
use actix_codec::{Decoder, Encoder};
|
|
||||||
use bitflags::bitflags;
|
use bitflags::bitflags;
|
||||||
use bytes::BytesMut;
|
use bytes::BytesMut;
|
||||||
use http::{Method, Version};
|
use http::{Method, Version};
|
||||||
|
use tokio_util::codec::{Decoder, Encoder};
|
||||||
|
|
||||||
use super::{
|
use super::{
|
||||||
decoder::{self, PayloadDecoder, PayloadItem, PayloadType},
|
decoder::{self, PayloadDecoder, PayloadItem, PayloadType},
|
||||||
encoder, Message, MessageType,
|
encoder, Message, MessageType,
|
||||||
};
|
};
|
||||||
use crate::{
|
use crate::{body::BodySize, error::ParseError, ConnectionType, Request, Response, ServiceConfig};
|
||||||
body::BodySize, error::ParseError, ConnectionType, Request, Response, ServiceConfig,
|
|
||||||
};
|
|
||||||
|
|
||||||
bitflags! {
|
bitflags! {
|
||||||
|
#[derive(Debug, Clone, Copy)]
|
||||||
struct Flags: u8 {
|
struct Flags: u8 {
|
||||||
const HEAD = 0b0000_0001;
|
const HEAD = 0b0000_0001;
|
||||||
const KEEPALIVE_ENABLED = 0b0000_0010;
|
const KEEP_ALIVE_ENABLED = 0b0000_0010;
|
||||||
const STREAM = 0b0000_0100;
|
const STREAM = 0b0000_0100;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -42,7 +41,9 @@ impl Default for Codec {
|
|||||||
|
|
||||||
impl fmt::Debug for Codec {
|
impl fmt::Debug for Codec {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
write!(f, "h1::Codec({:?})", self.flags)
|
f.debug_struct("h1::Codec")
|
||||||
|
.field("flags", &self.flags)
|
||||||
|
.finish_non_exhaustive()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -51,8 +52,8 @@ impl Codec {
|
|||||||
///
|
///
|
||||||
/// `keepalive_enabled` how response `connection` header get generated.
|
/// `keepalive_enabled` how response `connection` header get generated.
|
||||||
pub fn new(config: ServiceConfig) -> Self {
|
pub fn new(config: ServiceConfig) -> Self {
|
||||||
let flags = if config.keep_alive_enabled() {
|
let flags = if config.keep_alive().enabled() {
|
||||||
Flags::KEEPALIVE_ENABLED
|
Flags::KEEP_ALIVE_ENABLED
|
||||||
} else {
|
} else {
|
||||||
Flags::empty()
|
Flags::empty()
|
||||||
};
|
};
|
||||||
@@ -76,14 +77,14 @@ impl Codec {

     /// Check if last response is keep-alive.
     #[inline]
-    pub fn keepalive(&self) -> bool {
+    pub fn keep_alive(&self) -> bool {
         self.conn_type == ConnectionType::KeepAlive
     }

     /// Check if keep-alive enabled on server level.
     #[inline]
-    pub fn keepalive_enabled(&self) -> bool {
-        self.flags.contains(Flags::KEEPALIVE_ENABLED)
+    pub fn keep_alive_enabled(&self) -> bool {
+        self.flags.contains(Flags::KEEP_ALIVE_ENABLED)
     }

     /// Check last request's message type.
@@ -123,11 +124,13 @@ impl Decoder for Codec {
                 self.flags.set(Flags::HEAD, head.method == Method::HEAD);
                 self.version = head.version;
                 self.conn_type = head.connection_type();
+
                 if self.conn_type == ConnectionType::KeepAlive
-                    && !self.flags.contains(Flags::KEEPALIVE_ENABLED)
+                    && !self.flags.contains(Flags::KEEP_ALIVE_ENABLED)
                 {
                     self.conn_type = ConnectionType::Close
                 }
+
                 match payload {
                     PayloadType::None => self.payload = None,
                     PayloadType::Payload(pl) => self.payload = Some(pl),
@@ -179,9 +182,11 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
                     &self.config,
                 )?;
             }
+
             Message::Chunk(Some(bytes)) => {
                 self.encoder.encode_chunk(bytes.as_ref(), dst)?;
             }
+
             Message::Chunk(None) => {
                 self.encoder.encode_eof(dst)?;
             }
@@ -193,9 +198,6 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {

 #[cfg(test)]
 mod tests {
-    use bytes::BytesMut;
-    use http::Method;
-
     use super::*;
     use crate::HttpMessage as _;

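
For context, the decode path above downgrades the connection type: a request may ask for keep-alive, but if keep-alive is disabled at the server level (the renamed KEEP_ALIVE_ENABLED flag) the codec forces Close. A minimal sketch of that rule with stand-in types, not the real actix-http ones:

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ConnectionType {
    KeepAlive,
    Close,
}

// Mirrors the rule in Codec's Decoder impl: the client's preference only
// wins when the server has keep-alive enabled.
fn effective(requested: ConnectionType, keep_alive_enabled: bool) -> ConnectionType {
    if requested == ConnectionType::KeepAlive && !keep_alive_enabled {
        ConnectionType::Close
    } else {
        requested
    }
}

fn main() {
    assert_eq!(effective(ConnectionType::KeepAlive, false), ConnectionType::Close);
    assert_eq!(effective(ConnectionType::KeepAlive, true), ConnectionType::KeepAlive);
    assert_eq!(effective(ConnectionType::Close, true), ConnectionType::Close);
}
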
@@ -1,4 +1,4 @@
-use std::{convert::TryFrom, io, marker::PhantomData, mem::MaybeUninit, task::Poll};
+use std::{io, marker::PhantomData, mem::MaybeUninit, task::Poll};

 use actix_codec::Decoder;
 use bytes::{Bytes, BytesMut};
@@ -6,7 +6,7 @@ use http::{
     header::{self, HeaderName, HeaderValue},
     Method, StatusCode, Uri, Version,
 };
-use log::{debug, error, trace};
+use tracing::{debug, error, trace};

 use super::chunked::ChunkedState;
 use crate::{error::ParseError, header::HeaderMap, ConnectionType, Request, ResponseHead};
@@ -46,6 +46,23 @@ pub(crate) enum PayloadLength {
     None,
 }

+impl PayloadLength {
+    /// Returns true if variant is `None`.
+    fn is_none(&self) -> bool {
+        matches!(self, Self::None)
+    }
+
+    /// Returns true if variant is represents zero-length (not none) payload.
+    fn is_zero(&self) -> bool {
+        matches!(
+            self,
+            PayloadLength::Payload(PayloadType::Payload(PayloadDecoder {
+                kind: Kind::Length(0)
+            }))
+        )
+    }
+}
+
 pub(crate) trait MessageType: Sized {
     fn set_connection_type(&mut self, conn_type: Option<ConnectionType>);

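
The new is_zero helper above leans on matches! destructuring through nested enums and struct fields. A simplified, hypothetical version of the same pattern (one level of nesting instead of two, names invented for illustration):

#[derive(Debug, PartialEq, Eq)]
enum Kind {
    Length(u64),
    Eof,
}

#[derive(Debug, PartialEq, Eq)]
struct Decoder {
    kind: Kind,
}

enum Length {
    Payload(Decoder),
    None,
}

impl Length {
    // True only for an explicit zero-length payload.
    fn is_zero(&self) -> bool {
        matches!(self, Length::Payload(Decoder { kind: Kind::Length(0) }))
    }
}

fn main() {
    assert!(Length::Payload(Decoder { kind: Kind::Length(0) }).is_zero());
    assert!(!Length::Payload(Decoder { kind: Kind::Length(3) }).is_zero());
    assert!(!Length::Payload(Decoder { kind: Kind::Eof }).is_zero());
    assert!(!Length::None.is_zero());
}
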
@@ -59,6 +76,7 @@ pub(crate) trait MessageType: Sized {
         &mut self,
         slice: &Bytes,
         raw_headers: &[HeaderIndex],
+        version: Version,
     ) -> Result<PayloadLength, ParseError> {
         let mut ka = None;
         let mut has_upgrade_websocket = false;
@@ -76,9 +94,7 @@ pub(crate) trait MessageType: Sized {
                 // SAFETY: httparse already checks header value is only visible ASCII bytes
                 // from_maybe_shared_unchecked contains debug assertions so they are omitted here
                 let value = unsafe {
-                    HeaderValue::from_maybe_shared_unchecked(
-                        slice.slice(idx.value.0..idx.value.1),
-                    )
+                    HeaderValue::from_maybe_shared_unchecked(slice.slice(idx.value.0..idx.value.1))
                 };

                 match name {
@@ -87,21 +103,23 @@ pub(crate) trait MessageType: Sized {
                         return Err(ParseError::Header);
                     }

-                    header::CONTENT_LENGTH => match value.to_str() {
-                        Ok(s) if s.trim().starts_with('+') => {
-                            debug!("illegal Content-Length: {:?}", s);
+                    header::CONTENT_LENGTH => match value.to_str().map(str::trim) {
+                        Ok(val) if val.starts_with('+') => {
+                            debug!("illegal Content-Length: {:?}", val);
                             return Err(ParseError::Header);
                         }
-                        Ok(s) => {
-                            if let Ok(len) = s.parse::<u64>() {
-                                if len != 0 {
-                                    content_length = Some(len);
-                                }
-                            } else {
-                                debug!("illegal Content-Length: {:?}", s);
-                                return Err(ParseError::Header);
-                            }
-                        }
+
+                        Ok(val) => {
+                            if let Ok(len) = val.parse::<u64>() {
+                                // accept 0 lengths here and remove them in `decode` after all
+                                // headers have been processed to prevent request smuggling issues
+                                content_length = Some(len);
+                            } else {
+                                debug!("illegal Content-Length: {:?}", val);
+                                return Err(ParseError::Header);
+                            }
+                        }
+
                         Err(_) => {
                             debug!("illegal Content-Length: {:?}", value);
                             return Err(ParseError::Header);
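
To make the stricter validation above concrete, here is a standalone sketch; parse_content_length is illustrative, not an actix-http function. The value is trimmed, a leading '+' is rejected because u64's FromStr would otherwise accept it, negatives and garbage fail to parse, and zero is kept so the caller can decide what a zero-length payload means after all headers are seen:

fn parse_content_length(raw: &str) -> Result<u64, &'static str> {
    let val = raw.trim();
    if val.starts_with('+') {
        // "+5".parse::<u64>() would succeed, so reject it explicitly.
        return Err("illegal Content-Length");
    }
    val.parse::<u64>().map_err(|_| "illegal Content-Length")
}

fn main() {
    assert_eq!(parse_content_length(" 11 "), Ok(11));
    assert_eq!(parse_content_length("0"), Ok(0));
    assert!(parse_content_length("+3").is_err());
    assert!(parse_content_length("-1").is_err());
    assert!(parse_content_length("abc").is_err());
}
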
@@ -114,22 +132,23 @@ pub(crate) trait MessageType: Sized {
                         return Err(ParseError::Header);
                     }

-                    header::TRANSFER_ENCODING => {
+                    header::TRANSFER_ENCODING if version == Version::HTTP_11 => {
                         seen_te = true;

-                        if let Ok(s) = value.to_str().map(str::trim) {
-                            if s.eq_ignore_ascii_case("chunked") {
+                        if let Ok(val) = value.to_str().map(str::trim) {
+                            if val.eq_ignore_ascii_case("chunked") {
                                 chunked = true;
-                            } else if s.eq_ignore_ascii_case("identity") {
+                            } else if val.eq_ignore_ascii_case("identity") {
                                 // allow silently since multiple TE headers are already checked
                             } else {
-                                debug!("illegal Transfer-Encoding: {:?}", s);
+                                debug!("illegal Transfer-Encoding: {:?}", val);
                                 return Err(ParseError::Header);
                             }
                         } else {
                             return Err(ParseError::Header);
                         }
                     }
+
                     // connection keep-alive state
                     header::CONNECTION => {
                         ka = if let Ok(conn) = value.to_str().map(str::trim) {
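
The match guard added above means Transfer-Encoding is only honored for HTTP/1.1 requests; for HTTP/1.0 the header falls through to the catch-all arm and the body length must come from Content-Length instead. A tiny sketch of the guard pattern with stand-in types:

#[derive(PartialEq)]
enum Version {
    Http10,
    Http11,
}

fn honors_chunked(header_name: &str, version: &Version) -> bool {
    match header_name {
        // The guard keeps this arm from matching for HTTP/1.0.
        "transfer-encoding" if *version == Version::Http11 => true,
        _ => false,
    }
}

fn main() {
    assert!(honors_chunked("transfer-encoding", &Version::Http11));
    assert!(!honors_chunked("transfer-encoding", &Version::Http10));
    assert!(!honors_chunked("content-length", &Version::Http11));
}
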
@@ -146,6 +165,7 @@ pub(crate) trait MessageType: Sized {
                             None
                         };
                     }
+
                     header::UPGRADE => {
                         if let Ok(val) = value.to_str().map(str::trim) {
                             if val.eq_ignore_ascii_case("websocket") {
@@ -153,19 +173,23 @@ pub(crate) trait MessageType: Sized {
                             }
                         }
                     }
+
                     header::EXPECT => {
                         let bytes = value.as_bytes();
                         if bytes.len() >= 4 && &bytes[0..4] == b"100-" {
                             expect = true;
                         }
                     }
+
                     _ => {}
                 }

                 headers.append(name, value);
             }
         }
+
         self.set_connection_type(ka);
+
         if expect {
             self.set_expect()
         }
@@ -209,15 +233,16 @@ impl MessageType for Request {

         let (len, method, uri, ver, h_len) = {
             // SAFETY:
-            // Create an uninitialized array of `MaybeUninit`. The `assume_init` is
-            // safe because the type we are claiming to have initialized here is a
-            // bunch of `MaybeUninit`s, which do not require initialization.
+            // Create an uninitialized array of `MaybeUninit`. The `assume_init` is safe because the
+            // type we are claiming to have initialized here is a bunch of `MaybeUninit`s, which
+            // do not require initialization.
             let mut parsed = unsafe {
                 MaybeUninit::<[MaybeUninit<httparse::Header<'_>>; MAX_HEADERS]>::uninit()
                     .assume_init()
             };

             let mut req = httparse::Request::new(&mut []);

             match req.parse_with_uninit_headers(src, &mut parsed)? {
                 httparse::Status::Complete(len) => {
                     let method = Method::from_bytes(req.method.unwrap().as_bytes())
@@ -232,6 +257,7 @@ impl MessageType for Request {

                     (len, method, uri, version, req.headers.len())
                 }
+
                 httparse::Status::Partial => {
                     return if src.len() >= MAX_BUFFER_SIZE {
                         trace!("MAX_BUFFER_SIZE unprocessed data reached, closing");
@@ -247,7 +273,21 @@ impl MessageType for Request {
        let mut msg = Request::new();

        // convert headers
-       let length = msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len])?;
+       let mut length = msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len], ver)?;
+
+       // disallow HTTP/1.0 POST requests that do not contain a Content-Length headers
+       // see https://datatracker.ietf.org/doc/html/rfc1945#section-7.2.2
+       if ver == Version::HTTP_10 && method == Method::POST && length.is_none() {
+           debug!("no Content-Length specified for HTTP/1.0 POST request");
+           return Err(ParseError::Header);
+       }
+
+       // Remove CL value if 0 now that all headers and HTTP/1.0 special cases are processed.
+       // Protects against some request smuggling attacks.
+       // See https://github.com/actix/actix-web/issues/2767.
+       if length.is_zero() {
+           length = PayloadLength::None;
+       }

        // payload decoder
        let decoder = match length {
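
The two new blocks above close related request-smuggling holes: an HTTP/1.0 POST without a Content-Length is rejected outright (per the RFC 1945 reference in the diff), and a Content-Length of 0 is only collapsed to "no payload" once every header has been processed. A simplified sketch of the combined rule, with hypothetical stand-in types:

#[derive(Clone, Copy, PartialEq)]
enum Ver {
    Http10,
    Http11,
}

#[derive(Debug, PartialEq)]
enum Length {
    None,
    Zero,
    Chunked,
}

fn finalize_length(ver: Ver, is_post: bool, mut length: Length) -> Result<Length, &'static str> {
    // HTTP/1.0 has no chunked encoding, so a POST with no declared length is ambiguous.
    if ver == Ver::Http10 && is_post && length == Length::None {
        return Err("no Content-Length specified for HTTP/1.0 POST request");
    }
    // Only after all headers are known is a zero length treated as "no payload".
    if length == Length::Zero {
        length = Length::None;
    }
    Ok(length)
}

fn main() {
    assert!(finalize_length(Ver::Http10, true, Length::None).is_err());
    assert_eq!(finalize_length(Ver::Http11, true, Length::Zero), Ok(Length::None));
    assert_eq!(finalize_length(Ver::Http10, false, Length::None), Ok(Length::None));
    assert_eq!(finalize_length(Ver::Http11, true, Length::Chunked), Ok(Length::Chunked));
}
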
@@ -291,22 +331,35 @@ impl MessageType for ResponseHead {
        let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;

        let (len, ver, status, h_len) = {
-           let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;
-
-           let mut res = httparse::Response::new(&mut parsed);
-           match res.parse(src)? {
+           // SAFETY:
+           // Create an uninitialized array of `MaybeUninit`. The `assume_init` is safe because the
+           // type we are claiming to have initialized here is a bunch of `MaybeUninit`s, which
+           // do not require initialization.
+           let mut parsed = unsafe {
+               MaybeUninit::<[MaybeUninit<httparse::Header<'_>>; MAX_HEADERS]>::uninit()
+                   .assume_init()
+           };
+
+           let mut res = httparse::Response::new(&mut []);
+
+           let mut config = httparse::ParserConfig::default();
+           config.allow_spaces_after_header_name_in_responses(true);
+
+           match config.parse_response_with_uninit_headers(&mut res, src, &mut parsed)? {
                httparse::Status::Complete(len) => {
                    let version = if res.version.unwrap() == 1 {
                        Version::HTTP_11
                    } else {
                        Version::HTTP_10
                    };
-                   let status = StatusCode::from_u16(res.code.unwrap())
-                       .map_err(|_| ParseError::Status)?;
+
+                   let status =
+                       StatusCode::from_u16(res.code.unwrap()).map_err(|_| ParseError::Status)?;
+
                    HeaderIndex::record(src, res.headers, &mut headers);

                    (len, version, status, res.headers.len())
                }
+
                httparse::Status::Partial => {
                    return if src.len() >= MAX_BUFFER_SIZE {
                        error!("MAX_BUFFER_SIZE unprocessed data reached, closing");
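
The response parser above now goes through httparse's ParserConfig so it can tolerate whitespace between a header name and the colon, which some servers emit. A hedged usage sketch of that builder, using the plain parse_response helper rather than the uninit-headers variant the diff calls:

fn main() {
    // Note the space before ':' in the Server header, which strict parsing rejects.
    let raw = b"HTTP/1.1 200 OK\r\nServer : example\r\n\r\n";

    let mut headers = [httparse::EMPTY_HEADER; 16];
    let mut res = httparse::Response::new(&mut headers);

    let mut config = httparse::ParserConfig::default();
    config.allow_spaces_after_header_name_in_responses(true);

    match config.parse_response(&mut res, raw) {
        Ok(httparse::Status::Complete(len)) => {
            assert_eq!(res.code, Some(200));
            println!("parsed {len} bytes, {} headers", res.headers.len());
        }
        Ok(httparse::Status::Partial) => println!("need more bytes"),
        Err(err) => println!("parse error: {err:?}"),
    }
}
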
@@ -322,7 +375,14 @@ impl MessageType for ResponseHead {
        msg.version = ver;

        // convert headers
-       let length = msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len])?;
+       let mut length = msg.set_headers(&src.split_to(len).freeze(), &headers[..h_len], ver)?;
+
+       // Remove CL value if 0 now that all headers and HTTP/1.0 special cases are processed.
+       // Protects against some request smuggling attacks.
+       // See https://github.com/actix/actix-web/issues/2767.
+       if length.is_zero() {
+           length = PayloadLength::None;
+       }

        // message payload
        let decoder = if let PayloadLength::Payload(pl) = length {
@@ -358,9 +418,6 @@ pub(crate) const EMPTY_HEADER_INDEX: HeaderIndex = HeaderIndex {
 pub(crate) const EMPTY_HEADER_INDEX_ARRAY: [HeaderIndex; MAX_HEADERS] =
     [EMPTY_HEADER_INDEX; MAX_HEADERS];

-pub(crate) const EMPTY_HEADER_ARRAY: [httparse::Header<'static>; MAX_HEADERS] =
-    [httparse::EMPTY_HEADER; MAX_HEADERS];
-
 impl HeaderIndex {
     pub(crate) fn record(
         bytes: &[u8],
@@ -379,61 +436,64 @@ impl HeaderIndex {
     }
 }

-#[derive(Debug, Clone, PartialEq)]
-/// Http payload item
+#[derive(Debug, Clone, PartialEq, Eq)]
+/// Chunk type yielded while decoding a payload.
 pub enum PayloadItem {
     Chunk(Bytes),
     Eof,
 }

-/// Decoders to handle different Transfer-Encodings.
+/// Decoder that can handle different payload types.
 ///
-/// If a message body does not include a Transfer-Encoding, it *should*
-/// include a Content-Length header.
-#[derive(Debug, Clone, PartialEq)]
+/// If a message body does not use `Transfer-Encoding`, it should include a `Content-Length`.
+#[derive(Debug, Clone, PartialEq, Eq)]
 pub struct PayloadDecoder {
     kind: Kind,
 }

 impl PayloadDecoder {
+    /// Constructs a fixed-length payload decoder.
     pub fn length(x: u64) -> PayloadDecoder {
         PayloadDecoder {
             kind: Kind::Length(x),
         }
     }

+    /// Constructs a chunked encoding decoder.
     pub fn chunked() -> PayloadDecoder {
         PayloadDecoder {
             kind: Kind::Chunked(ChunkedState::Size, 0),
         }
     }

+    /// Creates an decoder that yields chunks until the stream returns EOF.
     pub fn eof() -> PayloadDecoder {
         PayloadDecoder { kind: Kind::Eof }
     }
 }

-#[derive(Debug, Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
 enum Kind {
-    /// A Reader used when a Content-Length header is passed with a positive
-    /// integer.
+    /// A reader used when a `Content-Length` header is passed with a positive integer.
     Length(u64),
-    /// A Reader used when Transfer-Encoding is `chunked`.
+
+    /// A reader used when `Transfer-Encoding` is `chunked`.
     Chunked(ChunkedState, u64),
-    /// A Reader used for responses that don't indicate a length or chunked.
+
+    /// A reader used for responses that don't indicate a length or chunked.
     ///
-    /// Note: This should only used for `Response`s. It is illegal for a
-    /// `Request` to be made with both `Content-Length` and
-    /// `Transfer-Encoding: chunked` missing, as explained from the spec:
+    /// Note: This should only used for `Response`s. It is illegal for a `Request` to be made
+    /// without either of `Content-Length` and `Transfer-Encoding: chunked` missing, as explained
+    /// in [RFC 7230 §3.3.3]:
     ///
-    /// > If a Transfer-Encoding header field is present in a response and
-    /// > the chunked transfer coding is not the final encoding, the
-    /// > message body length is determined by reading the connection until
-    /// > it is closed by the server. If a Transfer-Encoding header field
-    /// > is present in a request and the chunked transfer coding is not
-    /// > the final encoding, the message body length cannot be determined
-    /// > reliably; the server MUST respond with the 400 (Bad Request)
-    /// > status code and then close the connection.
+    /// > If a Transfer-Encoding header field is present in a response and the chunked transfer
+    /// > coding is not the final encoding, the message body length is determined by reading the
+    /// > connection until it is closed by the server. If a Transfer-Encoding header field is
+    /// > present in a request and the chunked transfer coding is not the final encoding, the
+    /// > message body length cannot be determined reliably; the server MUST respond with the 400
+    /// > (Bad Request) status code and then close the connection.
+    ///
+    /// [RFC 7230 §3.3.3]: https://datatracker.ietf.org/doc/html/rfc7230#section-3.3.3
     Eof,
 }

@@ -463,6 +523,7 @@ impl Decoder for PayloadDecoder {
                     Ok(Some(PayloadItem::Chunk(buf)))
                 }
             }
+
             Kind::Chunked(ref mut state, ref mut size) => {
                 loop {
                     let mut buf = None;
@@ -471,7 +532,7 @@ impl Decoder for PayloadDecoder {
                     *state = match state.step(src, size, &mut buf) {
                         Poll::Pending => return Ok(None),
                         Poll::Ready(Ok(state)) => state,
-                        Poll::Ready(Err(e)) => return Err(e),
+                        Poll::Ready(Err(err)) => return Err(err),
                     };

                     if *state == ChunkedState::End {
@@ -488,6 +549,7 @@ impl Decoder for PayloadDecoder {
                     }
                 }
             }
+
             Kind::Eof => {
                 if src.is_empty() {
                     Ok(None)
@@ -501,15 +563,8 @@ impl Decoder for PayloadDecoder {

 #[cfg(test)]
 mod tests {
-    use bytes::{Bytes, BytesMut};
-    use http::{Method, Version};
-
     use super::*;
-    use crate::{
-        error::ParseError,
-        header::{HeaderName, SET_COOKIE},
-        HttpMessage as _,
-    };
+    use crate::{header::SET_COOKIE, HttpMessage as _};

     impl PayloadType {
         pub(crate) fn unwrap(self) -> PayloadDecoder {
@ -589,14 +644,100 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_post() {
|
fn parse_h09_reject() {
|
||||||
let mut buf = BytesMut::from("POST /test2 HTTP/1.0\r\n\r\n");
|
let mut buf = BytesMut::from(
|
||||||
|
"GET /test1 HTTP/0.9\r\n\
|
||||||
|
\r\n",
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut reader = MessageDecoder::<Request>::default();
|
||||||
|
reader.decode(&mut buf).unwrap_err();
|
||||||
|
|
||||||
|
let mut buf = BytesMut::from(
|
||||||
|
"POST /test2 HTTP/0.9\r\n\
|
||||||
|
Content-Length: 3\r\n\
|
||||||
|
\r\n
|
||||||
|
abc",
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut reader = MessageDecoder::<Request>::default();
|
||||||
|
reader.decode(&mut buf).unwrap_err();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_h10_get() {
|
||||||
|
let mut buf = BytesMut::from(
|
||||||
|
"GET /test1 HTTP/1.0\r\n\
|
||||||
|
\r\n",
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut reader = MessageDecoder::<Request>::default();
|
||||||
|
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||||
|
assert_eq!(req.version(), Version::HTTP_10);
|
||||||
|
assert_eq!(*req.method(), Method::GET);
|
||||||
|
assert_eq!(req.path(), "/test1");
|
||||||
|
|
||||||
|
let mut buf = BytesMut::from(
|
||||||
|
"GET /test2 HTTP/1.0\r\n\
|
||||||
|
Content-Length: 0\r\n\
|
||||||
|
\r\n",
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut reader = MessageDecoder::<Request>::default();
|
||||||
|
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||||
|
assert_eq!(req.version(), Version::HTTP_10);
|
||||||
|
assert_eq!(*req.method(), Method::GET);
|
||||||
|
assert_eq!(req.path(), "/test2");
|
||||||
|
|
||||||
|
let mut buf = BytesMut::from(
|
||||||
|
"GET /test3 HTTP/1.0\r\n\
|
||||||
|
Content-Length: 3\r\n\
|
||||||
|
\r\n
|
||||||
|
abc",
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut reader = MessageDecoder::<Request>::default();
|
||||||
|
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||||
|
assert_eq!(req.version(), Version::HTTP_10);
|
||||||
|
assert_eq!(*req.method(), Method::GET);
|
||||||
|
assert_eq!(req.path(), "/test3");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_h10_post() {
|
||||||
|
let mut buf = BytesMut::from(
|
||||||
|
"POST /test1 HTTP/1.0\r\n\
|
||||||
|
Content-Length: 3\r\n\
|
||||||
|
\r\n\
|
||||||
|
abc",
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut reader = MessageDecoder::<Request>::default();
|
||||||
|
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||||
|
assert_eq!(req.version(), Version::HTTP_10);
|
||||||
|
assert_eq!(*req.method(), Method::POST);
|
||||||
|
assert_eq!(req.path(), "/test1");
|
||||||
|
|
||||||
|
let mut buf = BytesMut::from(
|
||||||
|
"POST /test2 HTTP/1.0\r\n\
|
||||||
|
Content-Length: 0\r\n\
|
||||||
|
\r\n",
|
||||||
|
);
|
||||||
|
|
||||||
let mut reader = MessageDecoder::<Request>::default();
|
let mut reader = MessageDecoder::<Request>::default();
|
||||||
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||||
assert_eq!(req.version(), Version::HTTP_10);
|
assert_eq!(req.version(), Version::HTTP_10);
|
||||||
assert_eq!(*req.method(), Method::POST);
|
assert_eq!(*req.method(), Method::POST);
|
||||||
assert_eq!(req.path(), "/test2");
|
assert_eq!(req.path(), "/test2");
|
||||||
|
|
||||||
|
let mut buf = BytesMut::from(
|
||||||
|
"POST /test3 HTTP/1.0\r\n\
|
||||||
|
\r\n",
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut reader = MessageDecoder::<Request>::default();
|
||||||
|
let err = reader.decode(&mut buf).unwrap_err();
|
||||||
|
assert!(err.to_string().contains("Header"))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -692,121 +833,98 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_conn_default_1_0() {
|
fn test_conn_default_1_0() {
|
||||||
let mut buf = BytesMut::from("GET /test HTTP/1.0\r\n\r\n");
|
let req = parse_ready!(&mut BytesMut::from("GET /test HTTP/1.0\r\n\r\n"));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_conn_default_1_1() {
|
fn test_conn_default_1_1() {
|
||||||
let mut buf = BytesMut::from("GET /test HTTP/1.1\r\n\r\n");
|
let req = parse_ready!(&mut BytesMut::from("GET /test HTTP/1.1\r\n\r\n"));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_conn_close() {
|
fn test_conn_close() {
|
||||||
let mut buf = BytesMut::from(
|
let req = parse_ready!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.1\r\n\
|
"GET /test HTTP/1.1\r\n\
|
||||||
connection: close\r\n\r\n",
|
connection: close\r\n\r\n",
|
||||||
);
|
));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
||||||
|
|
||||||
let mut buf = BytesMut::from(
|
let req = parse_ready!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.1\r\n\
|
"GET /test HTTP/1.1\r\n\
|
||||||
connection: Close\r\n\r\n",
|
connection: Close\r\n\r\n",
|
||||||
);
|
));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_conn_close_1_0() {
|
fn test_conn_close_1_0() {
|
||||||
let mut buf = BytesMut::from(
|
let req = parse_ready!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.0\r\n\
|
"GET /test HTTP/1.0\r\n\
|
||||||
connection: close\r\n\r\n",
|
connection: close\r\n\r\n",
|
||||||
);
|
));
|
||||||
|
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_conn_keep_alive_1_0() {
|
fn test_conn_keep_alive_1_0() {
|
||||||
let mut buf = BytesMut::from(
|
let req = parse_ready!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.0\r\n\
|
"GET /test HTTP/1.0\r\n\
|
||||||
connection: keep-alive\r\n\r\n",
|
connection: keep-alive\r\n\r\n",
|
||||||
);
|
));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
||||||
|
|
||||||
let mut buf = BytesMut::from(
|
let req = parse_ready!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.0\r\n\
|
"GET /test HTTP/1.0\r\n\
|
||||||
connection: Keep-Alive\r\n\r\n",
|
connection: Keep-Alive\r\n\r\n",
|
||||||
);
|
));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_conn_keep_alive_1_1() {
|
fn test_conn_keep_alive_1_1() {
|
||||||
let mut buf = BytesMut::from(
|
let req = parse_ready!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.1\r\n\
|
"GET /test HTTP/1.1\r\n\
|
||||||
connection: keep-alive\r\n\r\n",
|
connection: keep-alive\r\n\r\n",
|
||||||
);
|
));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_conn_other_1_0() {
|
fn test_conn_other_1_0() {
|
||||||
let mut buf = BytesMut::from(
|
let req = parse_ready!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.0\r\n\
|
"GET /test HTTP/1.0\r\n\
|
||||||
connection: other\r\n\r\n",
|
connection: other\r\n\r\n",
|
||||||
);
|
));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
assert_eq!(req.head().connection_type(), ConnectionType::Close);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_conn_other_1_1() {
|
fn test_conn_other_1_1() {
|
||||||
let mut buf = BytesMut::from(
|
let req = parse_ready!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.1\r\n\
|
"GET /test HTTP/1.1\r\n\
|
||||||
connection: other\r\n\r\n",
|
connection: other\r\n\r\n",
|
||||||
);
|
));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
assert_eq!(req.head().connection_type(), ConnectionType::KeepAlive);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_conn_upgrade() {
|
fn test_conn_upgrade() {
|
||||||
let mut buf = BytesMut::from(
|
let req = parse_ready!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.1\r\n\
|
"GET /test HTTP/1.1\r\n\
|
||||||
upgrade: websockets\r\n\
|
upgrade: websockets\r\n\
|
||||||
connection: upgrade\r\n\r\n",
|
connection: upgrade\r\n\r\n",
|
||||||
);
|
));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert!(req.upgrade());
|
assert!(req.upgrade());
|
||||||
assert_eq!(req.head().connection_type(), ConnectionType::Upgrade);
|
assert_eq!(req.head().connection_type(), ConnectionType::Upgrade);
|
||||||
|
|
||||||
let mut buf = BytesMut::from(
|
let req = parse_ready!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.1\r\n\
|
"GET /test HTTP/1.1\r\n\
|
||||||
upgrade: Websockets\r\n\
|
upgrade: Websockets\r\n\
|
||||||
connection: Upgrade\r\n\r\n",
|
connection: Upgrade\r\n\r\n",
|
||||||
);
|
));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert!(req.upgrade());
|
assert!(req.upgrade());
|
||||||
assert_eq!(req.head().connection_type(), ConnectionType::Upgrade);
|
assert_eq!(req.head().connection_type(), ConnectionType::Upgrade);
|
||||||
@ -814,59 +932,62 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_conn_upgrade_connect_method() {
|
fn test_conn_upgrade_connect_method() {
|
||||||
let mut buf = BytesMut::from(
|
let req = parse_ready!(&mut BytesMut::from(
|
||||||
"CONNECT /test HTTP/1.1\r\n\
|
"CONNECT /test HTTP/1.1\r\n\
|
||||||
content-type: text/plain\r\n\r\n",
|
content-type: text/plain\r\n\r\n",
|
||||||
);
|
));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert!(req.upgrade());
|
assert!(req.upgrade());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_headers_content_length_err_1() {
|
fn test_headers_bad_content_length() {
|
||||||
let mut buf = BytesMut::from(
|
// string CL
|
||||||
|
expect_parse_err!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.1\r\n\
|
"GET /test HTTP/1.1\r\n\
|
||||||
content-length: line\r\n\r\n",
|
content-length: line\r\n\r\n",
|
||||||
);
|
));
|
||||||
|
|
||||||
expect_parse_err!(&mut buf)
|
// negative CL
|
||||||
|
expect_parse_err!(&mut BytesMut::from(
|
||||||
|
"GET /test HTTP/1.1\r\n\
|
||||||
|
content-length: -1\r\n\r\n",
|
||||||
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_headers_content_length_err_2() {
|
fn octal_ish_cl_parsed_as_decimal() {
|
||||||
let mut buf = BytesMut::from(
|
let mut buf = BytesMut::from(
|
||||||
"GET /test HTTP/1.1\r\n\
|
"POST /test HTTP/1.1\r\n\
|
||||||
content-length: -1\r\n\r\n",
|
content-length: 011\r\n\r\n",
|
||||||
);
|
);
|
||||||
|
let mut reader = MessageDecoder::<Request>::default();
|
||||||
expect_parse_err!(&mut buf);
|
let (_req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||||
|
assert!(matches!(
|
||||||
|
pl,
|
||||||
|
PayloadType::Payload(pl) if pl == PayloadDecoder::length(11)
|
||||||
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_invalid_header() {
|
fn test_invalid_header() {
|
||||||
let mut buf = BytesMut::from(
|
expect_parse_err!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.1\r\n\
|
"GET /test HTTP/1.1\r\n\
|
||||||
test line\r\n\r\n",
|
test line\r\n\r\n",
|
||||||
);
|
));
|
||||||
|
|
||||||
expect_parse_err!(&mut buf);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_invalid_name() {
|
fn test_invalid_name() {
|
||||||
let mut buf = BytesMut::from(
|
expect_parse_err!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.1\r\n\
|
"GET /test HTTP/1.1\r\n\
|
||||||
test[]: line\r\n\r\n",
|
test[]: line\r\n\r\n",
|
||||||
);
|
));
|
||||||
|
|
||||||
expect_parse_err!(&mut buf);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_http_request_bad_status_line() {
|
fn test_http_request_bad_status_line() {
|
||||||
let mut buf = BytesMut::from("getpath \r\n\r\n");
|
expect_parse_err!(&mut BytesMut::from("getpath \r\n\r\n"));
|
||||||
expect_parse_err!(&mut buf);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -906,11 +1027,10 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_http_request_parser_utf8() {
|
fn test_http_request_parser_utf8() {
|
||||||
let mut buf = BytesMut::from(
|
let req = parse_ready!(&mut BytesMut::from(
|
||||||
"GET /test HTTP/1.1\r\n\
|
"GET /test HTTP/1.1\r\n\
|
||||||
x-test: тест\r\n\r\n",
|
x-test: тест\r\n\r\n",
|
||||||
);
|
));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
req.headers().get("x-test").unwrap().as_bytes(),
|
req.headers().get("x-test").unwrap().as_bytes(),
|
||||||
@ -920,24 +1040,18 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_http_request_parser_two_slashes() {
|
fn test_http_request_parser_two_slashes() {
|
||||||
let mut buf = BytesMut::from("GET //path HTTP/1.1\r\n\r\n");
|
let req = parse_ready!(&mut BytesMut::from("GET //path HTTP/1.1\r\n\r\n"));
|
||||||
let req = parse_ready!(&mut buf);
|
|
||||||
|
|
||||||
assert_eq!(req.path(), "//path");
|
assert_eq!(req.path(), "//path");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_http_request_parser_bad_method() {
|
fn test_http_request_parser_bad_method() {
|
||||||
let mut buf = BytesMut::from("!12%()+=~$ /get HTTP/1.1\r\n\r\n");
|
expect_parse_err!(&mut BytesMut::from("!12%()+=~$ /get HTTP/1.1\r\n\r\n"));
|
||||||
|
|
||||||
expect_parse_err!(&mut buf);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_http_request_parser_bad_version() {
|
fn test_http_request_parser_bad_version() {
|
||||||
let mut buf = BytesMut::from("GET //get HT/11\r\n\r\n");
|
expect_parse_err!(&mut BytesMut::from("GET //get HT/11\r\n\r\n"));
|
||||||
|
|
||||||
expect_parse_err!(&mut buf);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -954,29 +1068,66 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn hrs_multiple_content_length() {
|
fn hrs_multiple_content_length() {
|
||||||
let mut buf = BytesMut::from(
|
expect_parse_err!(&mut BytesMut::from(
|
||||||
"GET / HTTP/1.1\r\n\
|
"GET / HTTP/1.1\r\n\
|
||||||
Host: example.com\r\n\
|
Host: example.com\r\n\
|
||||||
Content-Length: 4\r\n\
|
Content-Length: 4\r\n\
|
||||||
Content-Length: 2\r\n\
|
Content-Length: 2\r\n\
|
||||||
\r\n\
|
\r\n\
|
||||||
abcd",
|
abcd",
|
||||||
);
|
));
|
||||||
|
|
||||||
expect_parse_err!(&mut buf);
|
expect_parse_err!(&mut BytesMut::from(
|
||||||
|
"GET / HTTP/1.1\r\n\
|
||||||
|
Host: example.com\r\n\
|
||||||
|
Content-Length: 0\r\n\
|
||||||
|
Content-Length: 2\r\n\
|
||||||
|
\r\n\
|
||||||
|
ab",
|
||||||
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn hrs_content_length_plus() {
|
fn hrs_content_length_plus() {
|
||||||
let mut buf = BytesMut::from(
|
expect_parse_err!(&mut BytesMut::from(
|
||||||
"GET / HTTP/1.1\r\n\
|
"GET / HTTP/1.1\r\n\
|
||||||
Host: example.com\r\n\
|
Host: example.com\r\n\
|
||||||
Content-Length: +3\r\n\
|
Content-Length: +3\r\n\
|
||||||
\r\n\
|
\r\n\
|
||||||
000",
|
000",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hrs_te_http10() {
|
||||||
|
// in HTTP/1.0 transfer encoding is ignored and must therefore contain a CL header
|
||||||
|
|
||||||
|
expect_parse_err!(&mut BytesMut::from(
|
||||||
|
"POST / HTTP/1.0\r\n\
|
||||||
|
Host: example.com\r\n\
|
||||||
|
Transfer-Encoding: chunked\r\n\
|
||||||
|
\r\n\
|
||||||
|
3\r\n\
|
||||||
|
aaa\r\n\
|
||||||
|
0\r\n\
|
||||||
|
",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hrs_cl_and_te_http10() {
|
||||||
|
// in HTTP/1.0 transfer encoding is simply ignored so it's fine to have both
|
||||||
|
|
||||||
|
let mut buf = BytesMut::from(
|
||||||
|
"GET / HTTP/1.0\r\n\
|
||||||
|
Host: example.com\r\n\
|
||||||
|
Content-Length: 3\r\n\
|
||||||
|
Transfer-Encoding: chunked\r\n\
|
||||||
|
\r\n\
|
||||||
|
000",
|
||||||
);
|
);
|
||||||
|
|
||||||
expect_parse_err!(&mut buf);
|
parse_ready!(&mut buf);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
(File diff suppressed because it is too large.)

actix-http/src/h1/dispatcher_tests.rs (new file, 972 lines)
@@ -0,0 +1,972 @@
|
|||||||
|
use std::{future::Future, str, task::Poll, time::Duration};
|
||||||
|
|
||||||
|
use actix_codec::Framed;
|
||||||
|
use actix_rt::{pin, time::sleep};
|
||||||
|
use actix_service::{fn_service, Service};
|
||||||
|
use actix_utils::future::{ready, Ready};
|
||||||
|
use bytes::{Buf, Bytes, BytesMut};
|
||||||
|
use futures_util::future::lazy;
|
||||||
|
|
||||||
|
use super::dispatcher::{Dispatcher, DispatcherState, DispatcherStateProj, Flags};
|
||||||
|
use crate::{
|
||||||
|
body::MessageBody,
|
||||||
|
config::ServiceConfig,
|
||||||
|
h1::{Codec, ExpectHandler, UpgradeHandler},
|
||||||
|
service::HttpFlow,
|
||||||
|
test::{TestBuffer, TestSeqBuffer},
|
||||||
|
Error, HttpMessage, KeepAlive, Method, OnConnectData, Request, Response, StatusCode,
|
||||||
|
};
|
||||||
|
|
||||||
|
fn find_slice(haystack: &[u8], needle: &[u8], from: usize) -> Option<usize> {
|
||||||
|
memchr::memmem::find(&haystack[from..], needle)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn stabilize_date_header(payload: &mut [u8]) {
|
||||||
|
let mut from = 0;
|
||||||
|
while let Some(pos) = find_slice(payload, b"date", from) {
|
||||||
|
payload[(from + pos)..(from + pos + 35)]
|
||||||
|
.copy_from_slice(b"date: Thu, 01 Jan 1970 12:34:56 UTC");
|
||||||
|
from += 35;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn ok_service() -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error> {
|
||||||
|
status_service(StatusCode::OK)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn status_service(
|
||||||
|
status: StatusCode,
|
||||||
|
) -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error> {
|
||||||
|
fn_service(move |_req: Request| ready(Ok::<_, Error>(Response::new(status))))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn echo_path_service() -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error>
|
||||||
|
{
|
||||||
|
fn_service(|req: Request| {
|
||||||
|
let path = req.path().as_bytes();
|
||||||
|
ready(Ok::<_, Error>(
|
||||||
|
Response::ok().set_body(Bytes::copy_from_slice(path)),
|
||||||
|
))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn drop_payload_service() -> impl Service<Request, Response = Response<&'static str>, Error = Error>
|
||||||
|
{
|
||||||
|
fn_service(|mut req: Request| async move {
|
||||||
|
let _ = req.take_payload();
|
||||||
|
Ok::<_, Error>(Response::with_body(StatusCode::OK, "payload dropped"))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn echo_payload_service() -> impl Service<Request, Response = Response<Bytes>, Error = Error> {
|
||||||
|
fn_service(|mut req: Request| {
|
||||||
|
Box::pin(async move {
|
||||||
|
use futures_util::StreamExt as _;
|
||||||
|
|
||||||
|
let mut pl = req.take_payload();
|
||||||
|
let mut body = BytesMut::new();
|
||||||
|
while let Some(chunk) = pl.next().await {
|
||||||
|
body.extend_from_slice(chunk.unwrap().chunk())
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok::<_, Error>(Response::ok().set_body(body.freeze()))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn late_request() {
|
||||||
|
let mut buf = TestBuffer::empty();
|
||||||
|
|
||||||
|
let cfg = ServiceConfig::new(
|
||||||
|
KeepAlive::Disabled,
|
||||||
|
Duration::from_millis(100),
|
||||||
|
Duration::ZERO,
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
let services = HttpFlow::new(ok_service(), ExpectHandler, None);
|
||||||
|
|
||||||
|
let h1 = Dispatcher::<_, _, _, _, UpgradeHandler>::new(
|
||||||
|
buf.clone(),
|
||||||
|
services,
|
||||||
|
cfg,
|
||||||
|
None,
|
||||||
|
OnConnectData::default(),
|
||||||
|
);
|
||||||
|
pin!(h1);
|
||||||
|
|
||||||
|
lazy(|cx| {
|
||||||
|
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||||
|
|
||||||
|
match h1.as_mut().poll(cx) {
|
||||||
|
Poll::Ready(_) => panic!("first poll should not be ready"),
|
||||||
|
Poll::Pending => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// polls: initial
|
||||||
|
assert_eq!(h1.poll_count, 1);
|
||||||
|
|
||||||
|
buf.extend_read_buf("GET /abcd HTTP/1.1\r\nConnection: close\r\n\r\n");
|
||||||
|
|
||||||
|
match h1.as_mut().poll(cx) {
|
||||||
|
Poll::Pending => panic!("second poll should not be pending"),
|
||||||
|
Poll::Ready(res) => assert!(res.is_ok()),
|
||||||
|
}
|
||||||
|
|
||||||
|
// polls: initial pending => handle req => shutdown
|
||||||
|
assert_eq!(h1.poll_count, 3);
|
||||||
|
|
||||||
|
let mut res = buf.take_write_buf().to_vec();
|
||||||
|
stabilize_date_header(&mut res);
|
||||||
|
let res = &res[..];
|
||||||
|
|
||||||
|
let exp = b"\
|
||||||
|
HTTP/1.1 200 OK\r\n\
|
||||||
|
content-length: 0\r\n\
|
||||||
|
connection: close\r\n\
|
||||||
|
date: Thu, 01 Jan 1970 12:34:56 UTC\r\n\r\n\
|
||||||
|
";
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
res,
|
||||||
|
exp,
|
||||||
|
"\nexpected response not in write buffer:\n\
|
||||||
|
response: {:?}\n\
|
||||||
|
expected: {:?}",
|
||||||
|
String::from_utf8_lossy(res),
|
||||||
|
String::from_utf8_lossy(exp)
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn oneshot_connection() {
|
||||||
|
let buf = TestBuffer::new("GET /abcd HTTP/1.1\r\n\r\n");
|
||||||
|
|
||||||
|
let cfg = ServiceConfig::new(
|
||||||
|
KeepAlive::Disabled,
|
||||||
|
Duration::from_millis(100),
|
||||||
|
Duration::ZERO,
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
let services = HttpFlow::new(echo_path_service(), ExpectHandler, None);
|
||||||
|
|
||||||
|
let h1 = Dispatcher::<_, _, _, _, UpgradeHandler>::new(
|
||||||
|
buf.clone(),
|
||||||
|
services,
|
||||||
|
cfg,
|
||||||
|
None,
|
||||||
|
OnConnectData::default(),
|
||||||
|
);
|
||||||
|
pin!(h1);
|
||||||
|
|
||||||
|
lazy(|cx| {
|
||||||
|
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||||
|
|
||||||
|
match h1.as_mut().poll(cx) {
|
||||||
|
Poll::Pending => panic!("first poll should not be pending"),
|
||||||
|
Poll::Ready(res) => assert!(res.is_ok()),
|
||||||
|
}
|
||||||
|
|
||||||
|
// polls: initial => shutdown
|
||||||
|
assert_eq!(h1.poll_count, 2);
|
||||||
|
|
||||||
|
let mut res = buf.take_write_buf().to_vec();
|
||||||
|
stabilize_date_header(&mut res);
|
||||||
|
let res = &res[..];
|
||||||
|
|
||||||
|
let exp = http_msg(
|
||||||
|
r"
|
||||||
|
HTTP/1.1 200 OK
|
||||||
|
content-length: 5
|
||||||
|
connection: close
|
||||||
|
date: Thu, 01 Jan 1970 12:34:56 UTC
|
||||||
|
|
||||||
|
/abcd
|
||||||
|
",
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
res,
|
||||||
|
exp,
|
||||||
|
"\nexpected response not in write buffer:\n\
|
||||||
|
response: {:?}\n\
|
||||||
|
expected: {:?}",
|
||||||
|
String::from_utf8_lossy(res),
|
||||||
|
String::from_utf8_lossy(&exp)
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn keep_alive_timeout() {
|
||||||
|
let buf = TestBuffer::new("GET /abcd HTTP/1.1\r\n\r\n");
|
||||||
|
|
||||||
|
let cfg = ServiceConfig::new(
|
||||||
|
KeepAlive::Timeout(Duration::from_millis(200)),
|
||||||
|
Duration::from_millis(100),
|
||||||
|
Duration::ZERO,
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
let services = HttpFlow::new(echo_path_service(), ExpectHandler, None);
|
||||||
|
|
||||||
|
let h1 = Dispatcher::<_, _, _, _, UpgradeHandler>::new(
|
||||||
|
buf.clone(),
|
||||||
|
services,
|
||||||
|
cfg,
|
||||||
|
None,
|
||||||
|
OnConnectData::default(),
|
||||||
|
);
|
||||||
|
pin!(h1);
|
||||||
|
|
||||||
|
lazy(|cx| {
|
||||||
|
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||||
|
|
||||||
|
assert!(
|
||||||
|
h1.as_mut().poll(cx).is_pending(),
|
||||||
|
"keep-alive should prevent poll from resolving"
|
||||||
|
);
|
||||||
|
|
||||||
|
// polls: initial
|
||||||
|
assert_eq!(h1.poll_count, 1);
|
||||||
|
|
||||||
|
let mut res = buf.take_write_buf().to_vec();
|
||||||
|
stabilize_date_header(&mut res);
|
||||||
|
let res = &res[..];
|
||||||
|
|
||||||
|
let exp = b"\
|
||||||
|
HTTP/1.1 200 OK\r\n\
|
||||||
|
content-length: 5\r\n\
|
||||||
|
date: Thu, 01 Jan 1970 12:34:56 UTC\r\n\r\n\
|
||||||
|
/abcd\
|
||||||
|
";
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
res,
|
||||||
|
exp,
|
||||||
|
"\nexpected response not in write buffer:\n\
|
||||||
|
response: {:?}\n\
|
||||||
|
expected: {:?}",
|
||||||
|
String::from_utf8_lossy(res),
|
||||||
|
String::from_utf8_lossy(exp)
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
|
||||||
|
// sleep slightly longer than keep-alive timeout
|
||||||
|
sleep(Duration::from_millis(250)).await;
|
||||||
|
|
||||||
|
lazy(|cx| {
|
||||||
|
assert!(
|
||||||
|
h1.as_mut().poll(cx).is_ready(),
|
||||||
|
"keep-alive should have resolved",
|
||||||
|
);
|
||||||
|
|
||||||
|
// polls: initial => keep-alive wake-up shutdown
|
||||||
|
assert_eq!(h1.poll_count, 2);
|
||||||
|
|
||||||
|
if let DispatcherStateProj::Normal { inner } = h1.project().inner.project() {
|
||||||
|
// connection closed
|
||||||
|
assert!(inner.flags.contains(Flags::SHUTDOWN));
|
||||||
|
assert!(inner.flags.contains(Flags::WRITE_DISCONNECT));
|
||||||
|
// and nothing added to write buffer
|
||||||
|
assert!(buf.write_buf_slice().is_empty());
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn keep_alive_follow_up_req() {
|
||||||
|
let mut buf = TestBuffer::new("GET /abcd HTTP/1.1\r\n\r\n");
|
||||||
|
|
||||||
|
let cfg = ServiceConfig::new(
|
||||||
|
KeepAlive::Timeout(Duration::from_millis(500)),
|
||||||
|
Duration::from_millis(100),
|
||||||
|
Duration::ZERO,
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
let services = HttpFlow::new(echo_path_service(), ExpectHandler, None);
|
||||||
|
|
||||||
|
let h1 = Dispatcher::<_, _, _, _, UpgradeHandler>::new(
|
||||||
|
buf.clone(),
|
||||||
|
services,
|
||||||
|
cfg,
|
||||||
|
None,
|
||||||
|
OnConnectData::default(),
|
||||||
|
);
|
||||||
|
pin!(h1);
|
||||||
|
|
||||||
|
lazy(|cx| {
|
||||||
|
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||||
|
|
||||||
|
assert!(
|
||||||
|
h1.as_mut().poll(cx).is_pending(),
|
||||||
|
"keep-alive should prevent poll from resolving"
|
||||||
|
);
|
||||||
|
|
||||||
|
// polls: initial
|
||||||
|
assert_eq!(h1.poll_count, 1);
|
||||||
|
|
||||||
|
let mut res = buf.take_write_buf().to_vec();
|
||||||
|
stabilize_date_header(&mut res);
|
||||||
|
let res = &res[..];
|
||||||
|
|
||||||
|
let exp = b"\
|
||||||
|
HTTP/1.1 200 OK\r\n\
|
||||||
|
content-length: 5\r\n\
|
||||||
|
date: Thu, 01 Jan 1970 12:34:56 UTC\r\n\r\n\
|
||||||
|
/abcd\
|
||||||
|
";
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
res,
|
||||||
|
exp,
|
||||||
|
"\nexpected response not in write buffer:\n\
|
||||||
|
response: {:?}\n\
|
||||||
|
expected: {:?}",
|
||||||
|
String::from_utf8_lossy(res),
|
||||||
|
String::from_utf8_lossy(exp)
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
|
||||||
|
// sleep for less than KA timeout
|
||||||
|
sleep(Duration::from_millis(100)).await;
|
||||||
|
|
||||||
|
lazy(|cx| {
|
||||||
|
assert!(
|
||||||
|
h1.as_mut().poll(cx).is_pending(),
|
||||||
|
"keep-alive should not have resolved dispatcher yet",
|
||||||
|
);
|
||||||
|
|
||||||
|
// polls: initial => manual
|
||||||
|
assert_eq!(h1.poll_count, 2);
|
||||||
|
|
||||||
|
if let DispatcherStateProj::Normal { inner } = h1.as_mut().project().inner.project() {
|
||||||
|
// connection not closed
|
||||||
|
assert!(!inner.flags.contains(Flags::SHUTDOWN));
|
||||||
|
assert!(!inner.flags.contains(Flags::WRITE_DISCONNECT));
|
||||||
|
// and nothing added to write buffer
|
||||||
|
assert!(buf.write_buf_slice().is_empty());
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
|
||||||
|
lazy(|cx| {
|
||||||
|
buf.extend_read_buf(
|
||||||
|
"\
|
||||||
|
GET /efg HTTP/1.1\r\n\
|
||||||
|
Connection: close\r\n\
|
||||||
|
\r\n\r\n",
|
||||||
|
);
|
||||||
|
|
||||||
|
assert!(
|
||||||
|
h1.as_mut().poll(cx).is_ready(),
|
||||||
|
"connection close header should override keep-alive setting",
|
||||||
|
);
|
||||||
|
|
||||||
|
// polls: initial => manual => follow-up req => shutdown
|
||||||
|
assert_eq!(h1.poll_count, 4);
|
||||||
|
|
||||||
|
if let DispatcherStateProj::Normal { inner } = h1.as_mut().project().inner.project() {
|
||||||
|
// connection closed
|
||||||
|
assert!(inner.flags.contains(Flags::SHUTDOWN));
|
||||||
|
assert!(!inner.flags.contains(Flags::WRITE_DISCONNECT));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut res = buf.take_write_buf().to_vec();
|
||||||
|
stabilize_date_header(&mut res);
|
||||||
|
let res = &res[..];
|
||||||
|
|
||||||
|
let exp = b"\
|
||||||
|
HTTP/1.1 200 OK\r\n\
|
||||||
|
content-length: 4\r\n\
|
||||||
|
connection: close\r\n\
|
||||||
|
date: Thu, 01 Jan 1970 12:34:56 UTC\r\n\r\n\
|
||||||
|
/efg\
|
||||||
|
";
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
res,
|
||||||
|
exp,
|
||||||
|
"\nexpected response not in write buffer:\n\
|
||||||
|
response: {:?}\n\
|
||||||
|
expected: {:?}",
|
||||||
|
String::from_utf8_lossy(res),
|
||||||
|
String::from_utf8_lossy(exp)
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn req_parse_err() {
|
||||||
|
lazy(|cx| {
|
||||||
|
let buf = TestBuffer::new("GET /test HTTP/1\r\n\r\n");
|
||||||
|
|
||||||
|
let services = HttpFlow::new(ok_service(), ExpectHandler, None);
|
||||||
|
|
||||||
|
let h1 = Dispatcher::<_, _, _, _, UpgradeHandler>::new(
|
||||||
|
buf.clone(),
|
||||||
|
services,
|
||||||
|
ServiceConfig::default(),
|
||||||
|
None,
|
||||||
|
OnConnectData::default(),
|
||||||
|
);
|
||||||
|
|
||||||
|
pin!(h1);
|
||||||
|
|
||||||
|
match h1.as_mut().poll(cx) {
|
||||||
|
Poll::Pending => panic!(),
|
||||||
|
Poll::Ready(res) => assert!(res.is_err()),
|
||||||
|
}
|
||||||
|
|
||||||
|
if let DispatcherStateProj::Normal { inner } = h1.project().inner.project() {
|
||||||
|
assert!(inner.flags.contains(Flags::READ_DISCONNECT));
|
||||||
|
assert_eq!(
|
||||||
|
&buf.write_buf_slice()[..26],
|
||||||
|
b"HTTP/1.1 400 Bad Request\r\n"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
|
||||||
|
async fn pipelining_ok_then_ok() {
|
||||||
|
lazy(|cx| {
|
||||||
|
let buf = TestBuffer::new(
|
||||||
|
"\
|
||||||
|
GET /abcd HTTP/1.1\r\n\r\n\
|
||||||
|
GET /def HTTP/1.1\r\n\r\n\
|
||||||
|
",
|
||||||
|
);
|
||||||
|
|
||||||
|
let cfg = ServiceConfig::new(
|
||||||
|
KeepAlive::Disabled,
|
||||||
|
Duration::from_millis(1),
|
||||||
|
Duration::from_millis(1),
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
|
||||||
|
let services = HttpFlow::new(echo_path_service(), ExpectHandler, None);
|
||||||
|
|
||||||
|
let h1 = Dispatcher::<_, _, _, _, UpgradeHandler>::new(
|
||||||
|
buf.clone(),
|
||||||
|
services,
|
||||||
|
cfg,
|
||||||
|
None,
|
||||||
|
OnConnectData::default(),
|
||||||
|
);
|
||||||
|
|
||||||
|
pin!(h1);
|
||||||
|
|
||||||
|
assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));
|
||||||
|
|
||||||
|
match h1.as_mut().poll(cx) {
|
||||||
|
Poll::Pending => panic!("first poll should not be pending"),
|
||||||
|
Poll::Ready(res) => assert!(res.is_ok()),
|
||||||
|
}
|
||||||
|
|
||||||
|
// polls: initial => shutdown
|
||||||
|
assert_eq!(h1.poll_count, 2);
|
||||||
|
|
||||||
|
let mut res = buf.write_buf_slice_mut();
|
||||||
|
stabilize_date_header(&mut res);
|
||||||
|
let res = &res[..];
|
||||||
|
|
||||||
|
let exp = b"\
|
||||||
|
HTTP/1.1 200 OK\r\n\
|
||||||
|
content-length: 5\r\n\
|
||||||
|
connection: close\r\n\
|
||||||
|
date: Thu, 01 Jan 1970 12:34:56 UTC\r\n\r\n\
|
||||||
|
/abcd\
|
||||||
|
HTTP/1.1 200 OK\r\n\
|
||||||
|
content-length: 4\r\n\
|
||||||
|
connection: close\r\n\
|
||||||
|
date: Thu, 01 Jan 1970 12:34:56 UTC\r\n\r\n\
|
||||||
|
/def\
|
||||||
|
";
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
res,
|
||||||
|
exp,
|
||||||
|
"\nexpected response not in write buffer:\n\
|
||||||
|
response: {:?}\n\
|
||||||
|
expected: {:?}",
|
||||||
|
String::from_utf8_lossy(res),
|
||||||
|
String::from_utf8_lossy(exp)
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.await;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[actix_rt::test]
async fn pipelining_ok_then_bad() {
    lazy(|cx| {
        let buf = TestBuffer::new(
            "\
            GET /abcd HTTP/1.1\r\n\r\n\
            GET /def HTTP/1\r\n\r\n\
            ",
        );

        let cfg = ServiceConfig::new(
            KeepAlive::Disabled,
            Duration::from_millis(1),
            Duration::from_millis(1),
            false,
            None,
        );

        let services = HttpFlow::new(echo_path_service(), ExpectHandler, None);

        let h1 = Dispatcher::<_, _, _, _, UpgradeHandler>::new(
            buf.clone(),
            services,
            cfg,
            None,
            OnConnectData::default(),
        );

        pin!(h1);

        assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));

        match h1.as_mut().poll(cx) {
            Poll::Pending => panic!("first poll should not be pending"),
            Poll::Ready(res) => assert!(res.is_err()),
        }

        // polls: initial => shutdown
        assert_eq!(h1.poll_count, 1);

        let mut res = buf.write_buf_slice_mut();
        stabilize_date_header(&mut res);
        let res = &res[..];

        let exp = b"\
        HTTP/1.1 200 OK\r\n\
        content-length: 5\r\n\
        connection: close\r\n\
        date: Thu, 01 Jan 1970 12:34:56 UTC\r\n\r\n\
        /abcd\
        HTTP/1.1 400 Bad Request\r\n\
        content-length: 0\r\n\
        connection: close\r\n\
        date: Thu, 01 Jan 1970 12:34:56 UTC\r\n\r\n\
        ";

        assert_eq!(
            res,
            exp,
            "\nexpected response not in write buffer:\n\
             response: {:?}\n\
             expected: {:?}",
            String::from_utf8_lossy(res),
            String::from_utf8_lossy(exp)
        );
    })
    .await;
}

#[actix_rt::test]
async fn expect_handling() {
    lazy(|cx| {
        let mut buf = TestSeqBuffer::empty();
        let cfg = ServiceConfig::new(
            KeepAlive::Disabled,
            Duration::ZERO,
            Duration::ZERO,
            false,
            None,
        );

        let services = HttpFlow::new(echo_payload_service(), ExpectHandler, None);

        let h1 = Dispatcher::<_, _, _, _, UpgradeHandler>::new(
            buf.clone(),
            services,
            cfg,
            None,
            OnConnectData::default(),
        );

        buf.extend_read_buf(
            "\
            POST /upload HTTP/1.1\r\n\
            Content-Length: 5\r\n\
            Expect: 100-continue\r\n\
            \r\n\
            ",
        );

        pin!(h1);

        assert!(h1.as_mut().poll(cx).is_pending());
        assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));

        // polls: manual
        assert_eq!(h1.poll_count, 1);

        if let DispatcherState::Normal { ref inner } = h1.inner {
            let io = inner.io.as_ref().unwrap();
            let res = &io.write_buf()[..];
            assert_eq!(
                str::from_utf8(res).unwrap(),
                "HTTP/1.1 100 Continue\r\n\r\n"
            );
        }

        buf.extend_read_buf("12345");
        assert!(h1.as_mut().poll(cx).is_ready());

        // polls: manual manual shutdown
        assert_eq!(h1.poll_count, 3);

        if let DispatcherState::Normal { ref inner } = h1.inner {
            let io = inner.io.as_ref().unwrap();
            let mut res = io.write_buf()[..].to_owned();
            stabilize_date_header(&mut res);

            assert_eq!(
                str::from_utf8(&res).unwrap(),
                "\
                HTTP/1.1 100 Continue\r\n\
                \r\n\
                HTTP/1.1 200 OK\r\n\
                content-length: 5\r\n\
                connection: close\r\n\
                date: Thu, 01 Jan 1970 12:34:56 UTC\r\n\
                \r\n\
                12345\
                "
            );
        }
    })
    .await;
}

#[actix_rt::test]
async fn expect_eager() {
    lazy(|cx| {
        let mut buf = TestSeqBuffer::empty();
        let cfg = ServiceConfig::new(
            KeepAlive::Disabled,
            Duration::ZERO,
            Duration::ZERO,
            false,
            None,
        );

        let services = HttpFlow::new(echo_path_service(), ExpectHandler, None);

        let h1 = Dispatcher::<_, _, _, _, UpgradeHandler>::new(
            buf.clone(),
            services,
            cfg,
            None,
            OnConnectData::default(),
        );

        buf.extend_read_buf(
            "\
            POST /upload HTTP/1.1\r\n\
            Content-Length: 5\r\n\
            Expect: 100-continue\r\n\
            \r\n\
            ",
        );

        pin!(h1);

        assert!(h1.as_mut().poll(cx).is_ready());
        assert!(matches!(&h1.inner, DispatcherState::Normal { .. }));

        // polls: manual shutdown
        assert_eq!(h1.poll_count, 2);

        if let DispatcherState::Normal { ref inner } = h1.inner {
            let io = inner.io.as_ref().unwrap();
            let mut res = io.write_buf()[..].to_owned();
            stabilize_date_header(&mut res);

            // Despite the content-length header and even though the request payload has not
            // been sent, this test expects a complete service response since the payload
            // is not used at all. The service passed to dispatcher is path echo and doesn't
            // consume payload bytes.
            assert_eq!(
                str::from_utf8(&res).unwrap(),
                "\
                HTTP/1.1 100 Continue\r\n\
                \r\n\
                HTTP/1.1 200 OK\r\n\
                content-length: 7\r\n\
                connection: close\r\n\
                date: Thu, 01 Jan 1970 12:34:56 UTC\r\n\
                \r\n\
                /upload\
                "
            );
        }
    })
    .await;
}

#[actix_rt::test]
async fn upgrade_handling() {
    struct TestUpgrade;

    impl<T> Service<(Request, Framed<T, Codec>)> for TestUpgrade {
        type Response = ();
        type Error = Error;
        type Future = Ready<Result<Self::Response, Self::Error>>;

        actix_service::always_ready!();

        fn call(&self, (req, _framed): (Request, Framed<T, Codec>)) -> Self::Future {
            assert_eq!(req.method(), Method::GET);
            assert!(req.upgrade());
            assert_eq!(req.headers().get("upgrade").unwrap(), "websocket");
            ready(Ok(()))
        }
    }

    lazy(|cx| {
        let mut buf = TestSeqBuffer::empty();
        let cfg = ServiceConfig::new(
            KeepAlive::Disabled,
            Duration::ZERO,
            Duration::ZERO,
            false,
            None,
        );

        let services = HttpFlow::new(ok_service(), ExpectHandler, Some(TestUpgrade));

        let h1 = Dispatcher::<_, _, _, _, TestUpgrade>::new(
            buf.clone(),
            services,
            cfg,
            None,
            OnConnectData::default(),
        );

        buf.extend_read_buf(
            "\
            GET /ws HTTP/1.1\r\n\
            Connection: Upgrade\r\n\
            Upgrade: websocket\r\n\
            \r\n\
            ",
        );

        pin!(h1);

        assert!(h1.as_mut().poll(cx).is_ready());
        assert!(matches!(&h1.inner, DispatcherState::Upgrade { .. }));

        // polls: manual shutdown
        assert_eq!(h1.poll_count, 2);
    })
    .await;
}

// fix in #2624 reverted temporarily
// complete fix tracked in #2745
#[ignore]
#[actix_rt::test]
async fn handler_drop_payload() {
    let _ = env_logger::try_init();

    let mut buf = TestBuffer::new(http_msg(
        r"
        POST /drop-payload HTTP/1.1
        Content-Length: 3

        abc
        ",
    ));

    let services = HttpFlow::new(
        drop_payload_service(),
        ExpectHandler,
        None::<UpgradeHandler>,
    );

    let h1 = Dispatcher::new(
        buf.clone(),
        services,
        ServiceConfig::default(),
        None,
        OnConnectData::default(),
    );
    pin!(h1);

    lazy(|cx| {
        assert!(h1.as_mut().poll(cx).is_pending());

        // polls: manual
        assert_eq!(h1.poll_count, 1);

        let mut res = BytesMut::from(buf.take_write_buf().as_ref());
        stabilize_date_header(&mut res);
        let res = &res[..];

        let exp = http_msg(
            r"
            HTTP/1.1 200 OK
            content-length: 15
            date: Thu, 01 Jan 1970 12:34:56 UTC

            payload dropped
            ",
        );

        assert_eq!(
            res,
            exp,
            "\nexpected response not in write buffer:\n\
             response: {:?}\n\
             expected: {:?}",
            String::from_utf8_lossy(res),
            String::from_utf8_lossy(&exp)
        );

        if let DispatcherStateProj::Normal { inner } = h1.as_mut().project().inner.project() {
            assert!(inner.state.is_none());
        }
    })
    .await;

    lazy(|cx| {
        // add message that claims to have payload longer than provided
        buf.extend_read_buf(http_msg(
            r"
            POST /drop-payload HTTP/1.1
            Content-Length: 200

            abc
            ",
        ));

        assert!(h1.as_mut().poll(cx).is_pending());

        // polls: manual => manual
        assert_eq!(h1.poll_count, 2);

        let mut res = BytesMut::from(buf.take_write_buf().as_ref());
        stabilize_date_header(&mut res);
        let res = &res[..];

        // expect response immediately even though request side has not finished reading payload
        let exp = http_msg(
            r"
            HTTP/1.1 200 OK
            content-length: 15
            date: Thu, 01 Jan 1970 12:34:56 UTC

            payload dropped
            ",
        );

        assert_eq!(
            res,
            exp,
            "\nexpected response not in write buffer:\n\
             response: {:?}\n\
             expected: {:?}",
            String::from_utf8_lossy(res),
            String::from_utf8_lossy(&exp)
        );
    })
    .await;

    lazy(|cx| {
        assert!(h1.as_mut().poll(cx).is_ready());

        // polls: manual => manual => manual
        assert_eq!(h1.poll_count, 3);

        let mut res = BytesMut::from(buf.take_write_buf().as_ref());
        stabilize_date_header(&mut res);
        let res = &res[..];

        // expect that unrequested error response is sent back since connection could not be cleaned
        let exp = http_msg(
            r"
            HTTP/1.1 500 Internal Server Error
            content-length: 0
            connection: close
            date: Thu, 01 Jan 1970 12:34:56 UTC

            ",
        );

        assert_eq!(
            res,
            exp,
            "\nexpected response not in write buffer:\n\
             response: {:?}\n\
             expected: {:?}",
            String::from_utf8_lossy(res),
            String::from_utf8_lossy(&exp)
        );
    })
    .await;
}

fn http_msg(msg: impl AsRef<str>) -> BytesMut {
    let mut msg = msg
        .as_ref()
        .trim()
        .split('\n')
        .map(|line| [line.trim_start(), "\r"].concat())
        .collect::<Vec<_>>()
        .join("\n");

    // remove trailing \r
    msg.pop();

    if !msg.is_empty() && !msg.contains("\r\n\r\n") {
        msg.push_str("\r\n\r\n");
    }

    BytesMut::from(msg.as_bytes())
}

#[test]
fn http_msg_creates_msg() {
    assert_eq!(http_msg(r""), "");

    assert_eq!(
        http_msg(
            r"
            POST / HTTP/1.1
            Content-Length: 3

            abc
            "
        ),
        "POST / HTTP/1.1\r\nContent-Length: 3\r\n\r\nabc"
    );

    assert_eq!(
        http_msg(
            r"
            GET / HTTP/1.1
            Content-Length: 3

            "
        ),
        "GET / HTTP/1.1\r\nContent-Length: 3\r\n\r\n"
    );
}

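All of the dispatcher tests above share one shape: build an in-memory IO buffer, construct the Dispatcher over it, pin it, and drive it by hand with lazy-provided poll contexts. The sketch below condenses that pattern; it is illustrative only, reuses the same in-crate test helpers shown above (TestBuffer, echo_path_service, ExpectHandler, stabilize helpers), and is not an additional test from the source tree.

// Sketch only: the shared shape of the dispatcher tests above.
#[actix_rt::test]
async fn dispatcher_poll_pattern_sketch() {
    lazy(|cx| {
        let buf = TestBuffer::new("GET / HTTP/1.1\r\n\r\n");

        let cfg = ServiceConfig::new(
            KeepAlive::Disabled,
            Duration::from_millis(1),
            Duration::from_millis(1),
            false,
            None,
        );

        let services = HttpFlow::new(echo_path_service(), ExpectHandler, None);

        let h1 = Dispatcher::<_, _, _, _, UpgradeHandler>::new(
            buf.clone(),
            services,
            cfg,
            None,
            OnConnectData::default(),
        );
        pin!(h1);

        // a single poll reads the request from the in-memory buffer, runs the
        // service, and writes the response back into the same buffer
        let _ = h1.as_mut().poll(cx);
        assert_eq!(&buf.write_buf_slice()[..17], b"HTTP/1.1 200 OK\r\n");
    })
    .await;
}
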
@@ -105,7 +105,7 @@ pub(crate) trait MessageType: Sized {
             }
             BodySize::Sized(0) if camel_case => dst.put_slice(b"\r\nContent-Length: 0\r\n"),
             BodySize::Sized(0) => dst.put_slice(b"\r\ncontent-length: 0\r\n"),
-            BodySize::Sized(len) => helpers::write_content_length(len, dst),
+            BodySize::Sized(len) => helpers::write_content_length(len, dst, camel_case),
             BodySize::None => dst.put_slice(b"\r\n"),
         }

@@ -152,7 +152,6 @@ pub(crate) trait MessageType: Sized {
             let k = key.as_str().as_bytes();
             let k_len = k.len();

-            // TODO: drain?
             for val in value.iter() {
                 let v = val.as_ref();
                 let v_len = v.len();
@@ -211,14 +210,14 @@ pub(crate) trait MessageType: Sized {
             dst.advance_mut(pos);
         }

-        // optimized date header, set_date writes \r\n
         if !has_date {
-            config.set_date(dst);
-        } else {
-            // msg eof
-            dst.extend_from_slice(b"\r\n");
+            // optimized date header, write_date_header writes its own \r\n
+            config.write_date_header(dst, camel_case);
         }

+        // end-of-headers marker
+        dst.extend_from_slice(b"\r\n");
+
         Ok(())
     }

@@ -258,6 +257,12 @@ impl MessageType for Response<()> {
         None
     }

+    fn camel_case(&self) -> bool {
+        self.head()
+            .flags
+            .contains(crate::message::Flags::CAMEL_CASE)
+    }
+
     fn encode_status(&mut self, dst: &mut BytesMut) -> io::Result<()> {
         let head = self.head();
         let reason = head.reason().as_bytes();
@@ -308,21 +313,22 @@ impl MessageType for RequestHeadType {
                 _ => return Err(io::Error::new(io::ErrorKind::Other, "unsupported version")),
             }
         )
-        .map_err(|e| io::Error::new(io::ErrorKind::Other, e))
+        .map_err(|err| io::Error::new(io::ErrorKind::Other, err))
     }
 }

 impl<T: MessageType> MessageEncoder<T> {
-    /// Encode message
+    /// Encode chunk.
     pub fn encode_chunk(&mut self, msg: &[u8], buf: &mut BytesMut) -> io::Result<bool> {
         self.te.encode(msg, buf)
     }

-    /// Encode eof
+    /// Encode EOF.
     pub fn encode_eof(&mut self, buf: &mut BytesMut) -> io::Result<()> {
         self.te.encode_eof(buf)
     }

+    /// Encode message.
     pub fn encode(
         &mut self,
         dst: &mut BytesMut,
@@ -427,7 +433,7 @@ impl TransferEncoding {
             buf.extend_from_slice(b"0\r\n\r\n");
         } else {
             writeln!(helpers::MutWriter(buf), "{:X}\r", msg.len())
-                .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
+                .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;

             buf.reserve(msg.len() + 2);
             buf.extend_from_slice(msg);
@@ -444,7 +450,7 @@ impl TransferEncoding {

                     buf.extend_from_slice(&msg[..len as usize]);

-                    *remaining -= len as u64;
+                    *remaining -= len;
                     Ok(*remaining == 0)
                 } else {
                     Ok(true)
@@ -511,6 +517,7 @@ unsafe fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
             if let Some(c @ b'a'..=b'z') = iter.next() {
                 buffer[index] = c & 0b1101_1111;
             }
+            index += 1;
         }

         index += 1;
@@ -522,7 +529,7 @@ mod tests {
     use std::rc::Rc;

     use bytes::Bytes;
-    use http::header::AUTHORIZATION;
+    use http::header::{AUTHORIZATION, UPGRADE_INSECURE_REQUESTS};

     use super::*;
     use crate::{
@@ -553,6 +560,9 @@ mod tests {
         head.headers
             .insert(CONTENT_TYPE, HeaderValue::from_static("plain/text"));

+        head.headers
+            .insert(UPGRADE_INSECURE_REQUESTS, HeaderValue::from_static("1"));
+
         let mut head = RequestHeadType::Owned(head);

         let _ = head.encode_headers(
@@ -568,6 +578,7 @@ mod tests {
         assert!(data.contains("Connection: close\r\n"));
         assert!(data.contains("Content-Type: plain/text\r\n"));
         assert!(data.contains("Date: date\r\n"));
+        assert!(data.contains("Upgrade-Insecure-Requests: 1\r\n"));

         let _ = head.encode_headers(
             &mut bytes,
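The camel_case flag threaded into write_content_length above selects between the "Content-Length" and "content-length" spellings when the header is emitted. The standalone sketch below only illustrates that selection; it is not the crate's internal helper (whose buffer handling is more involved), and the function name is hypothetical.

use bytes::{BufMut, BytesMut};

// Illustrative only: write a Content-Length header in either casing,
// mirroring what the camel_case flag chooses in the encoder.
fn write_content_length_sketch(len: u64, dst: &mut BytesMut, camel_case: bool) {
    if camel_case {
        dst.put_slice(b"\r\nContent-Length: ");
    } else {
        dst.put_slice(b"\r\ncontent-length: ");
    }
    dst.put_slice(len.to_string().as_bytes());
    dst.put_slice(b"\r\n");
}
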
@@ -7,28 +7,34 @@ mod client;
 mod codec;
 mod decoder;
 mod dispatcher;
+#[cfg(test)]
+mod dispatcher_tests;
 mod encoder;
 mod expect;
 mod payload;
 mod service;
+mod timer;
 mod upgrade;
 mod utils;

-pub use self::client::{ClientCodec, ClientPayloadCodec};
-pub use self::codec::Codec;
-pub use self::dispatcher::Dispatcher;
-pub use self::expect::ExpectHandler;
-pub use self::payload::Payload;
-pub use self::service::{H1Service, H1ServiceHandler};
-pub use self::upgrade::UpgradeHandler;
-pub use self::utils::SendResponse;
+pub use self::{
+    client::{ClientCodec, ClientPayloadCodec},
+    codec::Codec,
+    dispatcher::Dispatcher,
+    expect::ExpectHandler,
+    payload::Payload,
+    service::{H1Service, H1ServiceHandler},
+    upgrade::UpgradeHandler,
+    utils::SendResponse,
+};

 #[derive(Debug)]
 /// Codec message
 pub enum Message<T> {
-    /// Http message
+    /// HTTP message.
     Item(T),
-    /// Payload chunk
+
+    /// Payload chunk.
     Chunk(Option<Bytes>),
 }

@@ -16,7 +16,7 @@ use crate::error::PayloadError;
 /// max buffer size 32k
 pub(crate) const MAX_BUFFER_SIZE: usize = 32_768;

-#[derive(Debug, PartialEq)]
+#[derive(Debug, PartialEq, Eq)]
 pub enum PayloadStatus {
     Read,
     Pause,
@@ -117,6 +117,7 @@ impl PayloadSender {
         }
     }

+    #[allow(clippy::needless_pass_by_ref_mut)]
     #[inline]
     pub fn need_read(&self, cx: &mut Context<'_>) -> PayloadStatus {
         // we check need_read only if Payload (other side) is alive,
@@ -174,7 +175,7 @@ impl Inner {

     /// Register future waiting data from payload.
     /// Waker would be used in `Inner::wake`
-    fn register(&mut self, cx: &mut Context<'_>) {
+    fn register(&mut self, cx: &Context<'_>) {
         if self
             .task
             .as_ref()
@@ -186,7 +187,7 @@ impl Inner {

     // Register future feeding data to payload.
     /// Waker would be used in `Inner::wake_io`
-    fn register_io(&mut self, cx: &mut Context<'_>) {
+    fn register_io(&mut self, cx: &Context<'_>) {
         if self
             .io_task
             .as_ref()
@@ -221,7 +222,7 @@ impl Inner {

     fn poll_next(
         mut self: Pin<&mut Self>,
-        cx: &mut Context<'_>,
+        cx: &Context<'_>,
     ) -> Poll<Option<Result<Bytes, PayloadError>>> {
         if let Some(data) = self.items.pop_front() {
             self.len -= data.len();
@@ -252,18 +253,15 @@ impl Inner {

 #[cfg(test)]
 mod tests {
-    use std::panic::{RefUnwindSafe, UnwindSafe};
-
     use actix_utils::future::poll_fn;
     use static_assertions::{assert_impl_all, assert_not_impl_any};

     use super::*;

     assert_impl_all!(Payload: Unpin);
-    assert_not_impl_any!(Payload: Send, Sync, UnwindSafe, RefUnwindSafe);
+    assert_not_impl_any!(Payload: Send, Sync);

     assert_impl_all!(Inner: Unpin, Send, Sync);
-    assert_not_impl_any!(Inner: UnwindSafe, RefUnwindSafe);

     #[actix_rt::test]
     async fn test_unread_data() {
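The &mut Context<'_> to &Context<'_> changes above work because registering interest only needs shared access to the task context: cloning the waker is a &self operation. A minimal illustration (the function name is hypothetical):

use std::task::{Context, Waker};

// Illustrative only: storing the current task's waker needs no mutable
// access to the Context.
fn remember_waker(cx: &Context<'_>, slot: &mut Option<Waker>) {
    *slot = Some(cx.waker().clone());
}
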
@@ -13,7 +13,9 @@ use actix_service::{
 };
 use actix_utils::future::ready;
 use futures_core::future::LocalBoxFuture;
+use tracing::error;

+use super::{codec::Codec, dispatcher::Dispatcher, ExpectHandler, UpgradeHandler};
 use crate::{
     body::{BoxBody, MessageBody},
     config::ServiceConfig,
@@ -22,8 +24,6 @@ use crate::{
     ConnectCallback, OnConnectData, Request, Response,
 };

-use super::{codec::Codec, dispatcher::Dispatcher, ExpectHandler, UpgradeHandler};
-
 /// `ServiceFactory` implementation for HTTP1 transport
 pub struct H1Service<T, S, B, X = ExpectHandler, U = UpgradeHandler> {
     srv: S,
@@ -81,13 +81,8 @@ where
     /// Create simple tcp stream service
     pub fn tcp(
         self,
-    ) -> impl ServiceFactory<
-        TcpStream,
-        Config = (),
-        Response = (),
-        Error = DispatchError,
-        InitError = (),
-    > {
+    ) -> impl ServiceFactory<TcpStream, Config = (), Response = (), Error = DispatchError, InitError = ()>
+    {
         fn_service(|io: TcpStream| {
             let peer_addr = io.peer_addr().ok();
             ready(Ok((io, peer_addr)))
@@ -98,8 +93,6 @@ where

 #[cfg(feature = "openssl")]
 mod openssl {
-    use super::*;
-
     use actix_tls::accept::{
         openssl::{
             reexports::{Error as SslError, SslAcceptor},
@@ -108,6 +101,8 @@ mod openssl {
         TlsError,
     };

+    use super::*;
+
     impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
     where
         S: ServiceFactory<Request, Config = ()>,
@@ -157,14 +152,13 @@ mod openssl {
     }
 }

-#[cfg(feature = "rustls")]
-mod rustls {
-
+#[cfg(feature = "rustls-0_20")]
+mod rustls_0_20 {
     use std::io;

     use actix_service::ServiceFactoryExt as _;
     use actix_tls::accept::{
-        rustls::{reexports::ServerConfig, Acceptor, TlsStream},
+        rustls_0_20::{reexports::ServerConfig, Acceptor, TlsStream},
         TlsError,
     };

@@ -194,7 +188,7 @@ mod rustls {
         U::Error: fmt::Display + Into<Response<BoxBody>>,
         U::InitError: fmt::Debug,
     {
-        /// Create Rustls based service.
+        /// Create Rustls v0.20 based service.
         pub fn rustls(
             self,
             config: ServerConfig,
@ -219,6 +213,189 @@ mod rustls {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rustls-0_21")]
|
||||||
|
mod rustls_0_21 {
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_service::ServiceFactoryExt as _;
|
||||||
|
use actix_tls::accept::{
|
||||||
|
rustls_0_21::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
|
TlsError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
|
||||||
|
where
|
||||||
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
S::Future: 'static,
|
||||||
|
S::Error: Into<Response<BoxBody>>,
|
||||||
|
S::InitError: fmt::Debug,
|
||||||
|
S::Response: Into<Response<B>>,
|
||||||
|
|
||||||
|
B: MessageBody,
|
||||||
|
|
||||||
|
X: ServiceFactory<Request, Config = (), Response = Request>,
|
||||||
|
X::Future: 'static,
|
||||||
|
X::Error: Into<Response<BoxBody>>,
|
||||||
|
X::InitError: fmt::Debug,
|
||||||
|
|
||||||
|
U: ServiceFactory<
|
||||||
|
(Request, Framed<TlsStream<TcpStream>, Codec>),
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
>,
|
||||||
|
U::Future: 'static,
|
||||||
|
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
||||||
|
U::InitError: fmt::Debug,
|
||||||
|
{
|
||||||
|
/// Create Rustls v0.21 based service.
|
||||||
|
pub fn rustls_021(
|
||||||
|
self,
|
||||||
|
config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
Acceptor::new(config)
|
||||||
|
.map_init_err(|_| {
|
||||||
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
|
})
|
||||||
|
.map_err(TlsError::into_service_error)
|
||||||
|
.map(|io: TlsStream<TcpStream>| {
|
||||||
|
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||||
|
(io, peer_addr)
|
||||||
|
})
|
||||||
|
.and_then(self.map_err(TlsError::Service))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rustls-0_22")]
|
||||||
|
mod rustls_0_22 {
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_service::ServiceFactoryExt as _;
|
||||||
|
use actix_tls::accept::{
|
||||||
|
rustls_0_22::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
|
TlsError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
|
||||||
|
where
|
||||||
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
S::Future: 'static,
|
||||||
|
S::Error: Into<Response<BoxBody>>,
|
||||||
|
S::InitError: fmt::Debug,
|
||||||
|
S::Response: Into<Response<B>>,
|
||||||
|
|
||||||
|
B: MessageBody,
|
||||||
|
|
||||||
|
X: ServiceFactory<Request, Config = (), Response = Request>,
|
||||||
|
X::Future: 'static,
|
||||||
|
X::Error: Into<Response<BoxBody>>,
|
||||||
|
X::InitError: fmt::Debug,
|
||||||
|
|
||||||
|
U: ServiceFactory<
|
||||||
|
(Request, Framed<TlsStream<TcpStream>, Codec>),
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
>,
|
||||||
|
U::Future: 'static,
|
||||||
|
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
||||||
|
U::InitError: fmt::Debug,
|
||||||
|
{
|
||||||
|
/// Create Rustls v0.22 based service.
|
||||||
|
pub fn rustls_0_22(
|
||||||
|
self,
|
||||||
|
config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
Acceptor::new(config)
|
||||||
|
.map_init_err(|_| {
|
||||||
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
|
})
|
||||||
|
.map_err(TlsError::into_service_error)
|
||||||
|
.map(|io: TlsStream<TcpStream>| {
|
||||||
|
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||||
|
(io, peer_addr)
|
||||||
|
})
|
||||||
|
.and_then(self.map_err(TlsError::Service))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "rustls-0_23")]
|
||||||
|
mod rustls_0_23 {
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use actix_service::ServiceFactoryExt as _;
|
||||||
|
use actix_tls::accept::{
|
||||||
|
rustls_0_23::{reexports::ServerConfig, Acceptor, TlsStream},
|
||||||
|
TlsError,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl<S, B, X, U> H1Service<TlsStream<TcpStream>, S, B, X, U>
|
||||||
|
where
|
||||||
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
|
S::Future: 'static,
|
||||||
|
S::Error: Into<Response<BoxBody>>,
|
||||||
|
S::InitError: fmt::Debug,
|
||||||
|
S::Response: Into<Response<B>>,
|
||||||
|
|
||||||
|
B: MessageBody,
|
||||||
|
|
||||||
|
X: ServiceFactory<Request, Config = (), Response = Request>,
|
||||||
|
X::Future: 'static,
|
||||||
|
X::Error: Into<Response<BoxBody>>,
|
||||||
|
X::InitError: fmt::Debug,
|
||||||
|
|
||||||
|
U: ServiceFactory<
|
||||||
|
(Request, Framed<TlsStream<TcpStream>, Codec>),
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
>,
|
||||||
|
U::Future: 'static,
|
||||||
|
U::Error: fmt::Display + Into<Response<BoxBody>>,
|
||||||
|
U::InitError: fmt::Debug,
|
||||||
|
{
|
||||||
|
/// Create Rustls v0.23 based service.
|
||||||
|
pub fn rustls_0_23(
|
||||||
|
self,
|
||||||
|
config: ServerConfig,
|
||||||
|
) -> impl ServiceFactory<
|
||||||
|
TcpStream,
|
||||||
|
Config = (),
|
||||||
|
Response = (),
|
||||||
|
Error = TlsError<io::Error, DispatchError>,
|
||||||
|
InitError = (),
|
||||||
|
> {
|
||||||
|
Acceptor::new(config)
|
||||||
|
.map_init_err(|_| {
|
||||||
|
unreachable!("TLS acceptor service factory does not error on init")
|
||||||
|
})
|
||||||
|
.map_err(TlsError::into_service_error)
|
||||||
|
.map(|io: TlsStream<TcpStream>| {
|
||||||
|
let peer_addr = io.get_ref().0.peer_addr().ok();
|
||||||
|
(io, peer_addr)
|
||||||
|
})
|
||||||
|
.and_then(self.map_err(TlsError::Service))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<T, S, B, X, U> H1Service<T, S, B, X, U>
|
impl<T, S, B, X, U> H1Service<T, S, B, X, U>
|
||||||
where
|
where
|
||||||
S: ServiceFactory<Request, Config = ()>,
|
S: ServiceFactory<Request, Config = ()>,
|
||||||
@@ -303,15 +480,15 @@ where
         let cfg = self.cfg.clone();

         Box::pin(async move {
-            let expect = expect
-                .await
-                .map_err(|e| log::error!("Init http expect service error: {:?}", e))?;
+            let expect = expect.await.map_err(|err| {
+                tracing::error!("Initialization of HTTP expect service error: {err:?}");
+            })?;

             let upgrade = match upgrade {
                 Some(upgrade) => {
-                    let upgrade = upgrade
-                        .await
-                        .map_err(|e| log::error!("Init http upgrade service error: {:?}", e))?;
+                    let upgrade = upgrade.await.map_err(|err| {
+                        tracing::error!("Initialization of HTTP upgrade service error: {err:?}");
+                    })?;
                     Some(upgrade)
                 }
                 None => None,
@@ -319,7 +496,7 @@ where

             let service = service
                 .await
-                .map_err(|e| log::error!("Init http service error: {:?}", e))?;
+                .map_err(|err| error!("Initialization of HTTP service error: {err:?}"))?;

             Ok(H1ServiceHandler::new(
                 cfg,
@@ -357,13 +534,13 @@ where

     fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
         self._poll_ready(cx).map_err(|err| {
-            log::error!("HTTP/1 service readiness error: {:?}", err);
+            error!("HTTP/1 service readiness error: {:?}", err);
             DispatchError::Service(err)
         })
     }

     fn call(&self, (io, addr): (T, Option<net::SocketAddr>)) -> Self::Future {
         let conn_data = OnConnectData::from_io(&io, self.on_connect_ext.as_deref());
-        Dispatcher::new(io, self.flow.clone(), self.cfg.clone(), addr, conn_data)
+        Dispatcher::new(io, Rc::clone(&self.flow), self.cfg.clone(), addr, conn_data)
     }
 }
||||||
|
actix-http/src/h1/timer.rs (new file, 81 lines)
@@ -0,0 +1,81 @@
use std::{fmt, future::Future, pin::Pin, task::Context};

use actix_rt::time::{Instant, Sleep};
use tracing::trace;

#[derive(Debug)]
pub(super) enum TimerState {
    Disabled,
    Inactive,
    Active { timer: Pin<Box<Sleep>> },
}

impl TimerState {
    pub(super) fn new(enabled: bool) -> Self {
        if enabled {
            Self::Inactive
        } else {
            Self::Disabled
        }
    }

    pub(super) fn is_enabled(&self) -> bool {
        matches!(self, Self::Active { .. } | Self::Inactive)
    }

    pub(super) fn set(&mut self, timer: Sleep, line: u32) {
        if matches!(self, Self::Disabled) {
            trace!("setting disabled timer from line {}", line);
        }

        *self = Self::Active {
            timer: Box::pin(timer),
        };
    }

    pub(super) fn set_and_init(&mut self, cx: &mut Context<'_>, timer: Sleep, line: u32) {
        self.set(timer, line);
        self.init(cx);
    }

    pub(super) fn clear(&mut self, line: u32) {
        if matches!(self, Self::Disabled) {
            trace!("trying to clear a disabled timer from line {}", line);
        }

        if matches!(self, Self::Inactive) {
            trace!("trying to clear an inactive timer from line {}", line);
        }

        *self = Self::Inactive;
    }

    pub(super) fn init(&mut self, cx: &mut Context<'_>) {
        if let TimerState::Active { timer } = self {
            let _ = timer.as_mut().poll(cx);
        }
    }
}

impl fmt::Display for TimerState {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            TimerState::Disabled => f.write_str("timer is disabled"),
            TimerState::Inactive => f.write_str("timer is inactive"),
            TimerState::Active { timer } => {
                let deadline = timer.deadline();
                let now = Instant::now();

                if deadline < now {
                    f.write_str("timer is active and has reached deadline")
                } else {
                    write!(
                        f,
                        "timer is active and due to expire in {} milliseconds",
                        ((deadline - now).as_secs_f32() * 1000.0)
                    )
                }
            }
        }
    }
}
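TimerState is pub(super), so its callers live elsewhere in the h1 module (the dispatcher); those call sites are not part of this excerpt. The sketch below only shows how the API introduced above would typically be driven from a poll loop; the helper names are hypothetical.

use std::{task::Context, time::Duration};

use actix_rt::time::sleep;

// Sketch only: arm a keep-alive style timer and register the task's waker.
fn arm_timer_sketch(state: &mut TimerState, cx: &mut Context<'_>, dur: Duration) {
    if state.is_enabled() {
        // store the new deadline and poll it once so the waker is registered
        state.set_and_init(cx, sleep(dur), line!());
    }
}

// Sketch only: return the timer to Inactive; clear() traces if it was disabled.
fn disarm_timer_sketch(state: &mut TimerState) {
    state.clear(line!());
}
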
|
@ -4,7 +4,7 @@ use std::{
|
|||||||
future::Future,
|
future::Future,
|
||||||
marker::PhantomData,
|
marker::PhantomData,
|
||||||
net,
|
net,
|
||||||
pin::Pin,
|
pin::{pin, Pin},
|
||||||
rc::Rc,
|
rc::Rc,
|
||||||
task::{Context, Poll},
|
task::{Context, Poll},
|
||||||
};
|
};
|
||||||
@ -19,15 +19,16 @@ use h2::{
|
|||||||
server::{Connection, SendResponse},
|
server::{Connection, SendResponse},
|
||||||
Ping, PingPong,
|
Ping, PingPong,
|
||||||
};
|
};
|
||||||
use log::{error, trace};
|
|
||||||
use pin_project_lite::pin_project;
|
use pin_project_lite::pin_project;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
body::{BodySize, BoxBody, MessageBody},
|
body::{BodySize, BoxBody, MessageBody},
|
||||||
config::ServiceConfig,
|
config::ServiceConfig,
|
||||||
header::{HeaderValue, CONNECTION, CONTENT_LENGTH, DATE, TRANSFER_ENCODING},
|
header::{
|
||||||
|
HeaderName, HeaderValue, CONNECTION, CONTENT_LENGTH, DATE, TRANSFER_ENCODING, UPGRADE,
|
||||||
|
},
|
||||||
service::HttpFlow,
|
service::HttpFlow,
|
||||||
Extensions, OnConnectData, Payload, Request, Response, ResponseHead,
|
Extensions, Method, OnConnectData, Payload, Request, Response, ResponseHead,
|
||||||
};
|
};
|
||||||
|
|
||||||
const CHUNK_SIZE: usize = 16_384;
|
const CHUNK_SIZE: usize = 16_384;
|
||||||
@ -57,15 +58,15 @@ where
|
|||||||
conn_data: OnConnectData,
|
conn_data: OnConnectData,
|
||||||
timer: Option<Pin<Box<Sleep>>>,
|
timer: Option<Pin<Box<Sleep>>>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let ping_pong = config.keep_alive().map(|dur| H2PingPong {
|
let ping_pong = config.keep_alive().duration().map(|dur| H2PingPong {
|
||||||
timer: timer
|
timer: timer
|
||||||
.map(|mut timer| {
|
.map(|mut timer| {
|
||||||
// reset timer if it's received from new function.
|
// reuse timer slot if it was initialized for handshake
|
||||||
timer.as_mut().reset(config.now() + dur);
|
timer.as_mut().reset((config.now() + dur).into());
|
||||||
timer
|
timer
|
||||||
})
|
})
|
||||||
.unwrap_or_else(|| Box::pin(sleep(dur))),
|
.unwrap_or_else(|| Box::pin(sleep(dur))),
|
||||||
on_flight: false,
|
in_flight: false,
|
||||||
ping_pong: conn.ping_pong().unwrap(),
|
ping_pong: conn.ping_pong().unwrap(),
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -82,9 +83,14 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
struct H2PingPong {
|
struct H2PingPong {
|
||||||
timer: Pin<Box<Sleep>>,
|
/// Handle to send ping frames from the peer.
|
||||||
on_flight: bool,
|
|
||||||
ping_pong: PingPong,
|
ping_pong: PingPong,
|
||||||
|
|
||||||
|
/// True when a ping has been sent and is waiting for a reply.
|
||||||
|
in_flight: bool,
|
||||||
|
|
||||||
|
/// Timeout for pong response.
|
||||||
|
timer: Pin<Box<Sleep>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T, S, B, X, U> Future for Dispatcher<T, S, B, X, U>
|
impl<T, S, B, X, U> Future for Dispatcher<T, S, B, X, U>
|
||||||
@ -111,6 +117,7 @@ where
|
|||||||
let payload = crate::h2::Payload::new(body);
|
let payload = crate::h2::Payload::new(body);
|
||||||
let pl = Payload::H2 { payload };
|
let pl = Payload::H2 { payload };
|
||||||
let mut req = Request::with_payload(pl);
|
let mut req = Request::with_payload(pl);
|
||||||
|
let head_req = parts.method == Method::HEAD;
|
||||||
|
|
||||||
let head = req.head_mut();
|
let head = req.head_mut();
|
||||||
head.uri = parts.uri;
|
head.uri = parts.uri;
|
||||||
@ -119,7 +126,7 @@ where
|
|||||||
head.headers = parts.headers.into();
|
head.headers = parts.headers.into();
|
||||||
head.peer_addr = this.peer_addr;
|
head.peer_addr = this.peer_addr;
|
||||||
|
|
||||||
req.conn_data = this.conn_data.as_ref().map(Rc::clone);
|
req.conn_data.clone_from(&this.conn_data);
|
||||||
|
|
||||||
let fut = this.flow.service.call(req);
|
let fut = this.flow.service.call(req);
|
||||||
let config = this.config.clone();
|
let config = this.config.clone();
|
||||||
@ -128,10 +135,10 @@ where
|
|||||||
actix_rt::spawn(async move {
|
actix_rt::spawn(async move {
|
||||||
// resolve service call and send response.
|
// resolve service call and send response.
|
||||||
let res = match fut.await {
|
let res = match fut.await {
|
||||||
Ok(res) => handle_response(res.into(), tx, config).await,
|
Ok(res) => handle_response(res.into(), tx, config, head_req).await,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
let res: Response<BoxBody> = err.into();
|
let res: Response<BoxBody> = err.into();
|
||||||
handle_response(res, tx, config).await
|
handle_response(res, tx, config, head_req).await
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -139,45 +146,49 @@ where
|
|||||||
if let Err(err) = res {
|
if let Err(err) = res {
|
||||||
match err {
|
match err {
|
||||||
DispatchError::SendResponse(err) => {
|
DispatchError::SendResponse(err) => {
|
||||||
trace!("Error sending HTTP/2 response: {:?}", err)
|
tracing::trace!("Error sending response: {err:?}");
|
||||||
|
}
|
||||||
|
DispatchError::SendData(err) => {
|
||||||
|
tracing::warn!("Send data error: {err:?}");
|
||||||
}
|
}
|
||||||
DispatchError::SendData(err) => warn!("{:?}", err),
|
|
||||||
DispatchError::ResponseBody(err) => {
|
DispatchError::ResponseBody(err) => {
|
||||||
error!("Response payload stream error: {:?}", err)
|
tracing::error!("Response payload stream error: {err:?}");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
Poll::Ready(None) => return Poll::Ready(Ok(())),
|
Poll::Ready(None) => return Poll::Ready(Ok(())),
|
||||||
|
|
||||||
Poll::Pending => match this.ping_pong.as_mut() {
|
Poll::Pending => match this.ping_pong.as_mut() {
|
||||||
Some(ping_pong) => loop {
|
Some(ping_pong) => loop {
|
||||||
if ping_pong.on_flight {
|
if ping_pong.in_flight {
|
||||||
// When have on flight ping pong. poll pong and and keep alive timer.
|
// When there is an in-flight ping-pong, poll pong and and keep-alive
|
||||||
// on success pong received update keep alive timer to determine the next timing of
|
// timer. On successful pong received, update keep-alive timer to
|
||||||
// ping pong.
|
// determine the next timing of ping pong.
|
||||||
match ping_pong.ping_pong.poll_pong(cx)? {
|
match ping_pong.ping_pong.poll_pong(cx)? {
|
||||||
Poll::Ready(_) => {
|
Poll::Ready(_) => {
|
||||||
ping_pong.on_flight = false;
|
ping_pong.in_flight = false;
|
||||||
|
|
||||||
let dead_line = this.config.keep_alive_expire().unwrap();
|
let dead_line = this.config.keep_alive_deadline().unwrap();
|
||||||
ping_pong.timer.as_mut().reset(dead_line);
|
ping_pong.timer.as_mut().reset(dead_line.into());
|
||||||
}
|
}
|
||||||
Poll::Pending => {
|
Poll::Pending => {
|
||||||
return ping_pong.timer.as_mut().poll(cx).map(|_| Ok(()))
|
return ping_pong.timer.as_mut().poll(cx).map(|_| Ok(()));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// When there is no on flight ping pong. keep alive timer is used to wait for next
|
// When there is no in-flight ping-pong, keep-alive timer is used to
|
||||||
// timing of ping pong. Therefore at this point it serves as an interval instead.
|
// wait for next timing of ping-pong. Therefore, at this point it serves
|
||||||
|
// as an interval instead.
|
||||||
ready!(ping_pong.timer.as_mut().poll(cx));
|
ready!(ping_pong.timer.as_mut().poll(cx));
|
||||||
|
|
||||||
ping_pong.ping_pong.send_ping(Ping::opaque())?;
|
ping_pong.ping_pong.send_ping(Ping::opaque())?;
|
||||||
|
|
||||||
let dead_line = this.config.keep_alive_expire().unwrap();
|
let dead_line = this.config.keep_alive_deadline().unwrap();
|
||||||
ping_pong.timer.as_mut().reset(dead_line);
|
ping_pong.timer.as_mut().reset(dead_line.into());
|
||||||
|
|
||||||
ping_pong.on_flight = true;
|
ping_pong.in_flight = true;
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
None => return Poll::Pending,
|
None => return Poll::Pending,
|
||||||
@ -197,6 +208,7 @@ async fn handle_response<B>(
|
|||||||
res: Response<B>,
|
res: Response<B>,
|
||||||
mut tx: SendResponse<Bytes>,
|
mut tx: SendResponse<Bytes>,
|
||||||
config: ServiceConfig,
|
config: ServiceConfig,
|
||||||
|
head_req: bool,
|
||||||
) -> Result<(), DispatchError>
|
) -> Result<(), DispatchError>
|
||||||
where
|
where
|
||||||
B: MessageBody,
|
B: MessageBody,
|
||||||
@ -206,20 +218,20 @@ where
|
|||||||
// prepare response.
|
// prepare response.
|
||||||
let mut size = body.size();
|
let mut size = body.size();
|
||||||
let res = prepare_response(config, res.head(), &mut size);
|
let res = prepare_response(config, res.head(), &mut size);
|
||||||
let eof = size.is_eof();
|
let eof_or_head = size.is_eof() || head_req;
|
||||||
|
|
||||||
// send response head and return on eof.
|
// send response head and return on eof.
|
||||||
let mut stream = tx
|
let mut stream = tx
|
||||||
.send_response(res, eof)
|
.send_response(res, eof_or_head)
|
||||||
.map_err(DispatchError::SendResponse)?;
|
.map_err(DispatchError::SendResponse)?;
|
||||||
|
|
||||||
if eof {
|
if eof_or_head {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
// poll response body and send chunks to client
|
let mut body = pin!(body);
|
||||||
actix_rt::pin!(body);
|
|
||||||
|
|
||||||
|
// poll response body and send chunks to client
|
||||||
while let Some(res) = poll_fn(|cx| body.as_mut().poll_next(cx)).await {
|
while let Some(res) = poll_fn(|cx| body.as_mut().poll_next(cx)).await {
|
||||||
let mut chunk = res.map_err(|err| DispatchError::ResponseBody(err.into()))?;
|
let mut chunk = res.map_err(|err| DispatchError::ResponseBody(err.into()))?;
|
||||||
|
|
||||||
@ -285,13 +297,13 @@ fn prepare_response(
|
|||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
|
|
||||||
let _ = match size {
|
match size {
|
||||||
BodySize::None | BodySize::Stream => None,
|
BodySize::None | BodySize::Stream => {}
|
||||||
|
|
||||||
BodySize::Sized(0) => {
|
BodySize::Sized(0) => {
|
||||||
#[allow(clippy::declare_interior_mutable_const)]
|
#[allow(clippy::declare_interior_mutable_const)]
|
||||||
const HV_ZERO: HeaderValue = HeaderValue::from_static("0");
|
const HV_ZERO: HeaderValue = HeaderValue::from_static("0");
|
||||||
res.headers_mut().insert(CONTENT_LENGTH, HV_ZERO)
|
res.headers_mut().insert(CONTENT_LENGTH, HV_ZERO);
|
||||||
}
|
}
|
||||||
|
|
||||||
BodySize::Sized(len) => {
|
BodySize::Sized(len) => {
|
||||||
@ -300,19 +312,28 @@ fn prepare_response(
|
|||||||
res.headers_mut().insert(
|
res.headers_mut().insert(
|
||||||
CONTENT_LENGTH,
|
CONTENT_LENGTH,
|
||||||
HeaderValue::from_str(buf.format(*len)).unwrap(),
|
HeaderValue::from_str(buf.format(*len)).unwrap(),
|
||||||
)
|
);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// copy headers
|
// copy headers
|
||||||
for (key, value) in head.headers.iter() {
|
for (key, value) in head.headers.iter() {
|
||||||
match *key {
|
match key {
|
||||||
// TODO: consider skipping other headers according to:
|
// omit HTTP/1.x only headers according to:
|
||||||
// https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.2
|
// https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.2
|
||||||
// omit HTTP/1.x only headers
|
&CONNECTION | &TRANSFER_ENCODING | &UPGRADE => continue,
|
||||||
CONNECTION | TRANSFER_ENCODING => continue,
|
|
||||||
CONTENT_LENGTH if skip_len => continue,
|
&CONTENT_LENGTH if skip_len => continue,
|
||||||
DATE => has_date = true,
|
&DATE => has_date = true,
|
||||||
|
|
||||||
|
// omit HTTP/1.x only headers according to:
|
||||||
|
// https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.2
|
||||||
|
hdr if hdr == HeaderName::from_static("keep-alive")
|
||||||
|
|| hdr == HeaderName::from_static("proxy-connection") =>
|
||||||
|
{
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -322,7 +343,7 @@ fn prepare_response(
|
|||||||
// set date header
|
// set date header
|
||||||
if !has_date {
|
if !has_date {
|
||||||
let mut bytes = BytesMut::with_capacity(29);
|
let mut bytes = BytesMut::with_capacity(29);
|
||||||
config.set_date_header(&mut bytes);
|
config.write_date_header_value(&mut bytes);
|
||||||
res.headers_mut().insert(
|
res.headers_mut().insert(
|
||||||
DATE,
|
DATE,
|
||||||
// SAFETY: serialized date-times are known ASCII strings
|
// SAFETY: serialized date-times are known ASCII strings
|
||||||
|
@ -7,7 +7,7 @@ use std::{
|
|||||||
};
|
};
|
||||||
|
|
||||||
use actix_codec::{AsyncRead, AsyncWrite};
|
use actix_codec::{AsyncRead, AsyncWrite};
|
||||||
use actix_rt::time::Sleep;
|
use actix_rt::time::{sleep_until, Sleep};
|
||||||
use bytes::Bytes;
|
use bytes::Bytes;
|
||||||
use futures_core::{ready, Stream};
|
use futures_core::{ready, Stream};
|
||||||
use h2::{
|
use h2::{
|
||||||
@ -15,17 +15,16 @@ use h2::{
|
|||||||
RecvStream,
|
RecvStream,
|
||||||
};
|
};
|
||||||
|
|
||||||
mod dispatcher;
|
|
||||||
mod service;
|
|
||||||
|
|
||||||
pub use self::dispatcher::Dispatcher;
|
|
||||||
pub use self::service::H2Service;
|
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
config::ServiceConfig,
|
config::ServiceConfig,
|
||||||
error::{DispatchError, PayloadError},
|
error::{DispatchError, PayloadError},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
mod dispatcher;
|
||||||
|
mod service;
|
||||||
|
|
||||||
|
pub use self::{dispatcher::Dispatcher, service::H2Service};
|
||||||
|
|
||||||
/// HTTP/2 peer stream.
|
/// HTTP/2 peer stream.
|
||||||
pub struct Payload {
|
pub struct Payload {
|
||||||
stream: RecvStream,
|
stream: RecvStream,
|
||||||
@ -58,16 +57,15 @@ impl Stream for Payload {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn handshake_with_timeout<T>(
|
pub(crate) fn handshake_with_timeout<T>(io: T, config: &ServiceConfig) -> HandshakeWithTimeout<T>
|
||||||
io: T,
|
|
||||||
config: &ServiceConfig,
|
|
||||||
) -> HandshakeWithTimeout<T>
|
|
||||||
where
|
where
|
||||||
T: AsyncRead + AsyncWrite + Unpin,
|
T: AsyncRead + AsyncWrite + Unpin,
|
||||||
{
|
{
|
||||||
HandshakeWithTimeout {
|
HandshakeWithTimeout {
|
||||||
handshake: handshake(io),
|
handshake: handshake(io),
|
||||||
timer: config.client_timer().map(Box::pin),
|
timer: config
|
||||||
|
.client_request_deadline()
|
||||||
|
.map(|deadline| Box::pin(sleep_until(deadline.into()))),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -86,7 +84,7 @@ where
|
|||||||
let this = self.get_mut();
|
let this = self.get_mut();
|
||||||
|
|
||||||
match Pin::new(&mut this.handshake).poll(cx)? {
|
match Pin::new(&mut this.handshake).poll(cx)? {
|
||||||
// return the timer on success handshake. It can be re-used for h2 ping-pong.
|
// return the timer on success handshake; its slot can be re-used for h2 ping-pong
|
||||||
Poll::Ready(conn) => Poll::Ready(Ok((conn, this.timer.take()))),
|
Poll::Ready(conn) => Poll::Ready(Ok((conn, this.timer.take()))),
|
||||||
Poll::Pending => match this.timer.as_mut() {
|
Poll::Pending => match this.timer.as_mut() {
|
||||||
Some(timer) => {
|
Some(timer) => {
|
||||||
@ -101,11 +99,9 @@ where
|
|||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use std::panic::{RefUnwindSafe, UnwindSafe};
|
|
||||||
|
|
||||||
use static_assertions::assert_impl_all;
|
use static_assertions::assert_impl_all;
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
assert_impl_all!(Payload: Unpin, Send, Sync, UnwindSafe, RefUnwindSafe);
|
assert_impl_all!(Payload: Unpin, Send, Sync);
|
||||||
}
|
}
|
||||||
|
@ -14,8 +14,9 @@ use actix_service::{
|
|||||||
};
|
 };
 use actix_utils::future::ready;
 use futures_core::{future::LocalBoxFuture, ready};
-use log::error;
+use tracing::{error, trace};

+use super::{dispatcher::Dispatcher, handshake_with_timeout, HandshakeWithTimeout};
 use crate::{
     body::{BoxBody, MessageBody},
     config::ServiceConfig,
@@ -24,8 +25,6 @@ use crate::{
     ConnectCallback, OnConnectData, Request, Response,
 };

-use super::{dispatcher::Dispatcher, handshake_with_timeout, HandshakeWithTimeout};
-
 /// `ServiceFactory` implementation for HTTP/2 transport
 pub struct H2Service<T, S, B> {
     srv: S,
@@ -141,8 +140,8 @@ mod openssl {
     }
 }

-#[cfg(feature = "rustls")]
-mod rustls {
+#[cfg(feature = "rustls-0_20")]
+mod rustls_0_20 {
     use std::io;

     use actix_service::ServiceFactoryExt as _;
@@ -163,7 +162,7 @@ mod rustls {

     B: MessageBody + 'static,
 {
-    /// Create Rustls based service.
+    /// Create Rustls v0.20 based service.
     pub fn rustls(
         self,
         mut config: ServerConfig,
@@ -192,6 +191,159 @@ mod rustls {
     }
 }

+#[cfg(feature = "rustls-0_21")]
+mod rustls_0_21 {
+    use std::io;
+
+    use actix_service::ServiceFactoryExt as _;
+    use actix_tls::accept::{
+        rustls_0_21::{reexports::ServerConfig, Acceptor, TlsStream},
+        TlsError,
+    };
+
+    use super::*;
+
+    impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
+    where
+        S: ServiceFactory<Request, Config = ()>,
+        S::Future: 'static,
+        S::Error: Into<Response<BoxBody>> + 'static,
+        S::Response: Into<Response<B>> + 'static,
+        <S::Service as Service<Request>>::Future: 'static,
+
+        B: MessageBody + 'static,
+    {
+        /// Create Rustls v0.21 based service.
+        pub fn rustls_021(
+            self,
+            mut config: ServerConfig,
+        ) -> impl ServiceFactory<
+            TcpStream,
+            Config = (),
+            Response = (),
+            Error = TlsError<io::Error, DispatchError>,
+            InitError = S::InitError,
+        > {
+            let mut protos = vec![b"h2".to_vec()];
+            protos.extend_from_slice(&config.alpn_protocols);
+            config.alpn_protocols = protos;
+
+            Acceptor::new(config)
+                .map_init_err(|_| {
+                    unreachable!("TLS acceptor service factory does not error on init")
+                })
+                .map_err(TlsError::into_service_error)
+                .map(|io: TlsStream<TcpStream>| {
+                    let peer_addr = io.get_ref().0.peer_addr().ok();
+                    (io, peer_addr)
+                })
+                .and_then(self.map_err(TlsError::Service))
+        }
+    }
+}
+
+#[cfg(feature = "rustls-0_22")]
+mod rustls_0_22 {
+    use std::io;
+
+    use actix_service::ServiceFactoryExt as _;
+    use actix_tls::accept::{
+        rustls_0_22::{reexports::ServerConfig, Acceptor, TlsStream},
+        TlsError,
+    };
+
+    use super::*;
+
+    impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
+    where
+        S: ServiceFactory<Request, Config = ()>,
+        S::Future: 'static,
+        S::Error: Into<Response<BoxBody>> + 'static,
+        S::Response: Into<Response<B>> + 'static,
+        <S::Service as Service<Request>>::Future: 'static,
+
+        B: MessageBody + 'static,
+    {
+        /// Create Rustls v0.22 based service.
+        pub fn rustls_0_22(
+            self,
+            mut config: ServerConfig,
+        ) -> impl ServiceFactory<
+            TcpStream,
+            Config = (),
+            Response = (),
+            Error = TlsError<io::Error, DispatchError>,
+            InitError = S::InitError,
+        > {
+            let mut protos = vec![b"h2".to_vec()];
+            protos.extend_from_slice(&config.alpn_protocols);
+            config.alpn_protocols = protos;
+
+            Acceptor::new(config)
+                .map_init_err(|_| {
+                    unreachable!("TLS acceptor service factory does not error on init")
+                })
+                .map_err(TlsError::into_service_error)
+                .map(|io: TlsStream<TcpStream>| {
+                    let peer_addr = io.get_ref().0.peer_addr().ok();
+                    (io, peer_addr)
+                })
+                .and_then(self.map_err(TlsError::Service))
+        }
+    }
+}
+
+#[cfg(feature = "rustls-0_23")]
+mod rustls_0_23 {
+    use std::io;
+
+    use actix_service::ServiceFactoryExt as _;
+    use actix_tls::accept::{
+        rustls_0_23::{reexports::ServerConfig, Acceptor, TlsStream},
+        TlsError,
+    };
+
+    use super::*;
+
+    impl<S, B> H2Service<TlsStream<TcpStream>, S, B>
+    where
+        S: ServiceFactory<Request, Config = ()>,
+        S::Future: 'static,
+        S::Error: Into<Response<BoxBody>> + 'static,
+        S::Response: Into<Response<B>> + 'static,
+        <S::Service as Service<Request>>::Future: 'static,
+
+        B: MessageBody + 'static,
+    {
+        /// Create Rustls v0.23 based service.
+        pub fn rustls_0_23(
+            self,
+            mut config: ServerConfig,
+        ) -> impl ServiceFactory<
+            TcpStream,
+            Config = (),
+            Response = (),
+            Error = TlsError<io::Error, DispatchError>,
+            InitError = S::InitError,
+        > {
+            let mut protos = vec![b"h2".to_vec()];
+            protos.extend_from_slice(&config.alpn_protocols);
+            config.alpn_protocols = protos;
+
+            Acceptor::new(config)
+                .map_init_err(|_| {
+                    unreachable!("TLS acceptor service factory does not error on init")
+                })
+                .map_err(TlsError::into_service_error)
+                .map(|io: TlsStream<TcpStream>| {
+                    let peer_addr = io.get_ref().0.peer_addr().ok();
+                    (io, peer_addr)
+                })
+                .and_then(self.map_err(TlsError::Service))
+        }
+    }
+}
+
 impl<T, S, B> ServiceFactory<(T, Option<net::SocketAddr>)> for H2Service<T, S, B>
 where
     T: AsyncRead + AsyncWrite + Unpin + 'static,
@@ -282,7 +434,7 @@ where

         H2ServiceHandlerResponse {
             state: State::Handshake(
-                Some(self.flow.clone()),
+                Some(Rc::clone(&self.flow)),
                 Some(self.cfg.clone()),
                 addr,
                 on_connect_data,
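Each of the new rustls modules above follows the same recipe: advertise `h2` via ALPN ahead of any protocols the caller already configured, map acceptor failures into `TlsError`, capture the peer address from the TLS stream, and only then hand the connection to the HTTP/2 service. A minimal sketch of just the ALPN step, assuming `config` is an already-built rustls `ServerConfig` (the helper name below is illustrative, not part of the diff):

// Illustrative helper mirroring the ALPN setup used by the new rustls modules:
// prepend "h2" so HTTP/2 is negotiated ahead of any caller-configured protocols.
fn prefer_h2(mut config: rustls::ServerConfig) -> rustls::ServerConfig {
    let mut protos = vec![b"h2".to_vec()];
    protos.extend_from_slice(&config.alpn_protocols);
    config.alpn_protocols = protos;
    config
}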
actix-http/src/header/common.rs (new file, 61 lines)
@@ -0,0 +1,61 @@
+//! Common header names not defined in [`http`].
+//!
+//! Any headers added to this file will need to be re-exported from the list at `crate::headers`.
+
+use http::header::HeaderName;
+
+/// Response header field that indicates how caches have handled that response and its corresponding
+/// request.
+///
+/// See [RFC 9211](https://www.rfc-editor.org/rfc/rfc9211) for full semantics.
+// TODO(breaking): replace with http's version
+pub const CACHE_STATUS: HeaderName = HeaderName::from_static("cache-status");
+
+/// Response header field that allows origin servers to control the behavior of CDN caches
+/// interposed between them and clients separately from other caches that might handle the response.
+///
+/// See [RFC 9213](https://www.rfc-editor.org/rfc/rfc9213) for full semantics.
+// TODO(breaking): replace with http's version
+pub const CDN_CACHE_CONTROL: HeaderName = HeaderName::from_static("cdn-cache-control");
+
+/// Response header field that sends a signal to the user agent that it ought to remove all data of
+/// a certain set of types.
+///
+/// See the [W3C Clear-Site-Data spec] for full semantics.
+///
+/// [W3C Clear-Site-Data spec]: https://www.w3.org/TR/clear-site-data/#header
+pub const CLEAR_SITE_DATA: HeaderName = HeaderName::from_static("clear-site-data");
+
+/// Response header that prevents a document from loading any cross-origin resources that don't
+/// explicitly grant the document permission (using [CORP] or [CORS]).
+///
+/// [CORP]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Cross-Origin_Resource_Policy_(CORP)
+/// [CORS]: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS
+pub const CROSS_ORIGIN_EMBEDDER_POLICY: HeaderName =
+    HeaderName::from_static("cross-origin-embedder-policy");
+
+/// Response header that allows you to ensure a top-level document does not share a browsing context
+/// group with cross-origin documents.
+pub const CROSS_ORIGIN_OPENER_POLICY: HeaderName =
+    HeaderName::from_static("cross-origin-opener-policy");
+
+/// Response header that conveys a desire that the browser blocks no-cors cross-origin/cross-site
+/// requests to the given resource.
+pub const CROSS_ORIGIN_RESOURCE_POLICY: HeaderName =
+    HeaderName::from_static("cross-origin-resource-policy");
+
+/// Response header that provides a mechanism to allow and deny the use of browser features in a
+/// document or within any `<iframe>` elements in the document.
+pub const PERMISSIONS_POLICY: HeaderName = HeaderName::from_static("permissions-policy");
+
+/// Request header (de-facto standard) for identifying the originating IP address of a client
+/// connecting to a web server through a proxy server.
+pub const X_FORWARDED_FOR: HeaderName = HeaderName::from_static("x-forwarded-for");
+
+/// Request header (de-facto standard) for identifying the original host requested by the client in
+/// the `Host` HTTP request header.
+pub const X_FORWARDED_HOST: HeaderName = HeaderName::from_static("x-forwarded-host");
+
+/// Request header (de-facto standard) for identifying the protocol that a client used to connect to
+/// your proxy or load balancer.
+pub const X_FORWARDED_PROTO: HeaderName = HeaderName::from_static("x-forwarded-proto");
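Because these are ordinary `HeaderName` constants, they slot into the existing header APIs with no special handling. A small sketch, assuming the constants are re-exported from `actix_http::header` as the module docs above describe:

use actix_http::header::{HeaderMap, HeaderValue, CROSS_ORIGIN_RESOURCE_POLICY};

fn main() {
    let mut headers = HeaderMap::new();
    // The new constants behave like any other `HeaderName`.
    headers.insert(
        CROSS_ORIGIN_RESOURCE_POLICY,
        HeaderValue::from_static("same-origin"),
    );
    assert!(headers.contains_key("cross-origin-resource-policy"));
}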
@@ -1,7 +1,5 @@
 //! [`TryIntoHeaderPair`] trait and implementations.

-use std::convert::TryFrom as _;
-
 use super::{
     Header, HeaderName, HeaderValue, InvalidHeaderName, InvalidHeaderValue, TryIntoHeaderValue,
 };
@@ -1,7 +1,5 @@
 //! [`TryIntoHeaderValue`] trait and implementations.

-use std::convert::TryFrom as _;
-
 use bytes::Bytes;
 use http::{header::InvalidHeaderValue, Error as HttpError, HeaderValue};
 use mime::Mime;
@@ -2,19 +2,20 @@

 use std::{borrow::Cow, collections::hash_map, iter, ops};

-use ahash::AHashMap;
+use foldhash::{HashMap as FoldHashMap, HashMapExt as _};
 use http::header::{HeaderName, HeaderValue};
 use smallvec::{smallvec, SmallVec};

-use crate::header::AsHeaderName;
+use super::AsHeaderName;

 /// A multi-map of HTTP headers.
 ///
 /// `HeaderMap` is a "multi-map" of [`HeaderName`] to one or more [`HeaderValue`]s.
 ///
 /// # Examples
+///
 /// ```
-/// use actix_http::header::{self, HeaderMap, HeaderValue};
+/// # use actix_http::header::{self, HeaderMap, HeaderValue};
 ///
 /// let mut map = HeaderMap::new();
 ///
@@ -29,9 +30,24 @@ use crate::header::AsHeaderName;
 ///
 /// assert!(!map.contains_key(header::ORIGIN));
 /// ```
+///
+/// Construct a header map using the [`FromIterator`] implementation. Note that it uses the append
+/// strategy, so duplicate header names are preserved.
+///
+/// ```
+/// use actix_http::header::{self, HeaderMap, HeaderValue};
+///
+/// let headers = HeaderMap::from_iter([
+///     (header::CONTENT_TYPE, HeaderValue::from_static("text/plain")),
+///     (header::COOKIE, HeaderValue::from_static("foo=1")),
+///     (header::COOKIE, HeaderValue::from_static("bar=1")),
+/// ]);
+///
+/// assert_eq!(headers.len(), 3);
+/// ```
 #[derive(Debug, Clone, Default)]
 pub struct HeaderMap {
-    pub(crate) inner: AHashMap<HeaderName, Value>,
+    pub(crate) inner: FoldHashMap<HeaderName, Value>,
 }

 /// A bespoke non-empty list for HeaderMap values.
@@ -100,7 +116,7 @@ impl HeaderMap {
     /// ```
     pub fn with_capacity(capacity: usize) -> Self {
         HeaderMap {
-            inner: AHashMap::with_capacity(capacity),
+            inner: FoldHashMap::with_capacity(capacity),
         }
     }

@@ -150,9 +166,7 @@ impl HeaderMap {
     /// assert_eq!(map.len(), 3);
     /// ```
     pub fn len(&self) -> usize {
-        self.inner
-            .iter()
-            .fold(0, |acc, (_, values)| acc + values.len())
+        self.inner.values().map(|vals| vals.len()).sum()
     }

     /// Returns the number of _keys_ stored in the map.
@@ -309,7 +323,7 @@ impl HeaderMap {
     pub fn get_all(&self, key: impl AsHeaderName) -> std::slice::Iter<'_, HeaderValue> {
         match self.get_value(key) {
             Some(value) => value.iter(),
-            None => (&[]).iter(),
+            None => [].iter(),
         }
     }

@@ -370,8 +384,8 @@ impl HeaderMap {
     /// let removed = map.insert(header::ACCEPT, HeaderValue::from_static("text/html"));
     /// assert!(!removed.is_empty());
     /// ```
-    pub fn insert(&mut self, key: HeaderName, val: HeaderValue) -> Removed {
-        let value = self.inner.insert(key, Value::one(val));
+    pub fn insert(&mut self, name: HeaderName, val: HeaderValue) -> Removed {
+        let value = self.inner.insert(name, Value::one(val));
         Removed::new(value)
     }

@@ -552,6 +566,39 @@ impl HeaderMap {
         Keys(self.inner.keys())
     }

+    /// Retains only the headers specified by the predicate.
+    ///
+    /// In other words, removes all headers `(name, val)` for which `retain_fn(&name, &mut val)`
+    /// returns false.
+    ///
+    /// The order in which headers are visited should be considered arbitrary.
+    ///
+    /// # Examples
+    /// ```
+    /// # use actix_http::header::{self, HeaderMap, HeaderValue};
+    /// let mut map = HeaderMap::new();
+    ///
+    /// map.append(header::HOST, HeaderValue::from_static("duck.com"));
+    /// map.append(header::SET_COOKIE, HeaderValue::from_static("one=1"));
+    /// map.append(header::SET_COOKIE, HeaderValue::from_static("two=2"));
+    ///
+    /// map.retain(|name, val| val.as_bytes().starts_with(b"one"));
+    ///
+    /// assert_eq!(map.len(), 1);
+    /// assert!(map.contains_key(&header::SET_COOKIE));
+    /// ```
+    pub fn retain<F>(&mut self, mut retain_fn: F)
+    where
+        F: FnMut(&HeaderName, &mut HeaderValue) -> bool,
+    {
+        self.inner.retain(|name, vals| {
+            vals.inner.retain(|val| retain_fn(name, val));
+
+            // invariant: make sure newly empty value lists are removed
+            !vals.is_empty()
+        })
+    }
+
     /// Clears the map, returning all name-value sets as an iterator.
     ///
     /// Header names will only be yielded for the first value in each set. All items that are
@@ -605,6 +652,37 @@ impl<'a> IntoIterator for &'a HeaderMap {
     }
 }

+impl FromIterator<(HeaderName, HeaderValue)> for HeaderMap {
+    fn from_iter<T: IntoIterator<Item = (HeaderName, HeaderValue)>>(iter: T) -> Self {
+        iter.into_iter()
+            .fold(Self::new(), |mut map, (name, value)| {
+                map.append(name, value);
+                map
+            })
+    }
+}
+
+/// Convert a `http::HeaderMap` to our `HeaderMap`.
+impl From<http::HeaderMap> for HeaderMap {
+    fn from(mut map: http::HeaderMap) -> Self {
+        Self::from_drain(map.drain())
+    }
+}
+
+/// Convert our `HeaderMap` to a `http::HeaderMap`.
+impl From<HeaderMap> for http::HeaderMap {
+    fn from(map: HeaderMap) -> Self {
+        Self::from_iter(map)
+    }
+}
+
+/// Convert our `&HeaderMap` to a `http::HeaderMap`.
+impl From<&HeaderMap> for http::HeaderMap {
+    fn from(map: &HeaderMap) -> Self {
+        map.to_owned().into()
+    }
+}
+
 /// Iterator over removed, owned values with the same associated name.
 ///
 /// Returned from methods that remove or replace items. See [`HeaderMap::insert`]
@@ -623,7 +701,7 @@ impl Removed {
     /// Returns true if iterator contains no elements, without consuming it.
     ///
     /// If called immediately after [`HeaderMap::insert`] or [`HeaderMap::remove`], it will indicate
-    /// wether any items were actually replaced or removed, respectively.
+    /// whether any items were actually replaced or removed, respectively.
     pub fn is_empty(&self) -> bool {
         match self.inner {
             // size hint lower bound of smallvec is the correct length
@@ -752,7 +830,7 @@ impl<'a> Drain<'a> {
     }
 }

-impl<'a> Iterator for Drain<'a> {
+impl Iterator for Drain<'_> {
     type Item = (Option<HeaderName>, HeaderValue);

     fn next(&mut self) -> Option<Self::Item> {
@@ -936,6 +1014,55 @@ mod tests {
         assert!(map.is_empty());
     }

+    #[test]
+    fn retain() {
+        let mut map = HeaderMap::new();
+
+        map.append(header::LOCATION, HeaderValue::from_static("/test"));
+        map.append(header::HOST, HeaderValue::from_static("duck.com"));
+        map.append(header::COOKIE, HeaderValue::from_static("one=1"));
+        map.append(header::COOKIE, HeaderValue::from_static("two=2"));
+
+        assert_eq!(map.len(), 4);
+
+        // by value
+        map.retain(|_, val| !val.as_bytes().contains(&b'/'));
+        assert_eq!(map.len(), 3);
+
+        // by name
+        map.retain(|name, _| name.as_str() != "cookie");
+        assert_eq!(map.len(), 1);
+
+        // keep but mutate value
+        map.retain(|_, val| {
+            *val = HeaderValue::from_static("replaced");
+            true
+        });
+        assert_eq!(map.len(), 1);
+        assert_eq!(map.get("host").unwrap(), "replaced");
+    }
+
+    #[test]
+    fn retain_removes_empty_value_lists() {
+        let mut map = HeaderMap::with_capacity(3);
+
+        map.append(header::HOST, HeaderValue::from_static("duck.com"));
+        map.append(header::HOST, HeaderValue::from_static("duck.com"));
+
+        assert_eq!(map.len(), 2);
+        assert_eq!(map.len_keys(), 1);
+        assert_eq!(map.inner.len(), 1);
+        assert_eq!(map.capacity(), 3);
+
+        // remove everything
+        map.retain(|_n, _v| false);
+
+        assert_eq!(map.len(), 0);
+        assert_eq!(map.len_keys(), 0);
+        assert_eq!(map.inner.len(), 0);
+        assert_eq!(map.capacity(), 3);
+    }
+
     #[test]
     fn entries_into_iter() {
         let mut map = HeaderMap::new();
@@ -1033,9 +1160,7 @@ mod tests {
         assert!(vals.next().is_none());
     }

-    fn owned_pair<'a>(
-        (name, val): (&'a HeaderName, &'a HeaderValue),
-    ) -> (HeaderName, HeaderValue) {
+    fn owned_pair<'a>((name, val): (&'a HeaderName, &'a HeaderValue)) -> (HeaderName, HeaderValue) {
         (name.clone(), val.clone())
     }
 }
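Taken together, the new `FromIterator` impl and `retain` make it possible to build a map in one expression and prune it in place. A short sketch based on the API added above (the header values are chosen purely for illustration):

use actix_http::header::{self, HeaderMap, HeaderValue};

fn main() {
    // `from_iter` uses the append strategy, so both cookie values survive.
    let mut map = HeaderMap::from_iter([
        (header::CONTENT_TYPE, HeaderValue::from_static("text/plain")),
        (header::COOKIE, HeaderValue::from_static("foo=1")),
        (header::COOKIE, HeaderValue::from_static("bar=1")),
    ]);
    assert_eq!(map.len(), 3);

    // Prune in place; a name whose value list becomes empty is removed entirely.
    map.retain(|name, val| name.as_str() != "cookie" || val.as_bytes().starts_with(b"foo"));
    assert_eq!(map.len(), 2);
}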
@@ -1,69 +1,70 @@
 //! Pre-defined `HeaderName`s, traits for parsing and conversion, and other header utility methods.

-use percent_encoding::{AsciiSet, CONTROLS};
+// declaring new header consts will yield this error
+#![allow(clippy::declare_interior_mutable_const)]

 // re-export from http except header map related items
-pub use http::header::{
+pub use ::http::header::{
     HeaderName, HeaderValue, InvalidHeaderName, InvalidHeaderValue, ToStrError,
 };

-// re-export const header names
-pub use http::header::{
+// re-export const header names, list is explicit so that any updates to `common` module do not
+// conflict with this set
+pub use ::http::header::{
     ACCEPT, ACCEPT_CHARSET, ACCEPT_ENCODING, ACCEPT_LANGUAGE, ACCEPT_RANGES,
-    ACCESS_CONTROL_ALLOW_CREDENTIALS, ACCESS_CONTROL_ALLOW_HEADERS,
-    ACCESS_CONTROL_ALLOW_METHODS, ACCESS_CONTROL_ALLOW_ORIGIN, ACCESS_CONTROL_EXPOSE_HEADERS,
-    ACCESS_CONTROL_MAX_AGE, ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD, AGE,
-    ALLOW, ALT_SVC, AUTHORIZATION, CACHE_CONTROL, CONNECTION, CONTENT_DISPOSITION,
-    CONTENT_ENCODING, CONTENT_LANGUAGE, CONTENT_LENGTH, CONTENT_LOCATION, CONTENT_RANGE,
-    CONTENT_SECURITY_POLICY, CONTENT_SECURITY_POLICY_REPORT_ONLY, CONTENT_TYPE, COOKIE, DATE,
-    DNT, ETAG, EXPECT, EXPIRES, FORWARDED, FROM, HOST, IF_MATCH, IF_MODIFIED_SINCE,
-    IF_NONE_MATCH, IF_RANGE, IF_UNMODIFIED_SINCE, LAST_MODIFIED, LINK, LOCATION, MAX_FORWARDS,
-    ORIGIN, PRAGMA, PROXY_AUTHENTICATE, PROXY_AUTHORIZATION, PUBLIC_KEY_PINS,
-    PUBLIC_KEY_PINS_REPORT_ONLY, RANGE, REFERER, REFERRER_POLICY, REFRESH, RETRY_AFTER,
-    SEC_WEBSOCKET_ACCEPT, SEC_WEBSOCKET_EXTENSIONS, SEC_WEBSOCKET_KEY, SEC_WEBSOCKET_PROTOCOL,
-    SEC_WEBSOCKET_VERSION, SERVER, SET_COOKIE, STRICT_TRANSPORT_SECURITY, TE, TRAILER,
-    TRANSFER_ENCODING, UPGRADE, UPGRADE_INSECURE_REQUESTS, USER_AGENT, VARY, VIA, WARNING,
-    WWW_AUTHENTICATE, X_CONTENT_TYPE_OPTIONS, X_DNS_PREFETCH_CONTROL, X_FRAME_OPTIONS,
-    X_XSS_PROTECTION,
+    ACCESS_CONTROL_ALLOW_CREDENTIALS, ACCESS_CONTROL_ALLOW_HEADERS, ACCESS_CONTROL_ALLOW_METHODS,
+    ACCESS_CONTROL_ALLOW_ORIGIN, ACCESS_CONTROL_EXPOSE_HEADERS, ACCESS_CONTROL_MAX_AGE,
+    ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD, AGE, ALLOW, ALT_SVC,
+    AUTHORIZATION, CACHE_CONTROL, CONNECTION, CONTENT_DISPOSITION, CONTENT_ENCODING,
+    CONTENT_LANGUAGE, CONTENT_LENGTH, CONTENT_LOCATION, CONTENT_RANGE, CONTENT_SECURITY_POLICY,
+    CONTENT_SECURITY_POLICY_REPORT_ONLY, CONTENT_TYPE, COOKIE, DATE, DNT, ETAG, EXPECT, EXPIRES,
+    FORWARDED, FROM, HOST, IF_MATCH, IF_MODIFIED_SINCE, IF_NONE_MATCH, IF_RANGE,
+    IF_UNMODIFIED_SINCE, LAST_MODIFIED, LINK, LOCATION, MAX_FORWARDS, ORIGIN, PRAGMA,
+    PROXY_AUTHENTICATE, PROXY_AUTHORIZATION, PUBLIC_KEY_PINS, PUBLIC_KEY_PINS_REPORT_ONLY, RANGE,
+    REFERER, REFERRER_POLICY, REFRESH, RETRY_AFTER, SEC_WEBSOCKET_ACCEPT, SEC_WEBSOCKET_EXTENSIONS,
+    SEC_WEBSOCKET_KEY, SEC_WEBSOCKET_PROTOCOL, SEC_WEBSOCKET_VERSION, SERVER, SET_COOKIE,
+    STRICT_TRANSPORT_SECURITY, TE, TRAILER, TRANSFER_ENCODING, UPGRADE, UPGRADE_INSECURE_REQUESTS,
+    USER_AGENT, VARY, VIA, WARNING, WWW_AUTHENTICATE, X_CONTENT_TYPE_OPTIONS,
+    X_DNS_PREFETCH_CONTROL, X_FRAME_OPTIONS, X_XSS_PROTECTION,
 };
+use percent_encoding::{AsciiSet, CONTROLS};

 use crate::{error::ParseError, HttpMessage};

 mod as_name;
+mod common;
 mod into_pair;
 mod into_value;
 pub mod map;
 mod shared;
 mod utils;

-pub use self::as_name::AsHeaderName;
-pub use self::into_pair::TryIntoHeaderPair;
-pub use self::into_value::TryIntoHeaderValue;
-pub use self::map::HeaderMap;
-pub use self::shared::{
-    parse_extended_value, q, Charset, ContentEncoding, ExtendedValue, HttpDate, LanguageTag,
-    Quality, QualityItem,
-};
-pub use self::utils::{
-    fmt_comma_delimited, from_comma_delimited, from_one_raw_str, http_percent_encode,
-};
+pub use self::{
+    as_name::AsHeaderName,
+    // re-export list is explicit so that any updates to `http` do not conflict with this set
+    common::{
+        CACHE_STATUS, CDN_CACHE_CONTROL, CLEAR_SITE_DATA, CROSS_ORIGIN_EMBEDDER_POLICY,
+        CROSS_ORIGIN_OPENER_POLICY, CROSS_ORIGIN_RESOURCE_POLICY, PERMISSIONS_POLICY,
+        X_FORWARDED_FOR, X_FORWARDED_HOST, X_FORWARDED_PROTO,
+    },
+    into_pair::TryIntoHeaderPair,
+    into_value::TryIntoHeaderValue,
+    map::HeaderMap,
+    shared::{
+        parse_extended_value, q, Charset, ContentEncoding, ExtendedValue, HttpDate, LanguageTag,
+        Quality, QualityItem,
+    },
+    utils::{fmt_comma_delimited, from_comma_delimited, from_one_raw_str, http_percent_encode},
+};

 /// An interface for types that already represent a valid header.
 pub trait Header: TryIntoHeaderValue {
-    /// Returns the name of the header field
+    /// Returns the name of the header field.
     fn name() -> HeaderName;

-    /// Parse a header
+    /// Parse the header from a HTTP message.
     fn parse<M: HttpMessage>(msg: &M) -> Result<Self, ParseError>;
 }

-/// Convert `http::HeaderMap` to our `HeaderMap`.
-impl From<http::HeaderMap> for HeaderMap {
-    fn from(mut map: http::HeaderMap) -> HeaderMap {
-        HeaderMap::from_drain(map.drain())
-    }
-}
-
 /// This encode set is used for HTTP header values and is defined at
 /// <https://datatracker.ietf.org/doc/html/rfc5987#section-3.2>.
 pub(crate) const HTTP_VALUE: &AsciiSet = &CONTROLS
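The re-export consolidation is meant to be invisible to downstream code: the names re-exported from `http` and the new `common` constants resolve from the same `header` module path. A quick sketch, with import paths assumed per the re-export list above:

use actix_http::header::{CONTENT_TYPE, X_FORWARDED_FOR};

fn main() {
    // One constant comes from the `http` crate, the other from the new `common`
    // module, but both are plain `HeaderName`s behind the same module path.
    assert_eq!(CONTENT_TYPE.as_str(), "content-type");
    assert_eq!(X_FORWARDED_FOR.as_str(), "x-forwarded-for");
}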
@@ -1,4 +1,4 @@
-use std::{convert::TryFrom, str::FromStr};
+use std::str::FromStr;

 use derive_more::{Display, Error};
 use http::header::InvalidHeaderValue;
@@ -11,7 +11,7 @@ use crate::{

 /// Error returned when a content encoding is unknown.
 #[derive(Debug, Display, Error)]
-#[display(fmt = "unsupported content encoding")]
+#[display("unsupported content encoding")]
 pub struct ContentEncodingParseError;

 /// Represents a supported content encoding.
@@ -20,14 +20,16 @@ pub struct ContentEncodingParseError;
 /// See [IANA HTTP Content Coding Registry].
 ///
 /// [IANA HTTP Content Coding Registry]: https://www.iana.org/assignments/http-parameters/http-parameters.xhtml
-#[derive(Debug, Clone, Copy, PartialEq)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 #[non_exhaustive]
 pub enum ContentEncoding {
-    /// Automatically select encoding based on encoding negotiation.
-    Auto,
+    /// Indicates the no-op identity encoding.
+    ///
+    /// I.e., no compression or modification.
+    Identity,

     /// A format using the Brotli algorithm.
-    Br,
+    Brotli,

     /// A format using the zlib structure with deflate algorithm.
     Deflate,
@@ -37,32 +39,36 @@ pub enum ContentEncoding {

     /// Zstd algorithm.
     Zstd,
-
-    /// Indicates the identity function (i.e. no compression, nor modification).
-    Identity,
 }

 impl ContentEncoding {
-    /// Is the content compressed?
-    #[inline]
-    pub const fn is_compression(self) -> bool {
-        matches!(self, ContentEncoding::Identity | ContentEncoding::Auto)
-    }
-
     /// Convert content encoding to string.
     #[inline]
     pub const fn as_str(self) -> &'static str {
         match self {
-            ContentEncoding::Br => "br",
+            ContentEncoding::Brotli => "br",
             ContentEncoding::Gzip => "gzip",
             ContentEncoding::Deflate => "deflate",
             ContentEncoding::Zstd => "zstd",
-            ContentEncoding::Identity | ContentEncoding::Auto => "identity",
+            ContentEncoding::Identity => "identity",
+        }
+    }
+
+    /// Convert content encoding to header value.
+    #[inline]
+    pub const fn to_header_value(self) -> HeaderValue {
+        match self {
+            ContentEncoding::Brotli => HeaderValue::from_static("br"),
+            ContentEncoding::Gzip => HeaderValue::from_static("gzip"),
+            ContentEncoding::Deflate => HeaderValue::from_static("deflate"),
+            ContentEncoding::Zstd => HeaderValue::from_static("zstd"),
+            ContentEncoding::Identity => HeaderValue::from_static("identity"),
         }
     }
 }

 impl Default for ContentEncoding {
+    #[inline]
     fn default() -> Self {
         Self::Identity
     }
@@ -71,16 +77,18 @@ impl Default for ContentEncoding {
 impl FromStr for ContentEncoding {
     type Err = ContentEncodingParseError;

-    fn from_str(val: &str) -> Result<Self, Self::Err> {
-        let val = val.trim();
+    fn from_str(enc: &str) -> Result<Self, Self::Err> {
+        let enc = enc.trim();

-        if val.eq_ignore_ascii_case("br") {
-            Ok(ContentEncoding::Br)
-        } else if val.eq_ignore_ascii_case("gzip") {
+        if enc.eq_ignore_ascii_case("br") {
+            Ok(ContentEncoding::Brotli)
+        } else if enc.eq_ignore_ascii_case("gzip") {
             Ok(ContentEncoding::Gzip)
-        } else if val.eq_ignore_ascii_case("deflate") {
+        } else if enc.eq_ignore_ascii_case("deflate") {
             Ok(ContentEncoding::Deflate)
-        } else if val.eq_ignore_ascii_case("zstd") {
+        } else if enc.eq_ignore_ascii_case("identity") {
+            Ok(ContentEncoding::Identity)
+        } else if enc.eq_ignore_ascii_case("zstd") {
             Ok(ContentEncoding::Zstd)
         } else {
             Err(ContentEncodingParseError)
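With the `Auto` variant gone and `Identity` now parsed explicitly, `FromStr` covers every remaining variant. A small sketch of the parsing behavior after this change:

use std::str::FromStr as _;

use actix_http::header::ContentEncoding;

fn main() {
    // Parsing trims surrounding whitespace and is ASCII case-insensitive.
    assert_eq!(ContentEncoding::from_str(" BR ").unwrap(), ContentEncoding::Brotli);
    assert_eq!(ContentEncoding::from_str("identity").unwrap(), ContentEncoding::Identity);
    assert_eq!(ContentEncoding::Zstd.as_str(), "zstd");

    // Unknown codings are an error rather than a catch-all variant.
    assert!(ContentEncoding::from_str("bzip2").is_err());
}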
@@ -12,7 +12,7 @@ use crate::header::{Charset, HTTP_VALUE};
 /// - A character sequence representing the actual value (`value`), separated by single quotes.
 ///
 /// It is defined in [RFC 5987 §3.2](https://datatracker.ietf.org/doc/html/rfc5987#section-3.2).
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub struct ExtendedValue {
     /// The character set that is used to encode the `value` to a string.
     pub charset: Charset,
@@ -4,8 +4,7 @@ use bytes::BytesMut;
 use http::header::{HeaderValue, InvalidHeaderValue};

 use crate::{
-    config::DATE_VALUE_LENGTH, error::ParseError, header::TryIntoHeaderValue,
-    helpers::MutWriter,
+    date::DATE_VALUE_LENGTH, error::ParseError, header::TryIntoHeaderValue, helpers::MutWriter,
 };

 /// A timestamp with HTTP-style formatting and parsing.
@@ -25,8 +24,7 @@ impl FromStr for HttpDate {

 impl fmt::Display for HttpDate {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let date_str = httpdate::fmt_http_date(self.0);
-        f.write_str(&date_str)
+        httpdate::HttpDate::from(self.0).fmt(f)
     }
 }

@@ -38,7 +36,7 @@ impl TryIntoHeaderValue for HttpDate {
         let mut wrt = MutWriter(&mut buf);

         // unwrap: date output is known to be well formed and of known length
-        write!(wrt, "{}", httpdate::fmt_http_date(self.0)).unwrap();
+        write!(wrt, "{}", self).unwrap();

         HeaderValue::from_maybe_shared(buf.split().freeze())
     }
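Both changes lean on the `httpdate` crate's own `HttpDate` type, which formats an IMF-fixdate directly instead of allocating an intermediate `String`. A tiny sketch of the behavior being delegated to, assuming `httpdate` as a direct dependency:

use std::time::SystemTime;

fn main() {
    // `httpdate::HttpDate` implements `Display` with IMF-fixdate formatting.
    let epoch = httpdate::HttpDate::from(SystemTime::UNIX_EPOCH);
    assert_eq!(epoch.to_string(), "Thu, 01 Jan 1970 00:00:00 GMT");
}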
@@ -1,5 +1,7 @@
 //! Originally taken from `hyper::header::shared`.

+pub use language_tags::LanguageTag;
+
 mod charset;
 mod content_encoding;
 mod extended;
@@ -7,10 +9,11 @@ mod http_date;
 mod quality;
 mod quality_item;

-pub use self::charset::Charset;
-pub use self::content_encoding::ContentEncoding;
-pub use self::extended::{parse_extended_value, ExtendedValue};
-pub use self::http_date::HttpDate;
-pub use self::quality::{q, Quality};
-pub use self::quality_item::QualityItem;
-pub use language_tags::LanguageTag;
+pub use self::{
+    charset::Charset,
+    content_encoding::ContentEncoding,
+    extended::{parse_extended_value, ExtendedValue},
+    http_date::HttpDate,
+    quality::{q, Quality},
+    quality_item::QualityItem,
+};
Some files were not shown because too many files have changed in this diff.