Commit 347f34d

Author: glendc
improve codebase based on tower-http's efforts (David)
1 parent 13b210c commit 347f34d

16 files changed, +287 -246 lines changed


tower-async-http/src/auth/add_authorization.rs (+3 -1)

@@ -178,6 +178,8 @@ where
 
 #[cfg(test)]
 mod tests {
+    use std::convert::Infallible;
+
     #[allow(unused_imports)]
     use super::*;
 
@@ -225,7 +227,7 @@ mod tests {
             let auth = request.headers().get(http::header::AUTHORIZATION).unwrap();
             assert!(auth.is_sensitive());
 
-            Ok::<_, hyper::Error>(Response::new(Body::empty()))
+            Ok::<_, Infallible>(Response::new(Body::empty()))
         });
 
         let client = AddAuthorization::bearer(svc, "foo").as_sensitive(true);
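The switch from `hyper::Error` to `Infallible` reflects that the test's inner service can never fail once the body type no longer carries a hyper error. A minimal sketch of the same pattern, using http-body-util's `Full<Bytes>` as a stand-in for the crate's internal test `Body` (the handler name is illustrative, not the crate's code):

use std::convert::Infallible;

use bytes::Bytes;
use http::{Request, Response};
use http_body_util::Full;

// Sketch only: the handler never fails, so `Infallible` replaces `hyper::Error`
// as the service's error type. `Full<Bytes>` stands in for the crate's test `Body`.
async fn handle(_req: Request<Full<Bytes>>) -> Result<Response<Full<Bytes>>, Infallible> {
    // Mirrors `Response::new(Body::empty())` from the test above.
    Ok(Response::new(Full::new(Bytes::new())))
}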

tower-async-http/src/classify/grpc_errors_as_failures.rs (+1 -2)

@@ -125,8 +125,7 @@ impl GrpcCodeBitmask {
 ///
 /// Responses are considered successful if
 ///
-/// - `grpc-status` header value matches the defines success codes in [`GrpcErrorsAsFailures`] (only `Ok` by
-///   default).
+/// - `grpc-status` header value contains a successs value.
 /// - `grpc-status` header is missing.
 /// - `grpc-status` header value isn't a valid `String`.
 /// - `grpc-status` header value can't parsed into an `i32`.
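Spelled out as code, the rules this doc comment lists look roughly like the following hypothetical helper (not the crate's actual implementation; only the default `Ok` success code is considered here):

use http::HeaderMap;

// Hypothetical illustration of the rules above: a response counts as a gRPC
// success when the `grpc-status` header is absent, unreadable, unparsable,
// or equal to the `Ok` code (0), the only success code by default.
fn is_grpc_success(headers: &HeaderMap) -> bool {
    match headers.get("grpc-status") {
        None => true, // header missing
        Some(value) => match value.to_str().ok().and_then(|s| s.parse::<i32>().ok()) {
            None => true,            // not a valid `String`, or can't be parsed into an `i32`
            Some(code) => code == 0, // `Ok`
        },
    }
}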

tower-async-http/src/compression/body.rs (+4 -7)

@@ -157,13 +157,10 @@ where
             #[cfg(feature = "compression-zstd")]
             BodyInnerProj::Zstd { inner } => inner.poll_frame(cx),
             BodyInnerProj::Identity { inner } => match ready!(inner.poll_frame(cx)) {
-                Some(Ok(frame)) => match frame.into_data() {
-                    Ok(mut buf) => {
-                        let bytes = buf.copy_to_bytes(buf.remaining());
-                        Poll::Ready(Some(Ok(Frame::data(bytes))))
-                    }
-                    Err(_) => Poll::Ready(None),
-                },
+                Some(Ok(frame)) => {
+                    let frame = frame.map_data(|mut buf| buf.copy_to_bytes(buf.remaining()));
+                    Poll::Ready(Some(Ok(frame)))
+                }
                 Some(Err(err)) => Poll::Ready(Some(Err(err.into()))),
                 None => Poll::Ready(None),
             },
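The rewritten arm uses `http_body::Frame::map_data`, which converts a data frame's buffer type while passing trailer frames through untouched, so non-data frames are no longer silently dropped the way the old `into_data()` branch did. A minimal sketch of that call, with a hypothetical helper built on the `bytes` and `http-body` 1.0 APIs:

use bytes::{Buf, Bytes};
use http_body::Frame;

// Sketch: copy whatever buffer type the inner body yields into `Bytes`,
// leaving trailer frames exactly as they are.
fn into_bytes_frame<B: Buf>(frame: Frame<B>) -> Frame<Bytes> {
    frame.map_data(|mut buf| buf.copy_to_bytes(buf.remaining()))
}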

tower-async-http/src/compression/layer.rs (+7 -18)

@@ -124,13 +124,12 @@ impl CompressionLayer {
 mod tests {
     use super::*;
 
-    use crate::test_helpers::{Body, TowerHttpBodyExt};
+    use crate::test_helpers::Body;
 
     use http::{header::ACCEPT_ENCODING, Request, Response};
-    use tokio::fs::File;
-    // for Body::data
-    use bytes::{Bytes, BytesMut};
+    use http_body_util::BodyExt;
     use std::convert::Infallible;
+    use tokio::fs::File;
     use tokio_util::io::ReaderStream;
     use tower_async::{Service, ServiceBuilder};
 
@@ -167,13 +166,8 @@ mod tests {
         assert_eq!(response.headers()["content-encoding"], "deflate");
 
         // Read the body
-        let mut body = response.into_body();
-        let mut bytes = BytesMut::new();
-        while let Some(chunk) = body.data().await {
-            let chunk = chunk?;
-            bytes.extend_from_slice(&chunk[..]);
-        }
-        let bytes: Bytes = bytes.freeze();
+        let body = response.into_body();
+        let bytes = body.collect().await.unwrap().to_bytes();
 
         let deflate_bytes_len = bytes.len();
 
@@ -197,13 +191,8 @@ mod tests {
         assert_eq!(response.headers()["content-encoding"], "br");
 
         // Read the body
-        let mut body = response.into_body();
-        let mut bytes = BytesMut::new();
-        while let Some(chunk) = body.data().await {
-            let chunk = chunk?;
-            bytes.extend_from_slice(&chunk[..]);
-        }
-        let bytes: Bytes = bytes.freeze();
+        let body = response.into_body();
+        let bytes = body.collect().await.unwrap().to_bytes();
 
         let br_byte_length = bytes.len();
 
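The manual `while let Some(chunk) = body.data().await` loops are replaced by `http_body_util::BodyExt::collect`, which drains every frame and exposes the concatenated data through `Collected::to_bytes`. A minimal sketch of the pattern, using `Full<Bytes>` in place of the crate's test `Body` (function name is illustrative only):

use bytes::Bytes;
use http_body_util::{BodyExt, Full};

// Sketch: collect() buffers the whole body; to_bytes() concatenates its data frames.
async fn read_whole_body() -> Bytes {
    let body = Full::new(Bytes::from_static(b"Hello, World!"));
    body.collect().await.unwrap().to_bytes()
}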

tower-async-http/src/compression/mod.rs (+29 -47)

@@ -88,13 +88,14 @@ mod tests {
     use super::*;
 
     use crate::compression::predicate::SizeAbove;
-    use crate::test_helpers::{Body, TowerHttpBodyExt};
+    use crate::test_helpers::{Body, WithTrailers};
 
     use async_compression::tokio::write::{BrotliDecoder, BrotliEncoder};
-    use bytes::BytesMut;
     use flate2::read::GzDecoder;
     use http::header::{ACCEPT_ENCODING, CONTENT_ENCODING, CONTENT_TYPE};
-    use hyper::{Error, Request, Response};
+    use http::{HeaderMap, HeaderName, Request, Response};
+    use http_body_util::BodyExt;
+    use std::convert::Infallible;
     use std::io::Read;
     use std::sync::{Arc, RwLock};
     use tokio::io::{AsyncReadExt, AsyncWriteExt};
@@ -127,13 +128,9 @@ mod tests {
         let res = svc.call(req).await.unwrap();
 
         // read the compressed body
-        let mut body = res.into_body();
-        let mut data = BytesMut::new();
-        while let Some(chunk) = body.data().await {
-            let chunk = chunk.unwrap();
-            data.extend_from_slice(&chunk[..]);
-        }
-        let compressed_data = data.freeze().to_vec();
+        let collected = res.into_body().collect().await.unwrap();
+        let trailers = collected.trailers().cloned().unwrap();
+        let compressed_data = collected.to_bytes();
 
         // decompress the body
         // doing this with flate2 as that is much easier than async-compression and blocking during
@@ -143,6 +140,9 @@ mod tests {
         decoder.read_to_string(&mut decompressed).unwrap();
 
         assert_eq!(decompressed, "Hello, World!");
+
+        // trailers are maintained
+        assert_eq!(trailers["foo"], "bar");
     }
 
     #[tokio::test]
@@ -158,13 +158,8 @@ mod tests {
         let res = svc.call(req).await.unwrap();
 
         // read the compressed body
-        let mut body = res.into_body();
-        let mut data = BytesMut::new();
-        while let Some(chunk) = body.data().await {
-            let chunk = chunk.unwrap();
-            data.extend_from_slice(&chunk[..]);
-        }
-        let compressed_data = data.freeze().to_vec();
+        let body = res.into_body();
+        let compressed_data = body.collect().await.unwrap().to_bytes();
 
         // decompress the body
         let decompressed = zstd::stream::decode_all(std::io::Cursor::new(compressed_data)).unwrap();
@@ -215,12 +210,8 @@ mod tests {
         );
 
         // read the compressed body
-        let mut body = res.into_body();
-        let mut data = BytesMut::new();
-        while let Some(chunk) = body.data().await {
-            let chunk = chunk.unwrap();
-            data.extend_from_slice(&chunk[..]);
-        }
+        let body = res.into_body();
+        let data = body.collect().await.unwrap().to_bytes();
 
         // decompress the body
         let data = {
@@ -237,8 +228,11 @@ mod tests {
         assert_eq!(data, DATA.as_bytes());
     }
 
-    async fn handle(_req: Request<Body>) -> Result<Response<Body>, Error> {
-        Ok(Response::new(Body::from("Hello, World!")))
+    async fn handle(_req: Request<Body>) -> Result<Response<WithTrailers<Body>>, Infallible> {
+        let mut trailers = HeaderMap::new();
+        trailers.insert(HeaderName::from_static("foo"), "bar".parse().unwrap());
+        let body = Body::from("Hello, World!").with_trailers(trailers);
+        Ok(Response::builder().body(body).unwrap())
     }
 
     #[tokio::test]
@@ -259,6 +253,7 @@ mod tests {
     #[derive(Default, Clone)]
     struct EveryOtherResponse(Arc<RwLock<u64>>);
 
+    #[allow(clippy::dbg_macro)]
    impl Predicate for EveryOtherResponse {
         fn should_compress<B>(&self, _: &http::Response<B>) -> bool
         where
@@ -279,12 +274,8 @@ mod tests {
         let res = svc.call(req).await.unwrap();
 
         // read the uncompressed body
-        let mut body = res.into_body();
-        let mut data = BytesMut::new();
-        while let Some(chunk) = body.data().await {
-            let chunk = chunk.unwrap();
-            data.extend_from_slice(&chunk[..]);
-        }
+        let body = res.into_body();
+        let data = body.collect().await.unwrap().to_bytes();
         let still_uncompressed = String::from_utf8(data.to_vec()).unwrap();
         assert_eq!(DATA, &still_uncompressed);
 
@@ -296,18 +287,14 @@ mod tests {
         let res = svc.call(req).await.unwrap();
 
         // read the compressed body
-        let mut body = res.into_body();
-        let mut data = BytesMut::new();
-        while let Some(chunk) = body.data().await {
-            let chunk = chunk.unwrap();
-            data.extend_from_slice(&chunk[..]);
-        }
+        let body = res.into_body();
+        let data = body.collect().await.unwrap().to_bytes();
         assert!(String::from_utf8(data.to_vec()).is_err());
     }
 
     #[tokio::test]
     async fn doesnt_compress_images() {
-        async fn handle(_req: Request<Body>) -> Result<Response<Body>, Error> {
+        async fn handle(_req: Request<Body>) -> Result<Response<Body>, Infallible> {
             let mut res = Response::new(Body::from(
                 "a".repeat((SizeAbove::DEFAULT_MIN_SIZE * 2) as usize),
             ));
@@ -332,7 +319,7 @@ mod tests {
 
     #[tokio::test]
     async fn does_compress_svg() {
-        async fn handle(_req: Request<Body>) -> Result<Response<Body>, Error> {
+        async fn handle(_req: Request<Body>) -> Result<Response<Body>, Infallible> {
             let mut res = Response::new(Body::from(
                 "a".repeat((SizeAbove::DEFAULT_MIN_SIZE * 2) as usize),
             ));
@@ -377,13 +364,8 @@ mod tests {
         let res = svc.call(req).await.unwrap();
 
         // read the compressed body
-        let mut body = res.into_body();
-        let mut data = BytesMut::new();
-        while let Some(chunk) = body.data().await {
-            let chunk = chunk.unwrap();
-            data.extend_from_slice(&chunk[..]);
-        }
-        let compressed_data = data.freeze().to_vec();
+        let body = res.into_body();
+        let compressed_data = body.collect().await.unwrap().to_bytes();
 
         // build the compressed body with the same quality level
         let compressed_with_level = {
@@ -401,7 +383,7 @@ mod tests {
         };
 
         assert_eq!(
-            compressed_data.as_slice(),
+            compressed_data,
             compressed_with_level.as_slice(),
             "Compression level is not respected"
         );
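The new trailer assertion works because `Collected` also retains any trailer frames it saw while draining the body; the test reads `trailers()` by reference before `to_bytes()` consumes the collected body. A small sketch of that behaviour, assuming `http-body-util`'s `StreamBody` and the `futures-util` crate rather than the crate's own `WithTrailers` test helper (the function name is illustrative):

use std::convert::Infallible;

use bytes::Bytes;
use futures_util::stream;
use http::{HeaderMap, HeaderValue};
use http_body::Frame;
use http_body_util::{BodyExt, StreamBody};

// Sketch: a body that yields one data frame and one trailers frame; collect()
// keeps both, so trailers() is checked before to_bytes() consumes the result,
// matching the order used in the gzip test above.
async fn collect_keeps_trailers() {
    let mut trailers = HeaderMap::new();
    trailers.insert("foo", HeaderValue::from_static("bar"));

    let frames = stream::iter(vec![
        Ok::<_, Infallible>(Frame::data(Bytes::from_static(b"Hello, World!"))),
        Ok(Frame::trailers(trailers)),
    ]);
    let body = StreamBody::new(frames);

    let collected = body.collect().await.unwrap();
    assert_eq!(collected.trailers().unwrap()["foo"], "bar");
    assert_eq!(collected.to_bytes(), Bytes::from_static(b"Hello, World!"));
}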
