
Commit 6402a3c
Changes for new iron version with hyper 0.12 and removed limit_reader
zgtm committed Feb 11, 2019
1 parent 49ae53c commit 6402a3c
Showing 4 changed files with 16 additions and 60 deletions.
examples/bodyparser.rs (6 changes: 3 additions & 3 deletions)
@@ -6,7 +6,7 @@ extern crate serde;
 extern crate serde_derive;
 
 use persistent::Read;
-use iron::status;
+use iron::StatusCode;
 use iron::prelude::*;
 
 #[derive(Deserialize, Debug, Clone)]
@@ -37,7 +37,7 @@ fn log_body(req: &mut Request) -> IronResult<Response> {
         Err(err) => println!("Error: {:?}", err)
     }
 
-    Ok(Response::with(status::Ok))
+    Ok(Response::with(StatusCode::OK))
 }
 
 const MAX_BODY_LENGTH: usize = 1024 * 1024 * 10;
@@ -51,5 +51,5 @@ const MAX_BODY_LENGTH: usize = 1024 * 1024 * 10;
 fn main() {
     let mut chain = Chain::new(log_body);
     chain.link_before(Read::<bodyparser::MaxBodyLength>::one(MAX_BODY_LENGTH));
-    Iron::new(chain).http("localhost:3000").unwrap();
+    Iron::new(chain).http("localhost:3000");
 }
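Two API shifts show up in this example: iron's old `status::Ok` becomes the http-crate style `StatusCode::OK`, and `Iron::http` apparently no longer returns a `Result` in this branch, so the trailing `.unwrap()` goes away. For orientation, a minimal sketch of a handler consuming the parsed body under the new API; the `Person` type, its field, and the handler name are illustrative assumptions, not part of this commit:

extern crate bodyparser;
extern crate iron;
extern crate persistent;
#[macro_use]
extern crate serde_derive;

use iron::prelude::*;
use iron::StatusCode;
use persistent::Read;

#[derive(Deserialize, Debug, Clone)]
struct Person {
    name: String,
}

// Hypothetical handler: deserializes the JSON body into Person via the
// Struct plugin and greets by name.
fn greet(req: &mut Request) -> IronResult<Response> {
    match req.get::<bodyparser::Struct<Person>>() {
        Ok(Some(person)) => {
            Ok(Response::with((StatusCode::OK, format!("Hello, {}!", person.name))))
        }
        Ok(None) => Ok(Response::with((StatusCode::BAD_REQUEST, "empty body"))),
        Err(_) => Ok(Response::with((StatusCode::BAD_REQUEST, "could not parse body"))),
    }
}

fn main() {
    let mut chain = Chain::new(greet);
    chain.link_before(Read::<bodyparser::MaxBodyLength>::one(1024 * 1024));
    Iron::new(chain).http("localhost:3000");
}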
src/errors.rs (3 changes: 1 addition & 2 deletions)
@@ -1,14 +1,13 @@
 use std::error::Error as StdError;
 use std::fmt;
-use std::io;
 use std::str;
 
 use serde_json;
 
 #[derive(Debug)]
 pub enum BodyErrorCause {
     Utf8Error(str::Utf8Error),
-    IoError(io::Error),
+    IoError(iron::error::HttpError),
     JsonError(serde_json::Error),
 }

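The practical effect for downstream code: the `IoError` variant now carries iron's `HttpError` (hyper's error type in this branch) rather than `std::io::Error`, so matches on the cause must update. A sketch of such a caller; the `cause` field name is an assumption here, since only `detail` appears in this diff:

use bodyparser::{BodyError, BodyErrorCause};

// Hypothetical helper turning a parse failure into a log line; assumes
// BodyError still exposes `detail` and a `cause: BodyErrorCause` field.
fn describe(err: &BodyError) -> String {
    match err.cause {
        BodyErrorCause::Utf8Error(ref e) => format!("{}: invalid UTF-8 ({})", err.detail, e),
        BodyErrorCause::IoError(ref e) => format!("{}: transport error ({})", err.detail, e),
        BodyErrorCause::JsonError(ref e) => format!("{}: bad JSON ({})", err.detail, e),
    }
}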
src/lib.rs (28 changes: 12 additions & 16 deletions)
@@ -17,21 +17,19 @@ use iron::mime;
 use iron::prelude::*;
 use iron::headers;
 use iron::typemap::{Key};
-use std::io::Read;
 use std::any::Any;
 use std::marker;
 
 pub use self::errors::{BodyError, BodyErrorCause};
-pub use self::limit_reader::{LimitReader};
 
 mod errors;
-mod limit_reader;
 
-fn read_body_as_utf8(req: &mut Request, limit: usize) -> Result<String, errors::BodyError> {
-    let mut bytes = Vec::new();
-    match LimitReader::new(req.body.by_ref(), limit).read_to_end(&mut bytes) {
-        Ok(_) => {
-            match String::from_utf8(bytes) {
+/// This implementation currently ignores the limit parameter, since Iron's
+/// request::get_body_contents() reads the data without applying any limit.
+fn read_body_as_utf8(req: &mut Request, _limit: usize) -> Result<String, errors::BodyError> {
+    match req.get_body_contents() {
+        Ok(bytes) => {
+            match String::from_utf8(bytes.to_vec()) {
                 Ok(e) => Ok(e),
                 Err(err) => Err(errors::BodyError {
                     detail: "Invalid UTF-8 sequence".to_string(),
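Note the trade-off the new doc comment admits: `get_body_contents()` buffers the entire body before any limit can act, so `MaxBodyLength` is silently ignored here. One way to at least restore the old error contract is a post-hoc length check; a self-contained sketch, where the `ReadError` type is invented for illustration (constructing an `HttpError` by hand may not be possible):

use std::str::{self, Utf8Error};

// Hypothetical after-the-fact limit check. The whole body is already in
// memory by the time it runs, so it restores the error behaviour of the
// removed LimitReader but not its memory bound.
#[derive(Debug)]
enum ReadError {
    LimitExceeded { len: usize, limit: usize },
    Utf8(Utf8Error),
}

fn body_to_utf8(bytes: &[u8], limit: usize) -> Result<String, ReadError> {
    if bytes.len() > limit {
        return Err(ReadError::LimitExceeded { len: bytes.len(), limit });
    }
    str::from_utf8(bytes)
        .map(|s| s.to_string())
        .map_err(ReadError::Utf8)
}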
@@ -62,15 +60,13 @@ impl Key for Raw {
 
 const DEFAULT_BODY_LIMIT: usize = 1024 * 1024 * 100;
 
-impl<'a, 'b> plugin::Plugin<Request<'a, 'b>> for Raw {
+impl plugin::Plugin<Request> for Raw {
     type Error = BodyError;
 
     fn eval(req: &mut Request) -> Result<Option<String>, BodyError> {
-        let need_read = req.headers.get::<headers::ContentType>().map(|header| {
-            match **header {
-                mime::Mime(mime::TopLevel::Multipart, mime::SubLevel::FormData, _) => false,
-                _ => true
-            }
+        let need_read = req.headers.get(headers::CONTENT_TYPE).map(|header| {
+            header.to_str().unwrap().parse::<mime::Mime>().unwrap()
+                != "multipart/form-data".parse::<mime::Mime>().unwrap()
         }).unwrap_or(false);
 
         if need_read {
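The rewritten `need_read` check calls `unwrap()` twice, so a request with a non-UTF-8 or unparseable Content-Type header would panic the handler. A panic-free variant is possible; a sketch under the assumption that `req.headers` is an http-crate `HeaderMap` in this branch (the function name is illustrative):

use http::header::{HeaderMap, CONTENT_TYPE};
use mime::Mime;

// Hypothetical replacement for the need_read computation: any header that
// fails UTF-8 or mime parsing counts as "no Content-Type". Comparing
// type_/subtype also ignores parameters such as the multipart boundary,
// which the strict Mime equality in the committed code does not.
fn should_read_body(headers: &HeaderMap) -> bool {
    headers
        .get(CONTENT_TYPE)
        .and_then(|value| value.to_str().ok())
        .and_then(|value| value.parse::<Mime>().ok())
        .map(|m| !(m.type_() == mime::MULTIPART && m.subtype() == mime::FORM_DATA))
        .unwrap_or(false)
}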
@@ -96,7 +92,7 @@ impl Key for Json {
     type Value = Option<serde_json::Value>;
 }
 
-impl<'a, 'b> plugin::Plugin<Request<'a, 'b>> for Json {
+impl plugin::Plugin<Request> for Json {
     type Error = BodyError;
 
     fn eval(req: &mut Request) -> Result<Option<serde_json::Value>, BodyError> {
@@ -122,7 +118,7 @@ impl<T> Key for Struct<T> where T: for<'a> Deserialize<'a> + Any {
     type Value = Option<T>;
 }
 
-impl<'a, 'b, T> plugin::Plugin<Request<'a, 'b>> for Struct<T>
+impl<T> plugin::Plugin<Request> for Struct<T>
     where T: for<'c> Deserialize<'c> + Any {
     type Error = BodyError;
 
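All three `Plugin` impls change the same way because `Request` apparently loses its `<'a, 'b>` lifetime parameters in the hyper-0.12 branch. A sketch of the identical migration applied to a hypothetical downstream plugin, reusing the trait shape this diff shows:

extern crate bodyparser;
extern crate iron;
extern crate plugin;

use iron::prelude::*;
use iron::typemap::Key;

// Hypothetical plugin that caches the raw body's length.
struct BodyLength;

impl Key for BodyLength {
    type Value = Option<usize>;
}

// Before the migration this header would have read:
//     impl<'a, 'b> plugin::Plugin<Request<'a, 'b>> for BodyLength { ... }
impl plugin::Plugin<Request> for BodyLength {
    type Error = bodyparser::BodyError;

    fn eval(req: &mut Request) -> Result<Option<usize>, bodyparser::BodyError> {
        // Delegate to the Raw plugin and measure the cached body string.
        req.get::<bodyparser::Raw>().map(|body| body.map(|s| s.len()))
    }
}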
src/limit_reader.rs (39 changes: 0 additions & 39 deletions)

This file was deleted.
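The deleted file's contents are not shown. For orientation, a limit reader is typically a thin `Read` adapter that enforces a byte budget while streaming, which is exactly what buffering via `get_body_contents()` gives up. A hedged reconstruction follows; this is one common design, not necessarily the deleted code, which may have truncated instead of erroring:

use std::io::{self, Read};

// Hypothetical reconstruction of a limit reader: wraps an inner reader and
// fails once more than `limit` bytes are produced, so an over-long body is
// rejected while streaming instead of being buffered whole.
pub struct LimitReader<R> {
    inner: R,
    remaining: usize,
}

impl<R: Read> LimitReader<R> {
    pub fn new(inner: R, limit: usize) -> LimitReader<R> {
        LimitReader { inner, remaining: limit }
    }
}

impl<R: Read> Read for LimitReader<R> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        if self.remaining == 0 {
            // Budget spent: probe one byte to tell "exactly at the limit"
            // (EOF, fine) apart from "over the limit" (error).
            let mut probe = [0u8; 1];
            return if self.inner.read(&mut probe)? == 0 {
                Ok(0)
            } else {
                Err(io::Error::new(io::ErrorKind::InvalidData, "body limit exceeded"))
            };
        }
        let max = buf.len().min(self.remaining);
        let n = self.inner.read(&mut buf[..max])?;
        self.remaining -= n;
        Ok(n)
    }
}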
