Address clippy lints #39

Open · wants to merge 15 commits into master
77 changes: 34 additions & 43 deletions speedy-derive/src/lib.rs
@@ -96,14 +96,14 @@ fn possibly_uses_generic_ty( generic_types: &[&syn::Ident], ty: &syn::Type ) ->
}
})
},
syn::Type::Slice( syn::TypeSlice { elem, .. } ) => possibly_uses_generic_ty( generic_types, &elem ),
syn::Type::Slice( syn::TypeSlice { elem, .. } ) => possibly_uses_generic_ty( generic_types, elem ),
syn::Type::Tuple( syn::TypeTuple { elems, .. } ) => elems.iter().any( |elem| possibly_uses_generic_ty( generic_types, elem ) ),
syn::Type::Reference( syn::TypeReference { elem, .. } ) => possibly_uses_generic_ty( generic_types, &elem ),
syn::Type::Paren( syn::TypeParen { elem, .. } ) => possibly_uses_generic_ty( generic_types, &elem ),
syn::Type::Ptr( syn::TypePtr { elem, .. } ) => possibly_uses_generic_ty( generic_types, &elem ),
syn::Type::Group( syn::TypeGroup { elem, .. } ) => possibly_uses_generic_ty( generic_types, &elem ),
syn::Type::Reference( syn::TypeReference { elem, .. } ) => possibly_uses_generic_ty( generic_types, elem ),
syn::Type::Paren( syn::TypeParen { elem, .. } ) => possibly_uses_generic_ty( generic_types, elem ),
syn::Type::Ptr( syn::TypePtr { elem, .. } ) => possibly_uses_generic_ty( generic_types, elem ),
syn::Type::Group( syn::TypeGroup { elem, .. } ) => possibly_uses_generic_ty( generic_types, elem ),
syn::Type::Array( syn::TypeArray { elem, len, .. } ) => {
if possibly_uses_generic_ty( generic_types, &elem ) {
if possibly_uses_generic_ty( generic_types, elem ) {
return true;
}

@@ -185,14 +185,14 @@ fn is_guaranteed_non_recursive( ty: &syn::Type ) -> bool {
_ => false
}
},
syn::Type::Slice( syn::TypeSlice { elem, .. } ) => is_guaranteed_non_recursive( &elem ),
syn::Type::Tuple( syn::TypeTuple { elems, .. } ) => elems.iter().all( |elem| is_guaranteed_non_recursive( elem ) ),
syn::Type::Reference( syn::TypeReference { elem, .. } ) => is_guaranteed_non_recursive( &elem ),
syn::Type::Paren( syn::TypeParen { elem, .. } ) => is_guaranteed_non_recursive( &elem ),
syn::Type::Ptr( syn::TypePtr { elem, .. } ) => is_guaranteed_non_recursive( &elem ),
syn::Type::Group( syn::TypeGroup { elem, .. } ) => is_guaranteed_non_recursive( &elem ),
syn::Type::Slice( syn::TypeSlice { elem, .. } ) => is_guaranteed_non_recursive( elem ),
syn::Type::Tuple( syn::TypeTuple { elems, .. } ) => elems.iter().all(is_guaranteed_non_recursive),
syn::Type::Reference( syn::TypeReference { elem, .. } ) => is_guaranteed_non_recursive( elem ),
syn::Type::Paren( syn::TypeParen { elem, .. } ) => is_guaranteed_non_recursive( elem ),
syn::Type::Ptr( syn::TypePtr { elem, .. } ) => is_guaranteed_non_recursive( elem ),
syn::Type::Group( syn::TypeGroup { elem, .. } ) => is_guaranteed_non_recursive( elem ),
syn::Type::Array( syn::TypeArray { elem, len, .. } ) => {
if !is_guaranteed_non_recursive( &elem ) {
if !is_guaranteed_non_recursive( elem ) {
return false;
}

@@ -671,11 +671,11 @@ struct Field< 'a > {

impl< 'a > Field< 'a > {
fn can_be_primitive( &self ) -> bool {
self.default_on_eof == false &&
!self.default_on_eof &&
self.length.is_none() &&
self.length_type.is_none() &&
self.skip == false &&
self.varint == false &&
!self.skip &&
!self.varint &&
self.constant_prefix.is_none()
}

@@ -726,7 +726,7 @@ impl< 'a > Field< 'a > {
}

fn is_guaranteed_non_recursive( &self ) -> bool {
is_guaranteed_non_recursive( &self.raw_ty )
is_guaranteed_non_recursive( self.raw_ty )
}
}

@@ -831,18 +831,13 @@ impl syn::parse::Parse for FieldAttribute {
},
syn::Expr::Unary( syn::ExprUnary { op: syn::UnOp::Neg(_), expr, .. } ) => {
match *expr {
syn::Expr::Lit( syn::ExprLit { lit: literal, .. } ) => {
match literal {
syn::Lit::Int( literal ) => {
if literal.suffix() == "i8" {
vec![ (literal.base10_parse::< i8 >().unwrap() * -1) as u8 ]
} else if literal.suffix() == "u8" {
return generic_error()
} else {
return Err( syn::Error::new( value_span, "integers are not supported; if you want to use a single byte constant then append either 'u8' or 'i8' to it" ) );
}
},
_ => return generic_error()
syn::Expr::Lit( syn::ExprLit { lit: syn::Lit::Int( literal ), .. } ) => {
if literal.suffix() == "i8" {
vec![ -literal.base10_parse::< i8 >().unwrap() as u8 ]
} else if literal.suffix() == "u8" {
return generic_error()
} else {
return Err( syn::Error::new( value_span, "integers are not supported; if you want to use a single byte constant then append either 'u8' or 'i8' to it" ) );
}
},
_ => return generic_error()
@@ -1172,8 +1167,7 @@ fn get_fields< 'a, I: IntoIterator< Item = &'a syn::Field > + 'a >( fields: I )
}
}

if length_type.is_some() && length.is_some() {
let (key_span, _) = length_type.unwrap();
if let (Some((key_span, _)), Some(_)) = (length_type, &length) {
let message = "You cannot have both 'length_type' and 'length' on the same field";
return Err( syn::Error::new( key_span, message ) );
}
@@ -1595,7 +1589,7 @@ fn read_field_body( field: &Field ) -> TokenStream {
}
}

fn readable_body< 'a >( types: &mut Vec< syn::Type >, st: &Struct< 'a > ) -> (TokenStream, TokenStream, TokenStream) {
fn readable_body( types: &mut Vec< syn::Type >, st: &Struct ) -> (TokenStream, TokenStream, TokenStream) {
let mut field_names = Vec::new();
let mut field_readers = Vec::new();
let mut minimum_bytes_needed = Vec::new();
@@ -1607,7 +1601,7 @@ fn readable_body< 'a >( types: &mut Vec< syn::Type >, st: &Struct< 'a > ) -> (To
field_names.push( name );
types.extend( field.bound_types() );

if let Some( minimum_bytes ) = get_minimum_bytes( &field ) {
if let Some( minimum_bytes ) = get_minimum_bytes( field ) {
minimum_bytes_needed.push( minimum_bytes );
}
}
@@ -1732,15 +1726,15 @@ fn write_field_body( field: &Field ) -> TokenStream {
body
}

fn writable_body< 'a >( types: &mut Vec< syn::Type >, st: &Struct< 'a > ) -> (TokenStream, TokenStream) {
fn writable_body( types: &mut Vec< syn::Type >, st: &Struct ) -> (TokenStream, TokenStream) {
let mut field_names = Vec::new();
let mut field_writers = Vec::new();
for field in &st.fields {
if field.skip {
continue;
}

let write_value = write_field_body( &field );
let write_value = write_field_body( field );
types.extend( field.bound_types() );

field_names.push( field.var_name().clone() );
@@ -1780,7 +1774,7 @@ impl< 'a > Enum< 'a > {
let attrs = collect_enum_attributes( attrs )?;
let tag_type = attrs.tag_type.unwrap_or( DEFAULT_ENUM_TAG_TYPE );
let max = match tag_type {
BasicType::U7 => 0b01111111 as u64,
BasicType::U7 => 0b01111111_u64,
BasicType::U8 => std::u8::MAX as u64,
BasicType::U16 => std::u16::MAX as u64,
BasicType::U32 => std::u32::MAX as u64,
@@ -1865,7 +1859,6 @@ impl< 'a > Enum< 'a > {
quote! { #tag }
},
BasicType::U64 | BasicType::VarInt64 => {
let tag = tag as u64;
quote! { #tag }
}
};
@@ -2133,7 +2126,7 @@ fn impl_readable( input: syn::DeriveInput ) -> Result< TokenStream, syn::Error >
(reader_body, minimum_bytes, impl_primitive, impl_zerocopyable)
},
syn::Data::Enum( syn::DataEnum { variants, .. } ) => {
let enumeration = Enum::new( &name, &input.attrs, &variants )?;
let enumeration = Enum::new( name, &input.attrs, variants )?;
let mut variant_matches = Vec::with_capacity( variants.len() );
let mut variant_minimum_sizes = Vec::with_capacity( variants.len() );
for variant in enumeration.variants {
@@ -2148,10 +2141,8 @@ }
}
});

if variant.structure.kind != StructKind::Unit {
if variant.structure.is_guaranteed_non_recursive() {
variant_minimum_sizes.push( minimum_bytes );
}
if variant.structure.kind != StructKind::Unit && variant.structure.is_guaranteed_non_recursive() {
variant_minimum_sizes.push( minimum_bytes );
}
}

@@ -2306,7 +2297,7 @@ fn impl_writable( input: syn::DeriveInput ) -> Result< TokenStream, syn::Error >
(impl_body, impl_primitive)
},
syn::Data::Enum( syn::DataEnum { ref variants, .. } ) => {
let enumeration = Enum::new( &name, &input.attrs, &variants )?;
let enumeration = Enum::new( name, &input.attrs, variants )?;
let tag_writer = match enumeration.tag_type {
BasicType::U64 => quote! { write_u64 },
BasicType::U32 => quote! { write_u32 },
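The bulk of the speedy-derive changes above drop borrows flagged by clippy's needless_borrow (the matched `elem` bindings are already references, so the extra `&` only adds a layer that deref coercion has to peel off again) and replace literal bool comparisons such as `x == false` with negation. A minimal standalone sketch of both patterns — `is_small` and `check` are invented for illustration, not part of the crate:

```rust
// Illustrative sketch of the lints fixed above, not the crate's real types.
fn is_small(n: &u32) -> bool {
    *n < 10
}

fn check(values: &[u32]) -> bool {
    // Before: values.iter().all(|v| is_small(&v))  // `v` is already `&u32`
    // After (what this PR does throughout):
    values.iter().all(|v| is_small(v))
    // ...which clippy::redundant_closure would further reduce to:
    // values.iter().all(is_small)
}

fn main() {
    assert!(check(&[1, 2, 3]));

    // clippy::bool_comparison: prefer `!flag` over `flag == false`.
    let flag = false;
    assert!(!flag);
}
```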
4 changes: 2 additions & 2 deletions src/circular_buffer.rs
@@ -334,7 +334,7 @@ impl CircularBuffer {

self.buffer[ range_1.clone() ].copy_from_slice( &slice[ ..range_1.len() ] );
if let Some( range_2 ) = range_2 {
self.buffer[ range_2.clone() ].copy_from_slice( &slice[ range_1.len().. ] );
self.buffer[ range_2 ].copy_from_slice( &slice[ range_1.len().. ] );
}

self.length += slice.len();
@@ -506,7 +506,7 @@ quickcheck::quickcheck! {
assert_eq!( buffer.len(), control_buffer.len() );
assert_eq!( buffer.to_vec(), control_buffer );

if buffer.len() > 0 {
if !buffer.is_empty() {
let expected = control_buffer.remove( 0 );
let mut actual = [!expected];
buffer.consume_into( &mut actual );
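The circular-buffer changes apply clippy's len_zero lint (`len() > 0` → `!is_empty()`) and drop a `clone()` of an index range that is only used once. A small sketch of the len_zero pattern, using a plain `Vec<u8>` rather than `CircularBuffer`:

```rust
// Standalone sketch of the len_zero lint fixed above.
fn drain_one(buffer: &mut Vec<u8>) -> Option<u8> {
    // Before: if buffer.len() > 0 { ... }
    // After:
    if !buffer.is_empty() {
        Some(buffer.remove(0))
    } else {
        None
    }
}

fn main() {
    let mut buf = vec![1, 2, 3];
    assert_eq!(drain_one(&mut buf), Some(1));
    assert_eq!(buf, vec![2, 3]);
}
```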
2 changes: 1 addition & 1 deletion src/ext_chrono.rs
@@ -20,7 +20,7 @@ impl< 'a, C > Readable< 'a, C > for DateTime< Utc >
fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > {
let seconds = reader.read_i64()?;
let subsec_nanos = reader.read_u32()?;
Ok( Utc.timestamp( seconds, subsec_nanos ) )
Ok( Utc.timestamp_opt( seconds, subsec_nanos ).unwrap() )
}

#[inline]
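chrono has deprecated the panicking `TimeZone::timestamp` constructor in favour of `timestamp_opt`, which returns a `LocalResult`; unwrapping it preserves the old behaviour, and for `Utc` the mapping is unambiguous. Roughly (the exact chrono version that introduced the deprecation is an assumption):

```rust
use chrono::{DateTime, TimeZone, Utc};

fn decode_timestamp(seconds: i64, subsec_nanos: u32) -> DateTime<Utc> {
    // `timestamp_opt` returns LocalResult<DateTime<Utc>>; for Utc it is
    // single-valued, so unwrapping mirrors the deprecated `Utc.timestamp(...)`.
    Utc.timestamp_opt(seconds, subsec_nanos).unwrap()
}

fn main() {
    let dt = decode_timestamp(123, 222);
    assert_eq!(dt.timestamp(), 123);
    assert_eq!(dt.timestamp_subsec_nanos(), 222);
}
```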
2 changes: 1 addition & 1 deletion src/lib.rs
@@ -226,7 +226,7 @@ mod tests {

impl io::Read for TestStream {
fn read( &mut self, output: &mut [u8] ) -> Result< usize, io::Error > {
if self.position >= self.buffer.len() || output.len() == 0 {
if self.position >= self.buffer.len() || output.is_empty() {
return Ok(0);
}

12 changes: 6 additions & 6 deletions src/readable.rs
@@ -120,7 +120,7 @@ impl< 'a, C: Context > Reader< 'a, C > for BufferReader< 'a, C > {
}

#[inline(always)]
fn read_bytes_borrowed_from_reader< 'r >( &'r mut self, length: usize ) -> Option< Result< &'r [u8], C::Error > > {
fn read_bytes_borrowed_from_reader( &mut self, length: usize ) -> Option< Result< &[u8], C::Error > > {
if self.can_read_at_least( length ) == Some( false ) {
return Some( Err( error_end_of_input() ) );
}
@@ -250,7 +250,7 @@ impl< 'ctx, 'r, 'a, C: Context > Reader< 'r, C > for CopyingBufferReader< 'ctx,
}

#[inline(always)]
fn read_bytes_borrowed_from_reader< 'reader >( &'reader mut self, length: usize ) -> Option< Result< &'reader [u8], C::Error > > {
fn read_bytes_borrowed_from_reader( &mut self, length: usize ) -> Option< Result< &[u8], C::Error > > {
if self.can_read_at_least( length ) == Some( false ) {
return Some( Err( error_end_of_input() ) );
}
@@ -271,12 +271,12 @@ impl< 'ctx, 'r, 'a, C: Context > Reader< 'r, C > for CopyingBufferReader< 'ctx,

#[inline(always)]
fn context( &self ) -> &C {
&self.context
self.context
}

#[inline(always)]
fn context_mut( &mut self ) -> &mut C {
&mut self.context
self.context
}
}

@@ -287,7 +287,7 @@ struct StreamReader< C: Context, S: Read > {
is_buffering: bool
}

impl< 'a, C, S > StreamReader< C, S > where C: Context, S: Read {
impl< C, S > StreamReader< C, S > where C: Context, S: Read {
#[inline(never)]
fn read_bytes_slow( &mut self, mut output: &mut [u8] ) -> Result< (), C::Error > {
if self.is_buffering && output.len() < self.buffer.capacity() {
@@ -314,7 +314,7 @@ impl< 'a, C, S > StreamReader< C, S > where C: Context, S: Read {
}
}

if self.buffer.len() > 0 {
if !self.buffer.is_empty() {
let length = std::cmp::min( self.buffer.len(), output.len() );
self.buffer.consume_into( &mut output[ ..length ] );
output = &mut output[ length.. ];
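The reader changes mostly address clippy's needless_lifetimes (the explicit `'r`/`'reader` lifetimes can be elided because they only tie the returned slice to `&self`), plus the same needless_borrow pattern on `context`, which is presumably already a reference field in these readers. A minimal sketch of the lifetime elision, with an invented `Buf` type standing in for the crate's readers:

```rust
struct Buf {
    data: Vec<u8>,
}

impl Buf {
    // Before (clippy::needless_lifetimes):
    // fn peek<'r>(&'r self, len: usize) -> Option<&'r [u8]> { ... }
    //
    // After — the elided form means exactly the same thing:
    fn peek(&self, len: usize) -> Option<&[u8]> {
        self.data.get(..len)
    }
}

fn main() {
    let buf = Buf { data: vec![1, 2, 3] };
    assert_eq!(buf.peek(2), Some(&[1u8, 2][..]));
}
```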
4 changes: 2 additions & 2 deletions src/reader.rs
@@ -21,7 +21,7 @@ impl< T > Iterator for RawCopyIter< T > {
#[inline(always)]
fn next( &mut self ) -> Option< Self::Item > {
if self.pointer.as_ptr() as *const T == self.end {
return None;
None
} else {
unsafe {
let old = self.pointer.as_ptr();
@@ -95,7 +95,7 @@ pub trait Reader< 'a, C: Context >: Sized {
}

#[inline(always)]
fn read_bytes_borrowed_from_reader< 'r >( &'r mut self, _length: usize ) -> Option< Result< &'r [u8], C::Error > > {
fn read_bytes_borrowed_from_reader( &mut self, _length: usize ) -> Option< Result< &[u8], C::Error > > {
None
}

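The `RawCopyIter::next` change is clippy's needless_return: when an `if`/`else` is the tail expression of a function, each branch's final expression is already the return value. For example:

```rust
// Sketch of the needless_return lint: both forms are equivalent,
// clippy prefers the expression-oriented one.
fn first_or_zero(values: &[u32]) -> u32 {
    if values.is_empty() {
        // Before: return 0;
        0
    } else {
        values[0]
    }
}

fn main() {
    assert_eq!(first_or_zero(&[]), 0);
    assert_eq!(first_or_zero(&[7]), 7);
}
```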
4 changes: 2 additions & 2 deletions src/writable.rs
@@ -33,12 +33,12 @@ impl< 'a, C: Context > Writer< C > for BufferCollector< 'a, C > {

#[inline]
fn context( &self ) -> &C {
&self.context
self.context
}

#[inline]
fn context_mut( &mut self ) -> &mut C {
&mut self.context
self.context
}

#[inline(always)]
12 changes: 6 additions & 6 deletions src/writable_impl.rs
@@ -284,7 +284,7 @@ impl< 'a, C: Context, T: Writable< C > > Writable< C > for &'a [T] where [T]: To
impl< 'r, C, T > Writable< C > for Cow< 'r, HashSet< T > > where C: Context, T: Writable< C > + Clone + Hash + Eq {
#[inline]
fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > {
(&**self).write_to( writer )
(**self).write_to( writer )
}

#[inline]
@@ -296,7 +296,7 @@ impl< 'r, C, T > Writable< C > for Cow< 'r, HashSet< T > > where C: Context, T:
impl< 'r, C, T > Writable< C > for Cow< 'r, BTreeSet< T > > where C: Context, T: Writable< C > + Clone + Ord {
#[inline]
fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > {
(&**self).write_to( writer )
(**self).write_to( writer )
}

#[inline]
@@ -308,7 +308,7 @@ impl< 'r, C, T > Writable< C > for Cow< 'r, BTreeSet< T > > where C: Context, T:
impl< 'r, C, K, V > Writable< C > for Cow< 'r, HashMap< K, V > > where C: Context, K: Writable< C > + Clone + Hash + Eq, V: Writable< C > + Clone {
#[inline]
fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > {
(&**self).write_to( writer )
(**self).write_to( writer )
}

#[inline]
@@ -320,7 +320,7 @@ impl< 'r, C, K, V > Writable< C > for Cow< 'r, HashMap< K, V > > where C: Contex
impl< 'r, C, K, V > Writable< C > for Cow< 'r, BTreeMap< K, V > > where C: Context, K: Writable< C > + Clone + Ord, V: Writable< C > + Clone {
#[inline]
fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > {
(&**self).write_to( writer )
(**self).write_to( writer )
}

#[inline]
@@ -673,13 +673,13 @@ impl< C > Writable< C > for Box< str >
{
#[inline]
fn write_to< W >( &self, writer: &mut W ) -> Result< (), C::Error > where W: ?Sized + Writer< C > {
let value: &str = &**self;
let value: &str = self;
value.write_to( writer )
}

#[inline]
fn bytes_needed( &self ) -> Result< usize, C::Error > {
let value: &str = &**self;
let value: &str = self;
Writable::< C >::bytes_needed( value )
}
}
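The `Cow` and `Box<str>` impls drop borrows that auto-ref and deref coercion already provide: `(&**self).write_to(writer)` becomes `(**self).write_to(writer)`, and `&**self` on a `&Box<str>` becomes a plain coercion to `&str`. A rough sketch of the `Box<str>` case — `Describe` is a stand-in trait, not the crate's `Writable`:

```rust
// Illustrative only — a stand-in trait, not the crate's Writable.
trait Describe {
    fn describe(&self) -> String;
}

impl Describe for str {
    fn describe(&self) -> String {
        format!("str of len {}", self.len())
    }
}

impl Describe for Box<str> {
    fn describe(&self) -> String {
        // Before: let value: &str = &**self;
        // After — deref coercion from &Box<str> to &str is enough:
        let value: &str = self;
        value.describe()
    }
}

fn main() {
    let boxed: Box<str> = "hello".into();
    assert_eq!(boxed.describe(), "str of len 5");
}
```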
2 changes: 1 addition & 1 deletion tests/serialization_tests.rs
@@ -2223,7 +2223,7 @@ symmetric_tests_unsized_native_endian! {
#[cfg(feature = "chrono")]
symmetric_tests! {
chrono_datetime_utc for chrono::DateTime< chrono::Utc > {
in = chrono::offset::TimeZone::timestamp( &chrono::Utc, 123, 222 ),
in = chrono::offset::TimeZone::timestamp_opt( &chrono::Utc, 123, 222 ).unwrap(),
le = [123, 0, 0, 0, 0, 0, 0, 0, 222, 0, 0, 0],
be = [0, 0, 0, 0, 0, 0, 0, 123, 0, 0, 0, 222],
minimum_bytes = 12