diff --git a/tree-buf-macros/src/decode.rs b/tree-buf-macros/src/decode.rs index 9ba0909..3273dcb 100644 --- a/tree-buf-macros/src/decode.rs +++ b/tree-buf-macros/src/decode.rs @@ -39,7 +39,7 @@ fn impl_struct_decode(ast: &DeriveInput, data_struct: &DataStruct) -> TokenStrea let mut news_parallel_rhs = quote! {}; let mut is_first = true; - for NamedField { ident, ty, .. } in fields.iter() { + for NamedField { ident, ty, .. } in &fields { if is_first { is_first = false; parallel_lhs = quote! { #ident }; diff --git a/tree-buf-macros/src/encode.rs b/tree-buf-macros/src/encode.rs index 4f040b5..0a18d3c 100644 --- a/tree-buf-macros/src/encode.rs +++ b/tree-buf-macros/src/encode.rs @@ -47,7 +47,7 @@ fn impl_struct_encode(ast: &DeriveInput, data_struct: &DataStruct) -> TokenStrea // See also: fadaec14-35ad-4dc1-b6dc-6106ab811669 let (prefix, suffix) = match num_fields { - 0..=8 => (quote! {}, Ident::new(format!("Obj{}", num_fields).as_str(), Span::call_site())), + 0..=8 => (quote! {}, Ident::new(format!("Obj{num_fields}").as_str(), Span::call_site())), _ => ( quote! { ::tree_buf::internal::encodings::varint::encode_prefix_varint(#num_fields as u64 - 9, stream.bytes); diff --git a/tree-buf-macros/src/utils.rs b/tree-buf-macros/src/utils.rs index ca145e7..e54c799 100644 --- a/tree-buf-macros/src/utils.rs +++ b/tree-buf-macros/src/utils.rs @@ -13,7 +13,7 @@ use { // TODO: Unfortunately, the current method is quite inadequate. Consider a language with no case. Consider a letter 'q' having // neither uppercase nor lowercase. qq vs q_q is different. But, in this encoding they are the same. pub fn canonical_ident(ident: &Ident) -> String { - let ident_str = format!("{}", ident); + let ident_str = format!("{ident}"); to_camel_case(&ident_str) } @@ -26,9 +26,8 @@ pub type NamedFields<'a> = Vec<NamedField<'a>>; pub fn get_named_fields(data_struct: &DataStruct) -> NamedFields { // TODO: Lift restriction - let fields_named = match &data_struct.fields { - Fields::Named(fields_named) => fields_named, - _ => panic!("The struct must have named fields"), + let Fields::Named(fields_named) = &data_struct.fields else { + panic!("The struct must have named fields") }; fields_named @@ -39,7 +38,7 @@ pub fn get_named_fields(data_struct: &DataStruct) -> NamedFields { NamedField { ident: field.ident.as_ref().unwrap(), ty: &field.ty, - canon_str: canonical_ident(&ident), + canon_str: canonical_ident(ident), } }) .collect() diff --git a/tree-buf/src/experimental/scratch.rs b/tree-buf/src/experimental/scratch.rs index 1159094..089c63d 100644 --- a/tree-buf/src/experimental/scratch.rs +++ b/tree-buf/src/experimental/scratch.rs @@ -24,6 +24,6 @@ pub fn scratch() -> Scratch { Scratch { buffers: Default::default() } } -pub fn encode_into_with_scratch(_value: &T, _scratch: &mut Scratch, _into: &mut Vec<u8>) { +pub fn encode_into_with_scratch(_value: &T, _scratch: &mut Scratch, _into: &mut [u8]) { todo!() } diff --git a/tree-buf/src/experimental/stats.rs b/tree-buf/src/experimental/stats.rs index d54d42c..f32a788 100644 --- a/tree-buf/src/experimental/stats.rs +++ b/tree-buf/src/experimental/stats.rs @@ -13,15 +13,13 @@ struct Path { impl Path { fn c(s: &String, x: &impl fmt::Display) -> String { - let x = format!("{}", x); + let x = format!("{x}"); if s.is_empty() { x + } else if x.is_empty() { + s.clone() } else { - if x.is_empty() { - s.clone() - } else { - format!("{}.{}", s, x) - } + format!("{s}.{x}") } } @@ -59,13 +57,13 @@ impl fmt::Display for SizeBreakdown { by_type.sort_by_key(|i| usize::MAX - i.1.size); writeln!(f, "Largest by path:")?;
- for (path, agg) in by_path.iter() { + for (path, agg) in &by_path { writeln!(f, "\t{}\n\t {}\n\t {}", agg.size, path, agg.types)?; } writeln!(f)?; writeln!(f, "Largest by type:")?; - for (t, agg) in by_type.iter() { + for (t, agg) in &by_type { writeln!(f, "\t {}x {} @ {}", agg.count, agg.size, t)?; } @@ -99,95 +97,95 @@ impl SizeBreakdown { } // TODO: (Security) Re-write without recursion -fn visit_array(path: Path, branch: &DynArrayBranch, breakdown: &mut SizeBreakdown) { +fn visit_array(path: &Path, branch: &DynArrayBranch, breakdown: &mut SizeBreakdown) { match branch { - DynArrayBranch::ArrayFixed { values, len } => visit_array(path.a(&format!("[{}]", len), &"Array Fixed"), values, breakdown), + DynArrayBranch::ArrayFixed { values, len } => visit_array(&path.a(&format!("[{len}]"), &"Array Fixed"), values, breakdown), DynArrayBranch::Array { len, values } => { - visit_array(path.a(&"len", &"Array"), len, breakdown); - visit_array(path.a(&"values", &"Array"), values, breakdown); + visit_array(&path.a(&"len", &"Array"), len, breakdown); + visit_array(&path.a(&"values", &"Array"), values, breakdown); } DynArrayBranch::Enum { discriminants, variants } => { - visit_array(path.a(&"discriminants", &"Enum"), discriminants, breakdown); + visit_array(&path.a(&"discriminants", &"Enum"), discriminants, breakdown); for variant in variants.iter() { - visit_array(path.a(&variant.ident, &"Enum"), &variant.data, breakdown); + visit_array(&path.a(&variant.ident, &"Enum"), &variant.data, breakdown); } } DynArrayBranch::Boolean(enc) => match enc { - ArrayBool::Packed(b) => breakdown.add(&path, "Packed Boolean", b), - ArrayBool::RLE(_first, runs) => visit_array(path.a(&"runs", &"Bool RLE"), runs, breakdown), + ArrayBool::Packed(b) => breakdown.add(path, "Packed Boolean", b), + ArrayBool::RLE(_first, runs) => visit_array(&path.a(&"runs", &"Bool RLE"), runs, breakdown), }, DynArrayBranch::Float(f) => match f { - ArrayFloat::DoubleGorilla(b) => breakdown.add(&path, "Gorilla", b), - ArrayFloat::F32(b) => breakdown.add(&path, "Fixed F32", b), - ArrayFloat::F64(b) => breakdown.add(&path, "Fixed F64", b), - ArrayFloat::Zfp32(b) => breakdown.add(&path, "Zfp 64", b), - ArrayFloat::Zfp64(b) => breakdown.add(&path, "Zfp 32", b), + ArrayFloat::DoubleGorilla(b) => breakdown.add(path, "Gorilla", b), + ArrayFloat::F32(b) => breakdown.add(path, "Fixed F32", b), + ArrayFloat::F64(b) => breakdown.add(path, "Fixed F64", b), + ArrayFloat::Zfp32(b) => breakdown.add(path, "Zfp 64", b), + ArrayFloat::Zfp64(b) => breakdown.add(path, "Zfp 32", b), }, DynArrayBranch::Integer(ArrayInteger { bytes, encoding }) => match encoding { - ArrayIntegerEncoding::PrefixVarInt => breakdown.add(&path, "Prefix Varint", bytes), - ArrayIntegerEncoding::Simple16 => breakdown.add(&path, "Simple16", bytes), - ArrayIntegerEncoding::U8 => breakdown.add(&path, "U8 Fixed", bytes), - ArrayIntegerEncoding::DeltaZig => breakdown.add(&path, "DeltaZig", bytes), + ArrayIntegerEncoding::PrefixVarInt => breakdown.add(path, "Prefix Varint", bytes), + ArrayIntegerEncoding::Simple16 => breakdown.add(path, "Simple16", bytes), + ArrayIntegerEncoding::U8 => breakdown.add(path, "U8 Fixed", bytes), + ArrayIntegerEncoding::DeltaZig => breakdown.add(path, "DeltaZig", bytes), }, DynArrayBranch::Map { len, keys, values } => { - visit_array(path.a(&"len", &"Map"), len, breakdown); - visit_array(path.a(&"keys", &"Map"), keys, breakdown); - visit_array(path.a(&"values", &"Map"), values, breakdown); + visit_array(&path.a(&"len", &"Map"), len, breakdown); + 
visit_array(&path.a(&"keys", &"Map"), keys, breakdown); + visit_array(&path.a(&"values", &"Map"), values, breakdown); } DynArrayBranch::Object { fields } => { for (name, field) in fields { - visit_array(path.a(name, &"Object"), field, breakdown); + visit_array(&path.a(name, &"Object"), field, breakdown); } } DynArrayBranch::RLE { runs, values } => { - visit_array(path.a(&"runs", &"RLE"), runs, breakdown); - visit_array(path.a(&"values", &"RLE"), values, breakdown); + visit_array(&path.a(&"runs", &"RLE"), runs, breakdown); + visit_array(&path.a(&"values", &"RLE"), values, breakdown); } DynArrayBranch::Dictionary { indices, values } => { - visit_array(path.a(&"indices", &"Dictionary"), indices, breakdown); - visit_array(path.a(&"values", &"Dictionary"), values, breakdown); + visit_array(&path.a(&"indices", &"Dictionary"), indices, breakdown); + visit_array(&path.a(&"values", &"Dictionary"), values, breakdown); } - DynArrayBranch::String(b) => breakdown.add(&path, "UTF-8", b), + DynArrayBranch::String(b) => breakdown.add(path, "UTF-8", b), DynArrayBranch::BrotliUtf8 { utf8, lens } => { - breakdown.add(&path, "BrotliUtf8", utf8); - visit_array(path.a(&"lens", &"Dictionary"), lens, breakdown); + breakdown.add(path, "BrotliUtf8", utf8); + visit_array(&path.a(&"lens", &"Dictionary"), lens, breakdown); } DynArrayBranch::Tuple { fields } => { for (i, field) in fields.iter().enumerate() { - visit_array(path.a(&i, &"Tuple"), field, breakdown); + visit_array(&path.a(&i, &"Tuple"), field, breakdown); } } DynArrayBranch::Nullable { opt, values } => { - visit_array(path.a(&"opt", &"Nullable"), opt, breakdown); - visit_array(path.a(&"values", &"Nullable"), values, breakdown); + visit_array(&path.a(&"opt", &"Nullable"), opt, breakdown); + visit_array(&path.a(&"values", &"Nullable"), values, breakdown); } DynArrayBranch::Void | DynArrayBranch::Map0 | DynArrayBranch::Array0 => {} } } -fn visit(path: Path, branch: &DynRootBranch<'_>, breakdown: &mut SizeBreakdown) { +fn visit(path: &Path, branch: &DynRootBranch<'_>, breakdown: &mut SizeBreakdown) { match branch { DynRootBranch::Object { fields } => { for (name, value) in fields.iter() { - visit(path.a(name, &"Object"), value, breakdown); + visit(&path.a(name, &"Object"), value, breakdown); } } - DynRootBranch::Enum { discriminant, value } => visit(path.a(discriminant, &"Enum"), value, breakdown), + DynRootBranch::Enum { discriminant, value } => visit(&path.a(discriminant, &"Enum"), value, breakdown), DynRootBranch::Map { len: _, keys, values } => { - visit_array(path.a(&"keys", &"Map"), keys, breakdown); - visit_array(path.a(&"values", &"Values"), values, breakdown); + visit_array(&path.a(&"keys", &"Map"), keys, breakdown); + visit_array(&path.a(&"values", &"Values"), values, breakdown); } DynRootBranch::Tuple { fields } => { for (i, field) in fields.iter().enumerate() { - visit(path.a(&i, &"Tuple"), field, breakdown); + visit(&path.a(&i, &"Tuple"), field, breakdown); } } DynRootBranch::Map1 { key, value } => { - visit(path.a(&"key", &"Map1"), key, breakdown); - visit(path.a(&"value", &"Map1"), value, breakdown); + visit(&path.a(&"key", &"Map1"), key, breakdown); + visit(&path.a(&"value", &"Map1"), value, breakdown); } - DynRootBranch::Array { len, values } => visit_array(path.a(&format!("[{}]", len), &"Array"), values, breakdown), - DynRootBranch::Array1(item) => visit(path.a(&"1", &"Array1"), item, breakdown), + DynRootBranch::Array { len, values } => visit_array(&path.a(&format!("[{len}]"), &"Array"), values, breakdown), + DynRootBranch::Array1(item) => 
visit(&path.a(&"1", &"Array1"), item, breakdown), DynRootBranch::Boolean(_) | DynRootBranch::Array0 | DynRootBranch::Map0 @@ -274,7 +272,7 @@ pub fn size_breakdown(data: &[u8]) -> DecodeResult<String> { by_type: HashMap::new(), total: data.len(), }; - visit(Path::default(), &root, &mut breakdown); + visit(&Path::default(), &root, &mut breakdown); - Ok(format!("{}", breakdown)) + Ok(format!("{breakdown}")) } diff --git a/tree-buf/src/internal/branch/array_branch.rs b/tree-buf/src/internal/branch/array_branch.rs index d1efba1..6cbe419 100644 --- a/tree-buf/src/internal/branch/array_branch.rs +++ b/tree-buf/src/internal/branch/array_branch.rs @@ -25,7 +25,7 @@ impl Deref for Bytes<'_> { type Target = [u8]; #[inline] fn deref(&self) -> &Self::Target { - &self.0 + self.0 } } @@ -50,7 +50,7 @@ pub struct ArrayEnumVariant<'a> { pub data: DynArrayBranch<'a>, } -#[derive(Debug)] +#[derive(Debug, Default)] pub enum DynArrayBranch<'a> { Object { fields: HashMap<Ident<'a>, DynArrayBranch<'a>>, }, @@ -80,6 +80,7 @@ pub enum DynArrayBranch<'a> { }, Boolean(ArrayBool<'a>), Float(ArrayFloat<'a>), + #[default] Void, String(Bytes<'a>), BrotliUtf8 { @@ -112,7 +113,7 @@ pub fn decode_next_array<'a>(bytes: &'a [u8], offset: &'_ mut usize, lens: &'_ m use ArrayTypeId::*; fn decode_ints<'a>(bytes: &'a [u8], offset: &'_ mut usize, lens: &'_ mut usize, encoding: ArrayIntegerEncoding) -> DecodeResult<DynArrayBranch<'a>> { - let bytes = decode_bytes_from_len(bytes, offset, lens)?.into(); + let bytes = decode_bytes_from_len(bytes, offset, lens)?; Ok(DynArrayBranch::Integer(ArrayInteger { bytes, encoding })) } @@ -276,12 +277,6 @@ pub fn decode_next_array<'a>(bytes: &'a [u8], offset: &'_ mut usize, lens: &'_ m Ok(branch) } -impl<'a> Default for DynArrayBranch<'a> { - fn default() -> Self { - DynArrayBranch::Void - } -} - impl_type_id!(ArrayTypeId, [ Nullable: 1, ArrayVar: 2, diff --git a/tree-buf/src/internal/branch/mod.rs b/tree-buf/src/internal/branch/mod.rs index feca9da..06344f5 100644 --- a/tree-buf/src/internal/branch/mod.rs +++ b/tree-buf/src/internal/branch/mod.rs @@ -117,7 +117,7 @@ pub trait TypeId: Copy + Into<u8> + PartialEq + std::fmt::Debug { #[cfg(feature = "decode")] pub fn decode_root(bytes: &[u8]) -> DecodeResult<DynRootBranch<'_>> { profile_fn!(decode_root); - if bytes.len() == 0 { + if bytes.is_empty() { return Ok(DynRootBranch::Void); } let mut lens = bytes.len() - 1; diff --git a/tree-buf/src/internal/branch/root_branch.rs b/tree-buf/src/internal/branch/root_branch.rs index 8282b5c..0cfae4d 100644 --- a/tree-buf/src/internal/branch/root_branch.rs +++ b/tree-buf/src/internal/branch/root_branch.rs @@ -16,7 +16,7 @@ use std::convert::{TryFrom, TryInto}; // TODO: Other kinds of self-description may also be interesting, since this is for data self-description is higher value // TODO: Bytes/Blog = [u8] compressed (eg: gzip), uncompressed -#[derive(Debug)] +#[derive(Debug, Default)] pub enum DynRootBranch<'a> { Object { fields: HashMap<Ident<'a>, DynRootBranch<'a>>, }, @@ -37,6 +37,7 @@ Integer(RootInteger), Boolean(bool), Float(RootFloat), + #[default] Void, String(&'a str), Map0, @@ -193,12 +194,6 @@ pub fn decode_next_root<'a>(bytes: &'a [u8], offset: &'_ mut usize, lens: &'_ mu Ok(branch) } -impl<'a> Default for DynRootBranch<'a> { - fn default() -> Self { - DynRootBranch::Void - } -} - #[derive(Debug)] pub enum RootInteger { S(i64), @@ -249,9 +244,9 @@ impl RootInteger { pub fn new(bytes: &[u8], offset: &mut usize, len: usize, signed: bool) -> DecodeResult<Self> { let bytes = decode_bytes(len, bytes, offset)?; let ok = match (len, signed) { - (1,
true) => Self::S((bytes[0] as i64) * -1), + (1, true) => Self::S(-i64::from(bytes[0])), (1, false) => Self::U(bytes[0].into()), - (2, true) => Self::S(u16::from_le_bytes(bytes.try_into().unwrap()) as i64 * -1), + (2, true) => Self::S(-i64::from(u16::from_le_bytes(bytes.try_into().unwrap()))), (2, false) => Self::U(u16::from_le_bytes(bytes.try_into().unwrap()).into()), (3, false) => Self::U({ let b = [bytes[0], bytes[1], bytes[2], 0]; @@ -259,36 +254,36 @@ impl RootInteger { }), (3, true) => Self::S({ let b = [bytes[0], bytes[1], bytes[2], 0]; - u32::from_le_bytes(b) as i64 * -1 + -i64::from(u32::from_le_bytes(b)) }), - (4, true) => Self::S(u32::from_le_bytes(bytes.try_into().unwrap()) as i64 * -1), + (4, true) => Self::S(-i64::from(u32::from_le_bytes(bytes.try_into().unwrap()))), (4, false) => Self::U(u32::from_le_bytes(bytes.try_into().unwrap()).into()), (5, false) => Self::U({ let b = [bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], 0, 0, 0]; - u64::from_le_bytes(b).into() + u64::from_le_bytes(b) }), (5, true) => Self::S({ let b = [bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], 0, 0, 0]; - u64::from_le_bytes(b) as i64 * -1 + -(u64::from_le_bytes(b) as i64) }), (6, false) => Self::U({ let b = [bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], 0, 0]; - u64::from_le_bytes(b).into() + u64::from_le_bytes(b) }), (6, true) => Self::S({ let b = [bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], 0, 0]; - u64::from_le_bytes(b) as i64 * -1 + -(u64::from_le_bytes(b) as i64) }), (7, false) => Self::U({ let b = [bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], 0]; - u64::from_le_bytes(b).into() + u64::from_le_bytes(b) }), (7, true) => Self::S({ let b = [bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], 0]; - u64::from_le_bytes(b) as i64 * -1 + -(u64::from_le_bytes(b) as i64) }), - (8, true) => Self::S(u64::from_le_bytes(bytes.try_into().unwrap()) as i64 * -1), - (8, false) => Self::U(u64::from_le_bytes(bytes.try_into().unwrap()).into()), + (8, true) => Self::S(-(u64::from_le_bytes(bytes.try_into().unwrap()) as i64)), + (8, false) => Self::U(u64::from_le_bytes(bytes.try_into().unwrap())), _ => unreachable!(), }; Ok(ok) diff --git a/tree-buf/src/internal/buffer.rs b/tree-buf/src/internal/buffer.rs index 08fdeb2..267ef0e 100644 --- a/tree-buf/src/internal/buffer.rs +++ b/tree-buf/src/internal/buffer.rs @@ -116,7 +116,7 @@ impl Buffer { #[inline] fn top(&self) -> *mut T { - unsafe { self.base().offset(self.len as isize) } + unsafe { self.base().add(self.len) } } #[inline] @@ -188,7 +188,7 @@ mod tests { let mut buffer = Buffer::new(); let data = vec![0u8, 1, 255, 12]; - for elem in data.iter() { + for elem in &data { buffer.try_push(*elem).unwrap(); } diff --git a/tree-buf/src/internal/encodings/compress.rs b/tree-buf/src/internal/encodings/compress.rs index f07deda..690299b 100644 --- a/tree-buf/src/internal/encodings/compress.rs +++ b/tree-buf/src/internal/encodings/compress.rs @@ -33,7 +33,7 @@ pub(crate) fn compress(data: &[T], stream: &mut by_size.sort_unstable_by_key(|&(_, size)| size); // Return the first compressor that succeeds - for ranked in by_size.iter() { + for ranked in &by_size { if let Ok(ok) = compressors.compress(ranked.0, data, stream) { return ok; } diff --git a/tree-buf/src/internal/encodings/delta.rs b/tree-buf/src/internal/encodings/delta.rs index 58b7b91..b94b6da 100644 --- a/tree-buf/src/internal/encodings/delta.rs +++ b/tree-buf/src/internal/encodings/delta.rs @@ -4,7 +4,7 @@ use std::ops::{Add, Sub}; // FIXME: 
This may not be what is needed. Zigzag may be. pub fn delta_encode_in_place + Copy>(data: &mut [T]) { profile_fn!(delta_encode_in_place); - if data.len() == 0 { + if data.is_empty() { return; } let mut current = data[0]; diff --git a/tree-buf/src/internal/encodings/gorilla_new.rs b/tree-buf/src/internal/encodings/gorilla_new.rs index 4bc57ce..64f81eb 100644 --- a/tree-buf/src/internal/encodings/gorilla_new.rs +++ b/tree-buf/src/internal/encodings/gorilla_new.rs @@ -17,9 +17,8 @@ pub fn size_for(data: impl Iterator) -> Result { // and 1 byte at end of "remaining bits" let mut bits = 72usize; - let buffer = match data.next() { - Some(first) => first, - None => return Err(()), + let Some(buffer) = data.next() else { + return Err(()) }; let mut previous = buffer; @@ -78,9 +77,8 @@ pub fn compress(data: impl Iterator, bytes: &mut Vec) -> Result< } }; - let mut buffer = match data.next() { - Some(first) => first, - None => return Err(()), + let Some(mut buffer) = data.next() else { + return Err(()) }; let mut previous = buffer; @@ -97,10 +95,10 @@ pub fn compress(data: impl Iterator, bytes: &mut Vec) -> Result< match xored { 0 => encode(0, 1, capacity, buffer, bytes), _ => { - let lz = xored.leading_zeros().min(31) as u64; - let tz = xored.trailing_zeros() as u64; - let prev_lz = prev_xor.leading_zeros() as u64; - let prev_tz = if prev_lz == 64 { 0 } else { prev_xor.trailing_zeros() as u64 }; + let lz = u64::from(xored.leading_zeros().min(31)); + let tz = u64::from(xored.trailing_zeros()); + let prev_lz = u64::from(prev_xor.leading_zeros()); + let prev_tz = if prev_lz == 64 { 0 } else { u64::from(prev_xor.trailing_zeros()) }; if lz >= prev_lz && tz >= prev_tz { let meaningful_bits = xored >> prev_tz; let meaningful_bit_count = 64 - prev_tz - prev_lz; @@ -130,7 +128,7 @@ pub fn compress(data: impl Iterator, bytes: &mut Vec) -> Result< byte_count += 1; } let last = &(&buffer.to_le_bytes())[(8 - byte_count) as usize..]; - bytes.extend_from_slice(&last); + bytes.extend_from_slice(last); bytes.push(remaining); Ok(ArrayTypeId::DoubleGorilla) diff --git a/tree-buf/src/internal/encodings/gorilla_old.rs b/tree-buf/src/internal/encodings/gorilla_old.rs index 97581a3..e9e4d32 100644 --- a/tree-buf/src/internal/encodings/gorilla_old.rs +++ b/tree-buf/src/internal/encodings/gorilla_old.rs @@ -9,7 +9,7 @@ where f64: AsPrimitive, { // FIXME: Should do schema mismatch for f32 -> f64 - let num_bits_last_elm = *bytes.last().ok_or_else(|| DecodeError::InvalidFormat)?; + let num_bits_last_elm = *bytes.last().ok_or(DecodeError::InvalidFormat)?; // Remove the byte we just read containing the bit count of the last element. 
let bytes = &bytes[..bytes.len() - 1]; let mut last_byte_count = num_bits_last_elm / 8; diff --git a/tree-buf/src/internal/encodings/packed_bool.rs b/tree-buf/src/internal/encodings/packed_bool.rs index 44263ab..2ad902d 100644 --- a/tree-buf/src/internal/encodings/packed_bool.rs +++ b/tree-buf/src/internal/encodings/packed_bool.rs @@ -6,14 +6,14 @@ pub fn encode_packed_bool(items: &[bool], bytes: &mut Vec) { let mut offset = 0; while offset + 7 < items.len() { - let b = (items[offset + 0] as u8) << 0 - | (items[offset + 1] as u8) << 1 - | (items[offset + 2] as u8) << 2 - | (items[offset + 3] as u8) << 3 - | (items[offset + 4] as u8) << 4 - | (items[offset + 5] as u8) << 5 - | (items[offset + 6] as u8) << 6 - | (items[offset + 7] as u8) << 7; + let b = u8::from(items[offset + 0]) << 0 + | u8::from(items[offset + 1]) << 1 + | u8::from(items[offset + 2]) << 2 + | u8::from(items[offset + 3]) << 3 + | u8::from(items[offset + 4]) << 4 + | u8::from(items[offset + 5]) << 5 + | u8::from(items[offset + 6]) << 6 + | u8::from(items[offset + 7]) << 7; bytes.push(b); offset += 8; } @@ -21,7 +21,7 @@ pub fn encode_packed_bool(items: &[bool], bytes: &mut Vec) { if offset < items.len() { let mut b = 0; for i in 0..items.len() - offset { - b |= (items[offset + i] as u8) << i; + b |= u8::from(items[offset + i]) << i; } bytes.push(b); } diff --git a/tree-buf/src/internal/encodings/varint.rs b/tree-buf/src/internal/encodings/varint.rs index 26e8d3d..a63e17e 100644 --- a/tree-buf/src/internal/encodings/varint.rs +++ b/tree-buf/src/internal/encodings/varint.rs @@ -171,7 +171,7 @@ pub fn decode_prefix_varint(bytes: &[u8], offset: &mut usize) -> DecodeResult DecodeResult (first >> 1) as u64, - 1 => (first >> 2) as u64 | ((bytes[*offset + 1] as u64) << 6), - 2 => (first >> 3) as u64 | ((bytes[*offset + 1] as u64) << 5) | ((bytes[*offset + 2] as u64) << 13), - 3 => (first >> 4) as u64 | ((bytes[*offset + 1] as u64) << 4) | ((bytes[*offset + 2] as u64) << 12) | ((bytes[*offset + 3] as u64) << 20), + 0 => u64::from(first >> 1), + 1 => u64::from(first >> 2) | (u64::from(bytes[*offset + 1]) << 6), + 2 => u64::from(first >> 3) | (u64::from(bytes[*offset + 1]) << 5) | (u64::from(bytes[*offset + 2]) << 13), + 3 => u64::from(first >> 4) | (u64::from(bytes[*offset + 1]) << 4) | (u64::from(bytes[*offset + 2]) << 12) | (u64::from(bytes[*offset + 3]) << 20), 4 => { - (first >> 5) as u64 - | ((bytes[*offset + 1] as u64) << 3) - | ((bytes[*offset + 2] as u64) << 11) - | ((bytes[*offset + 3] as u64) << 19) - | ((bytes[*offset + 4] as u64) << 27) + u64::from(first >> 5) + | (u64::from(bytes[*offset + 1]) << 3) + | (u64::from(bytes[*offset + 2]) << 11) + | (u64::from(bytes[*offset + 3]) << 19) + | (u64::from(bytes[*offset + 4]) << 27) } 5 => { - (first >> 6) as u64 - | ((bytes[*offset + 1] as u64) << 2) - | ((bytes[*offset + 2] as u64) << 10) - | ((bytes[*offset + 3] as u64) << 18) - | ((bytes[*offset + 4] as u64) << 26) - | ((bytes[*offset + 5] as u64) << 34) + u64::from(first >> 6) + | (u64::from(bytes[*offset + 1]) << 2) + | (u64::from(bytes[*offset + 2]) << 10) + | (u64::from(bytes[*offset + 3]) << 18) + | (u64::from(bytes[*offset + 4]) << 26) + | (u64::from(bytes[*offset + 5]) << 34) } 6 => { - (first >> 7) as u64 - | ((bytes[*offset + 1] as u64) << 1) - | ((bytes[*offset + 2] as u64) << 9) - | ((bytes[*offset + 3] as u64) << 17) - | ((bytes[*offset + 4] as u64) << 25) - | ((bytes[*offset + 5] as u64) << 33) - | ((bytes[*offset + 6] as u64) << 41) + u64::from(first >> 7) + | (u64::from(bytes[*offset + 1]) << 1) + | 
(u64::from(bytes[*offset + 2]) << 9) + | (u64::from(bytes[*offset + 3]) << 17) + | (u64::from(bytes[*offset + 4]) << 25) + | (u64::from(bytes[*offset + 5]) << 33) + | (u64::from(bytes[*offset + 6]) << 41) } 7 => { - (bytes[*offset + 1] as u64) - | ((bytes[*offset + 2] as u64) << 8) - | ((bytes[*offset + 3] as u64) << 16) - | ((bytes[*offset + 4] as u64) << 24) - | ((bytes[*offset + 5] as u64) << 32) - | ((bytes[*offset + 6] as u64) << 40) - | ((bytes[*offset + 7] as u64) << 48) + u64::from(bytes[*offset + 1]) + | (u64::from(bytes[*offset + 2]) << 8) + | (u64::from(bytes[*offset + 3]) << 16) + | (u64::from(bytes[*offset + 4]) << 24) + | (u64::from(bytes[*offset + 5]) << 32) + | (u64::from(bytes[*offset + 6]) << 40) + | (u64::from(bytes[*offset + 7]) << 48) } 8 => { - (bytes[*offset + 1] as u64) - | ((bytes[*offset + 2] as u64) << 8) - | ((bytes[*offset + 3] as u64) << 16) - | ((bytes[*offset + 4] as u64) << 24) - | ((bytes[*offset + 5] as u64) << 32) - | ((bytes[*offset + 6] as u64) << 40) - | ((bytes[*offset + 7] as u64) << 48) - | ((bytes[*offset + 8] as u64) << 56) + u64::from(bytes[*offset + 1]) + | (u64::from(bytes[*offset + 2]) << 8) + | (u64::from(bytes[*offset + 3]) << 16) + | (u64::from(bytes[*offset + 4]) << 24) + | (u64::from(bytes[*offset + 5]) << 32) + | (u64::from(bytes[*offset + 6]) << 40) + | (u64::from(bytes[*offset + 7]) << 48) + | (u64::from(bytes[*offset + 8]) << 56) } _ => unreachable!(), }; @@ -236,7 +236,7 @@ pub fn decode_prefix_varint(bytes: &[u8], offset: &mut usize) -> DecodeResult DecodeResult { - let first = bytes.get(*offset).ok_or_else(|| DecodeError::InvalidFormat)?; + let first = bytes.get(*offset).ok_or(DecodeError::InvalidFormat)?; let shift = first.trailing_zeros(); // TODO: Ensure unchecked indexing follows. @@ -245,52 +245,52 @@ pub fn decode_suffix_varint(bytes: &[u8], offset: &mut usize) -> DecodeResult (first >> 1) as u64, - 1 => (first >> 2) as u64 | ((bytes[*offset - 1] as u64) << 6), - 2 => (first >> 3) as u64 | ((bytes[*offset - 2] as u64) << 5) | ((bytes[*offset - 1] as u64) << 13), - 3 => (first >> 4) as u64 | ((bytes[*offset - 3] as u64) << 4) | ((bytes[*offset - 2] as u64) << 12) | ((bytes[*offset - 1] as u64) << 20), + 0 => u64::from(first >> 1), + 1 => u64::from(first >> 2) | (u64::from(bytes[*offset - 1]) << 6), + 2 => u64::from(first >> 3) | (u64::from(bytes[*offset - 2]) << 5) | (u64::from(bytes[*offset - 1]) << 13), + 3 => u64::from(first >> 4) | (u64::from(bytes[*offset - 3]) << 4) | (u64::from(bytes[*offset - 2]) << 12) | (u64::from(bytes[*offset - 1]) << 20), 4 => { - (first >> 5) as u64 - | ((bytes[*offset - 4] as u64) << 3) - | ((bytes[*offset - 3] as u64) << 11) - | ((bytes[*offset - 2] as u64) << 19) - | ((bytes[*offset - 1] as u64) << 27) + u64::from(first >> 5) + | (u64::from(bytes[*offset - 4]) << 3) + | (u64::from(bytes[*offset - 3]) << 11) + | (u64::from(bytes[*offset - 2]) << 19) + | (u64::from(bytes[*offset - 1]) << 27) } 5 => { - (first >> 6) as u64 - | ((bytes[*offset - 5] as u64) << 2) - | ((bytes[*offset - 4] as u64) << 10) - | ((bytes[*offset - 3] as u64) << 18) - | ((bytes[*offset - 2] as u64) << 26) - | ((bytes[*offset - 1] as u64) << 34) + u64::from(first >> 6) + | (u64::from(bytes[*offset - 5]) << 2) + | (u64::from(bytes[*offset - 4]) << 10) + | (u64::from(bytes[*offset - 3]) << 18) + | (u64::from(bytes[*offset - 2]) << 26) + | (u64::from(bytes[*offset - 1]) << 34) } 6 => { - (first >> 7) as u64 - | ((bytes[*offset - 6] as u64) << 1) - | ((bytes[*offset - 5] as u64) << 9) - | ((bytes[*offset - 4] as u64) << 17) - 
| ((bytes[*offset - 3] as u64) << 25) - | ((bytes[*offset - 2] as u64) << 33) - | ((bytes[*offset - 1] as u64) << 41) + u64::from(first >> 7) + | (u64::from(bytes[*offset - 6]) << 1) + | (u64::from(bytes[*offset - 5]) << 9) + | (u64::from(bytes[*offset - 4]) << 17) + | (u64::from(bytes[*offset - 3]) << 25) + | (u64::from(bytes[*offset - 2]) << 33) + | (u64::from(bytes[*offset - 1]) << 41) } 7 => { - (bytes[*offset - 7] as u64) - | ((bytes[*offset - 6] as u64) << 8) - | ((bytes[*offset - 5] as u64) << 16) - | ((bytes[*offset - 4] as u64) << 24) - | ((bytes[*offset - 3] as u64) << 32) - | ((bytes[*offset - 2] as u64) << 40) - | ((bytes[*offset - 1] as u64) << 48) + u64::from(bytes[*offset - 7]) + | (u64::from(bytes[*offset - 6]) << 8) + | (u64::from(bytes[*offset - 5]) << 16) + | (u64::from(bytes[*offset - 4]) << 24) + | (u64::from(bytes[*offset - 3]) << 32) + | (u64::from(bytes[*offset - 2]) << 40) + | (u64::from(bytes[*offset - 1]) << 48) } 8 => { - (bytes[*offset - 8] as u64) - | ((bytes[*offset - 7] as u64) << 8) - | ((bytes[*offset - 6] as u64) << 16) - | ((bytes[*offset - 5] as u64) << 24) - | ((bytes[*offset - 4] as u64) << 32) - | ((bytes[*offset - 3] as u64) << 40) - | ((bytes[*offset - 2] as u64) << 48) - | ((bytes[*offset - 1] as u64) << 56) + u64::from(bytes[*offset - 8]) + | (u64::from(bytes[*offset - 7]) << 8) + | (u64::from(bytes[*offset - 6]) << 16) + | (u64::from(bytes[*offset - 5]) << 24) + | (u64::from(bytes[*offset - 4]) << 32) + | (u64::from(bytes[*offset - 3]) << 40) + | (u64::from(bytes[*offset - 2]) << 48) + | (u64::from(bytes[*offset - 1]) << 56) } _ => unreachable!(), }; @@ -332,7 +332,7 @@ mod tests { #[test] fn test_prefix() -> DecodeResult<()> { let vecs = vec![vec![99, 127, 128, 0, 1, 2, 3, std::u64::MAX]]; - for vec in vecs.iter() { + for vec in &vecs { round_trip_prefix(vec)?; } @@ -356,7 +356,7 @@ mod tests { #[test] fn test_suffix() -> DecodeResult<()> { let vecs = vec![vec![99, 127, 128, 0, 1, 2, 3, std::u64::MAX]]; - for vec in vecs.iter() { + for vec in &vecs { round_trip_suffix(vec)?; } diff --git a/tree-buf/src/internal/rust_std/smart_pointers.rs b/tree-buf/src/internal/rust_std/smart_pointers.rs index f23d15d..d0c99f9 100644 --- a/tree-buf/src/internal/rust_std/smart_pointers.rs +++ b/tree-buf/src/internal/rust_std/smart_pointers.rs @@ -32,7 +32,7 @@ impl Decodable for Box { #[cfg(feature = "encode")] impl EncoderArray> for BoxEncoderArray { fn buffer_one<'a, 'b: 'a>(&'a mut self, value: &'b Box) { - self.inner.buffer_one(&value) + self.inner.buffer_one(value) } fn flush(self, stream: &mut EncoderStream<'_, O>) -> ArrayTypeId { self.inner.flush(stream) diff --git a/tree-buf/src/internal/types/array.rs b/tree-buf/src/internal/types/array.rs index 5e79425..6a5c577 100644 --- a/tree-buf/src/internal/types/array.rs +++ b/tree-buf/src/internal/types/array.rs @@ -9,7 +9,7 @@ impl Encodable for Vec { match self.len() { 0 => RootTypeId::Array0, 1 => { - stream.encode_with_id(|stream| (&self[0]).encode_root(stream)); + stream.encode_with_id(|stream| (self[0]).encode_root(stream)); RootTypeId::Array1 } _ => { diff --git a/tree-buf/src/internal/types/integer.rs b/tree-buf/src/internal/types/integer.rs index 384999f..0c58562 100644 --- a/tree-buf/src/internal/types/integer.rs +++ b/tree-buf/src/internal/types/integer.rs @@ -136,12 +136,7 @@ macro_rules! 
impl_lowerable { fn decode(sticks: DynRootBranch<'_>, _options: &impl DecodeOptions) -> DecodeResult<Self> { profile_method!(decode); match sticks { - DynRootBranch::Integer(root_int) => { - match root_int { - RootInteger::U(v) => v.try_into().map_err(|_| DecodeError::SchemaMismatch), - _ => Err(DecodeError::SchemaMismatch), - } - } + DynRootBranch::Integer(RootInteger::U(v)) => v.try_into().map_err(|_| DecodeError::SchemaMismatch), _ => Err(DecodeError::SchemaMismatch), } } @@ -418,14 +413,14 @@ where } within_rle(|| { - let mut data = data.into_iter(); + let mut data = data.iter(); let mut out = Vec::new(); // Unwrap is ok because length checked earlier. let mut current = data.next().unwrap(); out.push(*current); for next in data { - let delta = next.wrapping_sub(&current); + let delta = next.wrapping_sub(current); current = next; out.push(delta); } @@ -491,7 +486,7 @@ impl + Copy> Compressor for PrefixVarIntCompressor { profile_method!(compress); stream.encode_with_len(|stream| { for item in data { - encode_prefix_varint((*item).into(), &mut stream.bytes); + encode_prefix_varint((*item).into(), stream.bytes); } }); Ok(ArrayTypeId::IntPrefixVar) @@ -519,7 +514,7 @@ impl Compressor for Simple16Compressor { self.check_range()?; - stream.encode_with_len(|stream| unsafe { simple_16::compress_unchecked(&data, stream.bytes) }); + stream.encode_with_len(|stream| unsafe { simple_16::compress_unchecked(data, stream.bytes) }); Ok(ArrayTypeId::IntSimple16) } @@ -529,7 +524,7 @@ impl Compressor for Simple16Compressor { self.check_range()?; - let size = unsafe { simple_16::calculate_size_unchecked(&data) }; + let size = unsafe { simple_16::calculate_size_unchecked(data) }; Ok(size) } diff --git a/tree-buf/src/internal/types/string.rs b/tree-buf/src/internal/types/string.rs index e097274..ac7ae53 100644 --- a/tree-buf/src/internal/types/string.rs +++ b/tree-buf/src/internal/types/string.rs @@ -150,7 +150,7 @@ impl InfallibleDecoderArray for IntoIter { DynArrayBranch::String(bytes) => { profile_section!(str_utf8); - let strs = decode_all(&bytes, |b, o| decode_str(b, o).and_then(|v| Ok(v.to_owned())))?; + let strs = decode_all(&bytes, |b, o| decode_str(b, o).map(|v| v.to_owned()))?; Ok(strs.into_iter()) } DynArrayBranch::RLE { runs, values } => { @@ -189,7 +189,7 @@ impl> Compressor for BrotliCompressor { profile_method!(compress); // See also c2c4fad7-c231-4fb2-8cf1-50ca1bce7fc6 - if data.len() == 0 { + if data.is_empty() { // It's not currently possible to hit this.
// See also 9003b01b-83e8-4acc-9f38-d584a37e20c6 todo!("Support null lens"); diff --git a/tree-buf/tests/common/mod.rs b/tree-buf/tests/common/mod.rs index 373ce3d..22dbdf6 100644 --- a/tree-buf/tests/common/mod.rs +++ b/tree-buf/tests/common/mod.rs @@ -30,7 +30,7 @@ where let result = decode(&bytes); match result { Ok(parsed) => assert_eq!(o, &parsed), - Err(e) => assert!(false, "{}", e), + Err(e) => panic!("{}", e), } if let Some(size) = size.into() { assert_eq!(bytes.len() as i32, size, "Size Before: {}\nSize After: {}", size, bytes.len()); diff --git a/tree-buf/tests/round_trip.rs b/tree-buf/tests/round_trip.rs index d1b201a..2b337e9 100644 --- a/tree-buf/tests/round_trip.rs +++ b/tree-buf/tests/round_trip.rs @@ -14,7 +14,7 @@ mod hide_namespace { pub f: f64, pub obj_array: Vec<Bobs>, pub extra: Option<Bobs>, - pub s: Box<String>, + pub s: String, } #[derive(Encode, Decode, PartialEq, Debug, Clone)] @@ -35,7 +35,7 @@ fn make_item() -> Bits { one: vec![99], tup: (9999.99, 200.1), }), - s: Box::new("abc".to_owned()), + s: "abc".to_string(), obj_array: vec![ Bobs { one: vec![3, 2, 1, 0], @@ -115,7 +115,7 @@ fn float32_vec() { fn lossy_f64_vec() { let mut data = Vec::new(); for i in 0..50 { - data.push(0.01 * i as f64); + data.push(0.01 * f64::from(i)); } let tolerance = -10; let options = encode_options! { options::LossyFloatTolerance(tolerance) }; @@ -410,28 +410,28 @@ fn enum_with_vec() { round_trip(&X::X(vec![25, 30, 0, 0, 0]), 11, 21); } -fn owned_vec(strs: Vec<&'static str>) -> Vec<String> { +fn owned_vec(strs: &[&'static str]) -> Vec<String> { strs.iter().map(|s| String::from(*s)).collect() } #[test] fn strings_using_dictionary() { - let data = vec!["abcd", "abcd", "def", "abcd", "abcd", "abcd", ""]; - round_trip(&owned_vec(data), 21, 23); + let data = ["abcd", "abcd", "def", "abcd", "abcd", "abcd", ""]; + round_trip(&owned_vec(&data), 21, 23); - let data = vec!["abcd", "abcd", "abcd", "abcd", "abcd"]; - round_trip(&owned_vec(data), 13, 15); + let data = ["abcd", "abcd", "abcd", "abcd", "abcd"]; + round_trip(&owned_vec(&data), 13, 15); - let data = vec!["abcd", "abcd", "abcd", "abcd", "abcd", "def", "def"]; - round_trip(&owned_vec(data), 17, 20); + let data = ["abcd", "abcd", "abcd", "abcd", "abcd", "def", "def"]; + round_trip(&owned_vec(&data), 17, 20); - let data = vec!["abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "def"]; - round_trip(&owned_vec(data), 17, 20); + let data = ["abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "def"]; + round_trip(&owned_vec(&data), 17, 20); } #[test] fn nested_strings_using_rle() { - let data = (owned_vec(vec!["abc", "abc", "abc"]), owned_vec(vec!["def", "def", "def"]), 1u32); + let data = (owned_vec(&["abc", "abc", "abc"]), owned_vec(&["def", "def", "def"]), 1u32); round_trip(&data, 26, 30); } @@ -482,7 +482,7 @@ fn delta_prefix_var() { #[test] fn big_brotli_str() { - let data = owned_vec(vec![ + let data = owned_vec(&[ "id,name,host_id,host_name,neighbourhood_group,neighbourhood,latitude,longitude,room_type,price,minimum_nights,number_of_reviews,last_review,reviews_per_month,calculated_host_listings_count,availability_365", "2818,Quiet Garden View Room & Super Fast WiFi,3159,Daniel,,Oostelijk Havengebied - Indische Buurt,52.36575,4.94142,Private room,59,3,277,2019-11-21,2.13,1,0", "20168,Studio with private bathroom in the centre 1,59484,Alexander,,Centrum-Oost,52.36509,4.89354,Private room,80,1,306,2019-12-05,2.57,2,198", diff --git a/tree-buf/tests/schema_migrations.rs b/tree-buf/tests/schema_migrations.rs index 7947c1c..1355392 100644 --- a/tree-buf/tests/schema_migrations.rs
+++ b/tree-buf/tests/schema_migrations.rs @@ -45,10 +45,10 @@ fn canonical_idents_compatible() { #[test] fn fixed_array_to_vec() { let fixed = [0u8, 1, 2, 3, 4]; - let vec: Vec<_> = fixed.iter().copied().collect(); + let vec: Vec<_> = fixed.to_vec(); serialize_eq(&fixed, &vec, 8); let fixed = [fixed, fixed]; - let vec: Vec<Vec<_>> = fixed.iter().map(|f| f.iter().copied().collect()).collect(); + let vec: Vec<Vec<_>> = fixed.iter().map(|f| f.to_vec()).collect(); serialize_eq(&fixed, &vec, 14); } diff --git a/tree-buf/tests/schema_mismatches.rs b/tree-buf/tests/schema_mismatches.rs index 042d888..ef833e4 100644 --- a/tree-buf/tests/schema_mismatches.rs +++ b/tree-buf/tests/schema_mismatches.rs @@ -9,7 +9,7 @@ fn expect_schema_mismatch() { let result = decode::(&bytes); match result.unwrap_err() { DecodeError::SchemaMismatch => (), - _ => assert!(false), + _ => panic!(), } }