Skip to content

Commit

Permalink
Merge pull request #54 from GrigorenkoPV/clippy
Browse files Browse the repository at this point in the history
Apply (or silence) most of the clippy suggestions
  • Loading branch information
droundy authored Jul 19, 2024
2 parents e521f1c + 7a0ead2 commit fdeaa4f
Show file tree
Hide file tree
Showing 7 changed files with 50 additions and 54 deletions.
35 changes: 16 additions & 19 deletions benches/bench.rs
Original file line number Diff line number Diff line change
Expand Up @@ -555,11 +555,11 @@ fn main() {

println!(
"Intern<i64>::new {}",
bench_gen_env(|| rand::random::<i64>(), |x| Intern::new(*x))
bench_gen_env(rand::random::<i64>, |x| Intern::new(*x))
);
println!(
"Intern<u8>::new {}",
bench_gen_env(|| rand::random::<u8>(), |x| Intern::new(*x))
bench_gen_env(rand::random::<u8>, |x| Intern::new(*x))
);
{
fn mkset(s: &mut HashSet<String>) -> HashSet<Intern<String>> {
Expand Down Expand Up @@ -607,8 +607,8 @@ fn main() {
s.0.remove(x);
}
}
let s1: HashSet<_> = s1.iter().cloned().map(|s| Intern::new(s)).collect();
let s2: HashSet<_> = s2.iter().cloned().map(|s| Intern::new(s)).collect();
let s1: HashSet<_> = s1.iter().cloned().map(Intern::new).collect();
let s2: HashSet<_> = s2.iter().cloned().map(Intern::new).collect();
let s1_str: HashSet<Intern<str>> =
s1.iter().cloned().map(|s| From::from(s.as_str())).collect();
let s2_str: HashSet<Intern<str>> =
Expand Down Expand Up @@ -655,16 +655,19 @@ fn main() {
);
}
let i = Intern::new(7i64);
println!("Intern<i64>::clone {}", bench(|| i.clone()));
#[allow(clippy::clone_on_copy)]
{
println!("Intern<i64>::clone {}", bench(|| i.clone()))
};
println!();

println!(
"ArcIntern<i64>::new {}",
bench_gen_env(|| rand::random::<i64>(), |x| ArcIntern::new(*x))
bench_gen_env(rand::random::<i64>, |x| ArcIntern::new(*x))
);
println!(
"ArcIntern<u8>::new {}",
bench_gen_env(|| rand::random::<u8>(), |x| ArcIntern::new(*x))
bench_gen_env(rand::random::<u8>, |x| ArcIntern::new(*x))
);
{
fn mkset(s: &mut HashSet<String>) -> HashSet<ArcIntern<String>> {
Expand Down Expand Up @@ -699,8 +702,8 @@ fn main() {
s.0.remove(x);
}
}
let s1: HashSet<_> = s1.iter().cloned().map(|s| ArcIntern::new(s)).collect();
let s2: HashSet<_> = s2.iter().cloned().map(|s| ArcIntern::new(s)).collect();
let s1: HashSet<_> = s1.iter().cloned().map(ArcIntern::new).collect();
let s2: HashSet<_> = s2.iter().cloned().map(ArcIntern::new).collect();
println!(
"ArcIntern<String>::compare/hash {}",
bench_gen_env(|| (s1.clone(), s2.clone()), rmset)
Expand All @@ -724,17 +727,11 @@ fn main() {

println!(
"arc_interner::ArcIntern<i64>::new {}",
bench_gen_env(
|| rand::random::<i64>(),
|x| arc_interner::ArcIntern::new(*x)
)
bench_gen_env(rand::random::<i64>, |x| arc_interner::ArcIntern::new(*x))
);
println!(
"arc_interner::ArcIntern<u8>::new {}",
bench_gen_env(
|| rand::random::<u8>(),
|x| arc_interner::ArcIntern::new(*x)
)
bench_gen_env(rand::random::<u8>, |x| arc_interner::ArcIntern::new(*x))
);
{
fn mkset(s: &mut HashSet<String>) -> HashSet<arc_interner::ArcIntern<String>> {
Expand Down Expand Up @@ -766,12 +763,12 @@ fn main() {
let s1: HashSet<_> = s1
.iter()
.cloned()
.map(|s| arc_interner::ArcIntern::new(s))
.map(arc_interner::ArcIntern::new)
.collect();
let s2: HashSet<_> = s2
.iter()
.cloned()
.map(|s| arc_interner::ArcIntern::new(s))
.map(arc_interner::ArcIntern::new)
.collect();
println!(
"arc_interner::ArcIntern<String>::compare/hash {}",
Expand Down
3 changes: 2 additions & 1 deletion src/arc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -293,6 +293,7 @@ impl<T: ?Sized + Eq + Hash + Send + Sync> Drop for ArcIntern<T> {
// removed is declared before m, so the mutex guard will be
// dropped *before* the removed content is dropped, since it
// might need to lock the mutex.
#[allow(clippy::needless_late_init)]
let _remove;
let m = Self::get_container();
_remove = m.remove(unsafe { self.pointer.as_ref() });
Expand Down Expand Up @@ -355,7 +356,7 @@ impl<T: ?Sized + Eq + Hash + Send + Sync> Hash for ArcIntern<T> {

impl<T: ?Sized + Eq + Hash + Send + Sync> PartialEq for ArcIntern<T> {
fn eq(&self, other: &Self) -> bool {
self.get_pointer() == other.get_pointer()
std::ptr::eq(self.get_pointer(), other.get_pointer())
}
}
impl<T: ?Sized + Eq + Hash + Send + Sync> Eq for ArcIntern<T> {}
Expand Down
10 changes: 6 additions & 4 deletions src/arc_dst.rs
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ impl<T: ?Sized + Eq + Hash + Send + Sync + 'static> ArcIntern<T> {

impl From<&str> for ArcIntern<str> {
fn from(s: &str) -> Self {
ArcIntern::<str>::new_with_copyable_init_val(s, |s| RefCount::<str>::from_str(s))
ArcIntern::<str>::new_with_copyable_init_val(s, RefCount::<str>::from_str)
}
}

Expand Down Expand Up @@ -177,7 +177,7 @@ where
// implement some useful equal comparisons
macro_rules! impl_eq {
([$($vars:tt)*] $lhs:ty, $rhs: ty) => {
#[allow(unused_lifetimes)]
#[allow(unused_lifetimes, clippy::partialeq_ne_impl)]
impl<'a, $($vars)*> PartialEq<$rhs> for $lhs {
#[inline]
fn eq(&self, other: &$rhs) -> bool {
Expand All @@ -189,7 +189,7 @@ macro_rules! impl_eq {
}
}

#[allow(unused_lifetimes)]
#[allow(unused_lifetimes, clippy::partialeq_ne_impl)]
impl<'a, $($vars)*> PartialEq<$lhs> for $rhs {
#[inline]
fn eq(&self, other: &$lhs) -> bool {
Expand Down Expand Up @@ -441,7 +441,9 @@ fn dst_memory_aligned() {
// Arrays are laid out so that the zero-based nth element of the array is
// offset from the start of the array by n * size_of::<T>() bytes.
let addr0 = &ptr.data as *const [Aligned] as *const Aligned as usize;
assert_eq!(addr0 % $align, 0);

#[allow(clippy::modulo_one)]
{ assert_eq!(addr0 % $align, 0) };
for idx in 1..10 {
let addr_offset = &ptr.data[idx] as *const _ as usize;
assert_eq!(addr0 + idx * std::mem::size_of::<Aligned>(), addr_offset);
Expand Down
16 changes: 7 additions & 9 deletions src/arena.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,9 +65,7 @@ impl<'a, T: ?Sized + deepsize::DeepSizeOf> deepsize::DeepSizeOf for ArenaIntern<

impl<'a, T: ?Sized> Clone for ArenaIntern<'a, T> {
fn clone(&self) -> Self {
ArenaIntern {
pointer: self.pointer,
}
*self
}
}
impl<'a, T: ?Sized> Copy for ArenaIntern<'a, T> {}
Expand Down Expand Up @@ -150,7 +148,7 @@ impl Arena<str> {
/// If this value has not previously been interned, then `intern` will
/// allocate a spot for the value on the heap. Otherwise, it will return a
/// pointer to the `str` previously allocated.
pub fn intern<'a, 'b>(&'a self, val: &'b str) -> ArenaIntern<'a, str> {
pub fn intern<'a>(&'a self, val: &str) -> ArenaIntern<'a, str> {
self.intern_ref(val)
}
/// Intern a `String` as `ArenaIntern<str>`.
Expand Down Expand Up @@ -185,7 +183,7 @@ impl Arena<std::ffi::CStr> {
/// let y = arena.intern(std::ffi::CString::new("hello").unwrap().as_c_str());
/// assert_eq!(x, y);
/// ```
pub fn intern<'a, 'b>(&'a self, val: &'b std::ffi::CStr) -> ArenaIntern<'a, std::ffi::CStr> {
pub fn intern<'a>(&'a self, val: &std::ffi::CStr) -> ArenaIntern<'a, std::ffi::CStr> {
self.intern_ref(val)
}
/// Intern a `CString` as `ArenaIntern<CStr>`.
Expand Down Expand Up @@ -238,7 +236,7 @@ impl Arena<std::ffi::OsStr> {
/// let y = arena.intern(std::ffi::OsStr::new("hello"));
/// assert_eq!(x, y);
/// ```
pub fn intern<'a, 'b>(&'a self, val: &'b std::ffi::OsStr) -> ArenaIntern<'a, std::ffi::OsStr> {
pub fn intern<'a>(&'a self, val: &std::ffi::OsStr) -> ArenaIntern<'a, std::ffi::OsStr> {
self.intern_ref(val)
}
/// Intern an `OsString` as `ArenaIntern<OsStr>`.
Expand Down Expand Up @@ -291,7 +289,7 @@ impl Arena<std::path::Path> {
/// let y = arena.intern(std::path::Path::new("hello"));
/// assert_eq!(x, y);
/// ```
pub fn intern<'a, 'b>(&'a self, val: &'b std::path::Path) -> ArenaIntern<'a, std::path::Path> {
pub fn intern<'a>(&'a self, val: &std::path::Path) -> ArenaIntern<'a, std::path::Path> {
self.intern_ref(val)
}
/// Intern a `PathBuf` as `ArenaIntern<Path>`.
Expand Down Expand Up @@ -335,7 +333,7 @@ impl<T: Eq + Hash + Copy> Arena<[T]> {
/// If this value has not previously been interned, then `intern` will
/// allocate a spot for the value on the heap. Otherwise, it will return a
/// pointer to the `[T]` previously allocated.
pub fn intern<'a, 'b>(&'a self, val: &'b [T]) -> ArenaIntern<'a, [T]> {
pub fn intern<'a>(&'a self, val: &[T]) -> ArenaIntern<'a, [T]> {
self.intern_ref(val)
}
/// Intern a `Vec<T>` as `ArenaIntern<[T]>`.
Expand Down Expand Up @@ -474,7 +472,7 @@ impl<'a, T: ?Sized> Hash for ArenaIntern<'a, T> {

impl<'a, T: ?Sized> PartialEq for ArenaIntern<'a, T> {
fn eq(&self, other: &Self) -> bool {
self.get_pointer() == other.get_pointer()
std::ptr::eq(self.get_pointer(), other.get_pointer())
}
}
impl<'a, T: ?Sized> Eq for ArenaIntern<'a, T> {}
Expand Down
13 changes: 6 additions & 7 deletions src/intern.rs
Original file line number Diff line number Diff line change
Expand Up @@ -91,9 +91,7 @@ fn has_niche() {

impl<T: ?Sized> Clone for Intern<T> {
fn clone(&self) -> Self {
Intern {
pointer: self.pointer,
}
*self
}
}

Expand Down Expand Up @@ -158,7 +156,7 @@ impl<T: Eq + Hash + Send + Sync + 'static + ?Sized> From<Box<T>> for Intern<T> {
if let Some(&b) = m.get(val.borrow()) {
return Intern { pointer: b };
}
let p: &'static T = Box::leak(Box::from(val));
let p: &'static T = Box::leak(val);
m.insert(p);
Intern { pointer: p }
})
Expand Down Expand Up @@ -227,6 +225,7 @@ impl<T: Eq + Hash + Send + Sync + 'static> Intern<T> {
impl<T: Eq + Hash + Send + Sync + 'static + ?Sized> Intern<T> {
/// Get a long-lived reference to the data pointed to by an `Intern`, which
/// is never freed from the intern pool.
#[allow(clippy::should_implement_trait)]
pub fn as_ref(self) -> &'static T {
self.pointer
}
Expand All @@ -239,7 +238,7 @@ impl<T: Eq + Hash + Send + Sync + 'static + ?Sized> Intern<T> {
/// Only for benchmarking, this will cause problems
#[cfg(feature = "bench")]
pub fn benchmarking_only_clear_interns() {
INTERN_CONTAINERS.with(|m: &mut HashSet<&'static T>| -> () { m.clear() })
INTERN_CONTAINERS.with(|m: &mut HashSet<&'static T>| m.clear())
}
}

Expand Down Expand Up @@ -292,11 +291,11 @@ const fn sz<T>() -> u64 {
impl<T: Debug> Fits64 for Intern<T> {
unsafe fn from_u64(x: u64) -> Self {
Intern {
pointer: &*(((x ^ heap_location() / sz::<T>()) * sz::<T>()) as *const T),
pointer: &*(((x ^ (heap_location() / sz::<T>())) * sz::<T>()) as *const T),
}
}
fn to_u64(self) -> u64 {
self.get_pointer() as u64 / sz::<T>() ^ heap_location() / sz::<T>()
(self.get_pointer() as u64 / sz::<T>()) ^ (heap_location() / sz::<T>())
}
}
#[test]
Expand Down
12 changes: 6 additions & 6 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,16 +15,16 @@
//! You have three options with the internment crate:
//!
//! 1. `Intern`, which will never free your data. This means that an
//! `Intern` is `Copy`, so you can make as many copies of the pointer
//! as you may care to at no cost.
//! `Intern` is `Copy`, so you can make as many copies of the pointer
//! as you may care to at no cost.
//!
//! 2. `ArcIntern`, which reference-counts your data and frees it when
//! there are no more references. `ArcIntern` will keep memory use
//! down, but uses an atomic increment/decrement whenever a clone of
//! your pointer is made, or a pointer is dropped.
//! there are no more references. `ArcIntern` will keep memory use
//! down, but uses an atomic increment/decrement whenever a clone of
//! your pointer is made, or a pointer is dropped.
//!
//! 3. `ArenaIntern`, which stores its data in an `Arena`, with the data being freed
//! when the arena itself is freed. Requires feature `arena`.
//! when the arena itself is freed. Requires feature `arena`.
//!
//! In each case, accessing your data is a single pointer dereference, and the size
//! of any internment data structure (`Intern` or `ArcIntern` or `ArenaIntern`) is a
Expand Down
15 changes: 7 additions & 8 deletions src/typearena.rs
Original file line number Diff line number Diff line change
Expand Up @@ -152,9 +152,7 @@ fn has_niche() {

impl<T: ?Sized> Clone for Intern<T> {
fn clone(&self) -> Self {
Intern {
pointer: self.pointer,
}
*self
}
}

Expand Down Expand Up @@ -217,7 +215,7 @@ impl<T: Eq + Hash + Send + Sync + 'static + ?Sized> From<Box<T>> for Intern<T> {
if let Some(&b) = m.get(val.borrow()) {
return Intern { pointer: b };
}
let p: &'static T = Box::leak(Box::from(val));
let p: &'static T = Box::leak(val);
m.insert(p);
Intern { pointer: p }
})
Expand Down Expand Up @@ -280,6 +278,7 @@ impl<T: Eq + Hash + Send + Sync + 'static> Intern<T> {
impl<T: Any + Eq + Hash + Send + Sync + ?Sized> Intern<T> {
/// Get a long-lived reference to the data pointed to by an `Intern`, which
/// is never freed from the intern pool.
#[allow(clippy::should_implement_trait)]
pub fn as_ref(self) -> &'static T {
self.pointer
}
Expand All @@ -292,7 +291,7 @@ impl<T: Any + Eq + Hash + Send + Sync + ?Sized> Intern<T> {
/// Only for benchmarking, this will cause problems
#[cfg(feature = "bench")]
pub fn benchmarking_only_clear_interns() {
with_mutex_hashset(|m: &mut HashSet<&'static T>| -> () { m.clear() })
with_mutex_hashset(|m: &mut HashSet<&'static T>| m.clear())
}
}

Expand Down Expand Up @@ -341,11 +340,11 @@ const fn sz<T>() -> u64 {
impl<T: Debug> Fits64 for Intern<T> {
unsafe fn from_u64(x: u64) -> Self {
Intern {
pointer: &*(((x ^ heap_location() / sz::<T>()) * sz::<T>()) as *const T),
pointer: &*(((x ^ (heap_location() / sz::<T>())) * sz::<T>()) as *const T),
}
}
fn to_u64(self) -> u64 {
self.get_pointer() as u64 / sz::<T>() ^ heap_location() / sz::<T>()
(self.get_pointer() as u64 / sz::<T>()) ^ (heap_location() / sz::<T>())
}
}
#[test]
Expand Down Expand Up @@ -418,7 +417,7 @@ impl<T: Eq + Hash + Send + Sync + ?Sized> Hash for Intern<T> {

impl<T: Eq + Hash + Send + Sync + ?Sized> PartialEq for Intern<T> {
fn eq(&self, other: &Self) -> bool {
self.get_pointer() == other.get_pointer()
std::ptr::eq(self.get_pointer(), other.get_pointer())
}
}
impl<T: Eq + Hash + Send + Sync + ?Sized> Eq for Intern<T> {}
Expand Down

0 comments on commit fdeaa4f

Please sign in to comment.