@@ -33,13 +33,10 @@ impl<T: Internable> Interned<T> {
         //   - if not, box it up, insert it, and return a clone
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
         // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash as u64, &obj) {
+        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &obj) {
             RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
             RawEntryMut::Vacant(vac) => Self {
-                arc: vac
-                    .insert_hashed_nocheck(hash as u64, Arc::new(obj), SharedValue::new(()))
-                    .0
-                    .clone(),
+                arc: vac.insert_hashed_nocheck(hash, Arc::new(obj), SharedValue::new(())).0.clone(),
             },
         }
     }
@@ -54,13 +51,10 @@ impl Interned<str> {
         //   - if not, box it up, insert it, and return a clone
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
         // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash as u64, s) {
+        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) {
             RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
             RawEntryMut::Vacant(vac) => Self {
-                arc: vac
-                    .insert_hashed_nocheck(hash as u64, Arc::from(s), SharedValue::new(()))
-                    .0
-                    .clone(),
+                arc: vac.insert_hashed_nocheck(hash, Arc::from(s), SharedValue::new(())).0.clone(),
             },
         }
     }
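
Context for reviewers: both hunks drop the `hash as u64` casts, presumably because the precomputed hash is already a `u64`, and collapse the vacant-arm insertion onto one line. For anyone unfamiliar with the pattern the comments describe, below is a minimal self-contained sketch of the same atomic check-or-insert interning scheme, using std's `Mutex<HashSet>` in place of dashmap's sharded `raw_entry_mut` API; every name in it is hypothetical and not this crate's actual API.

```rust
use std::collections::HashSet;
use std::sync::{Arc, Mutex, OnceLock};

/// Global interner storage; a single Mutex stands in for the sharded map.
fn storage() -> &'static Mutex<HashSet<Arc<str>>> {
    static STORAGE: OnceLock<Mutex<HashSet<Arc<str>>>> = OnceLock::new();
    STORAGE.get_or_init(|| Mutex::new(HashSet::new()))
}

/// Check-or-insert, made atomic by holding the lock across both steps,
/// so no other thread can insert `s` between the lookup and the insert.
fn intern(s: &str) -> Arc<str> {
    let mut set = storage().lock().unwrap();
    if let Some(existing) = set.get(s) {
        // Already interned: hand out a clone of the existing Arc.
        return existing.clone();
    }
    // Not present: box it up, insert it, and return a clone.
    let arc: Arc<str> = Arc::from(s);
    set.insert(arc.clone());
    arc
}

fn main() {
    let a = intern("hello");
    let b = intern("hello");
    // Both handles point at the same allocation.
    assert!(Arc::ptr_eq(&a, &b));
}
```

Unlike this sketch, which lets `HashSet` hash the key on every call, the code in the diff hashes the key once up front (to select a shard) and passes that hash to the `_hashed_nocheck` methods, avoiding a second hash computation.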