@@ -3,6 +3,7 @@ use core::mem::{self, ManuallyDrop};
3
3
use core:: ops:: { Deref , RangeBounds } ;
4
4
use core:: ptr:: NonNull ;
5
5
use core:: { cmp, fmt, hash, ptr, slice, usize} ;
6
use alloc::sync::Arc;
6
7
7
8
use alloc:: {
8
9
alloc:: { dealloc, Layout } ,
@@ -289,6 +290,74 @@ impl Bytes {
289
290
ret
290
291
}
291
292
293
+ /// Creates a new `Bytes` from an [`Arc<T>`] owner and a function that
294
+ /// returns the buffer given a reference to the contained `T`.
295
+ ///
296
+ /// `T` must be [`Sized`] rather than a trait object or slice.
297
+ ///
298
+ /// The returned `Bytes` can be cloned via `Arc` referencing counting,
299
+ /// whereas `Bytes` created via conversion from [`Vec`] must perform a new
300
+ /// allocation internally on first clone to hold the reference count.
301
+ /// This optimization is most significant if many `Bytes` instances are
302
+ /// created from the same `owner` and subsequently cloned.
303
+ ///
304
+ /// ```
305
+ /// # use std::sync::Arc;
306
+ /// # use bytes::Bytes;
307
+ /// struct Pieces(Vec<Vec<u8>>);
308
+ /// let pieces = Arc::new(Pieces(vec![b"hello".to_vec(), b"world".to_vec()]));
309
+ /// let bytes: Vec<Bytes> = (0..2).map(|i| {
310
+ /// Bytes::from_arc_projection(pieces.clone(), |p| &p.0[i])
311
+ /// }).collect();
312
+ /// let bytes_cloned = bytes.clone();
313
+ /// assert_eq!(bytes[0], b"hello"[..]);
314
+ /// assert_eq!(bytes_cloned[1], b"world"[..]);
315
+ /// ```
316
+ ///
317
+ /// See also [`Bytes::try_from_arc_projection`] for a fallible version.
318
+ pub fn from_arc_projection < T : Sync + ' static > (
319
+ owner : Arc < T > ,
320
+ projection : impl FnOnce ( & T ) -> & [ u8 ] ,
321
+ ) -> Self {
322
+ let buf = projection ( & * owner) ;
323
+ Bytes {
324
+ ptr : buf. as_ptr ( ) ,
325
+ len : buf. len ( ) ,
326
+ data : AtomicPtr :: new ( Arc :: into_raw ( owner) as * mut ( ) ) ,
327
+ vtable : arcproj_vtable :: < T > ( ) ,
328
+ }
329
+ }
330
+
331
+ /// Tries to creates a new `Bytes` from an [`Arc`] owner and a function that
332
+ /// returns the buffer given a reference to the contained `T` or fails.
333
+ ///
334
+ /// This is similar to [`Bytes::from_arc_projection`] but fallible.
335
+ ///
336
+ /// ```
337
+ /// # use std::sync::Arc;
338
+ /// # use bytes::Bytes;
339
+ /// struct Pieces(Vec<Vec<u8>>);
340
+ /// let pieces = Arc::new(Pieces(vec![b"hello".to_vec(), b"world".to_vec()]));
341
+ /// let bytes: Vec<Result<Bytes, &str>> = (0..3).map(|i| {
342
+ /// Bytes::try_from_arc_projection(pieces.clone(), |p| {
343
+ /// p.0.get(i).map(|v| &**v).ok_or("out of bounds")
344
+ /// })
345
+ /// }).collect();
346
+ /// assert_eq!(bytes, [Ok(b"hello"[..].into()), Ok(b"world"[..].into()), Err("out of bounds")]);
347
+ /// ```
348
+ pub fn try_from_arc_projection < T : Sync + ' static , E > (
349
+ owner : Arc < T > ,
350
+ projection : impl FnOnce ( & T ) -> Result < & [ u8 ] , E > ,
351
+ ) -> Result < Self , E > {
352
+ let buf = projection ( & * owner) ?;
353
+ Ok ( Bytes {
354
+ ptr : buf. as_ptr ( ) ,
355
+ len : buf. len ( ) ,
356
+ data : AtomicPtr :: new ( Arc :: into_raw ( owner) as * mut ( ) ) ,
357
+ vtable : arcproj_vtable :: < T > ( ) ,
358
+ } )
359
+ }
360
+
292
361
/// Returns the number of bytes contained in this `Bytes`.
293
362
///
294
363
/// # Examples
@@ -322,8 +391,9 @@ impl Bytes {
322
391
/// Returns true if this is the only reference to the data and
323
392
/// `Into<BytesMut>` would avoid cloning the underlying buffer.
324
393
///
325
- /// Always returns false if the data is backed by a [static slice](Bytes::from_static),
326
- /// or an [owner](Bytes::from_owner).
394
+ /// Always returns false if the data is backed by a
395
+ /// [static slice](Bytes::from_static), [owner](Bytes::from_owner),
396
+ /// or [Arc projection](Bytes::from_arc_projection).
327
397
///
328
398
/// The result of this method may be invalidated immediately if another
329
399
/// thread clones this value while this is being called. Ensure you have
@@ -627,8 +697,9 @@ impl Bytes {
627
697
/// If `self` is not unique for the entire original buffer, this will fail
628
698
/// and return self.
629
699
///
630
- /// This will also always fail if the buffer was constructed via either
631
- /// [from_owner](Bytes::from_owner) or [from_static](Bytes::from_static).
700
+ /// Always fails if the data is backed by a
701
+ /// [static slice](Bytes::from_static), [owner](Bytes::from_owner),
702
+ /// or [Arc projection](Bytes::from_arc_projection).
632
703
///
633
704
/// # Examples
634
705
///
@@ -1073,13 +1144,17 @@ impl fmt::Debug for Vtable {
1073
1144
}
1074
1145
}
1075
1146
1147
/// Shared `is_unique` implementation for vtables whose backing storage is
/// never exclusively owned by a single `Bytes` handle (static slices,
/// `from_owner` owners, and `Arc` projections): always reports `false`.
fn never_unique(_: &AtomicPtr<()>) -> bool {
    false
}
1150
+
1076
1151
// ===== impl StaticVtable =====

// Vtable for `Bytes` backed by a `&'static [u8]`: there is no allocation to
// track, so such instances are never considered unique and dropping is a
// no-op.
const STATIC_VTABLE: Vtable = Vtable {
    clone: static_clone,
    to_vec: static_to_vec,
    to_mut: static_to_mut,
    is_unique: never_unique,
    drop: static_drop,
};
1085
1160
@@ -1098,10 +1173,6 @@ unsafe fn static_to_mut(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> BytesM
1098
1173
BytesMut :: from ( slice)
1099
1174
}
1100
1175
1101
- fn static_is_unique ( _: & AtomicPtr < ( ) > ) -> bool {
1102
- false
1103
- }
1104
-
1105
1176
// Drop hook for the static vtable: a `&'static [u8]` owns no allocation, so
// this is intentionally a no-op.
unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
    // nothing to drop for &'static [u8]
}
@@ -1152,10 +1223,6 @@ unsafe fn owned_to_mut(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Byte
1152
1223
BytesMut :: from_vec ( owned_to_vec ( data, ptr, len) )
1153
1224
}
1154
1225
1155
- unsafe fn owned_is_unique ( _data : & AtomicPtr < ( ) > ) -> bool {
1156
- false
1157
- }
1158
-
1159
1226
unsafe fn owned_drop_impl ( owned : * mut ( ) ) {
1160
1227
let lifetime = owned. cast :: < OwnedLifetime > ( ) ;
1161
1228
let ref_cnt = & ( * lifetime) . ref_cnt ;
@@ -1183,7 +1250,7 @@ static OWNED_VTABLE: Vtable = Vtable {
1183
1250
clone : owned_clone,
1184
1251
to_vec : owned_to_vec,
1185
1252
to_mut : owned_to_mut,
1186
- is_unique : owned_is_unique ,
1253
+ is_unique : never_unique ,
1187
1254
drop : owned_drop,
1188
1255
} ;
1189
1256
@@ -1489,6 +1556,59 @@ unsafe fn shallow_clone_arc(shared: *mut Shared, ptr: *const u8, len: usize) ->
1489
1556
}
1490
1557
}
1491
1558
1559
+ fn arcproj_vtable < T : Sync > ( ) -> & ' static Vtable {
1560
+ // Produce vtable via const promotion to &'static.
1561
+ // <https://users.rust-lang.org/t/custom-vtables-with-integers/78508/2>
1562
+ trait V {
1563
+ const VTABLE : Vtable ;
1564
+ }
1565
+ impl < T : Sync > V for T {
1566
+ const VTABLE : Vtable = Vtable {
1567
+ clone : arcproj_clone :: < T > ,
1568
+ to_vec : arcproj_to_vec :: < T > ,
1569
+ to_mut : arcproj_to_mut :: < T > ,
1570
+ is_unique : never_unique,
1571
+ drop : arcproj_drop :: < T > ,
1572
+ } ;
1573
+ }
1574
+ & <T as V >:: VTABLE
1575
+ }
1576
+
1577
+ unsafe fn arcproj_clone < T : Sync > ( data : & AtomicPtr < ( ) > , ptr : * const u8 , len : usize ) -> Bytes {
1578
+ let arc = data. load ( Ordering :: Relaxed ) ;
1579
+
1580
+ // Replicate `Arc::increment_strong_count`, which has a MSRV of 1.51.0.
1581
+ let _ = std:: mem:: ManuallyDrop :: new ( Arc :: < T > :: from_raw ( arc as * const T ) ) . clone ( ) ;
1582
+
1583
+ Bytes {
1584
+ ptr,
1585
+ len,
1586
+ data : AtomicPtr :: new ( arc) ,
1587
+ vtable : arcproj_vtable :: < T > ( ) ,
1588
+ }
1589
+ }
1590
+
1591
// `to_vec` hook for Arc-projection `Bytes`: copies the projected bytes into
// a fresh `Vec`, then releases this handle's reference to the owner.
//
// The copy must happen before `arcproj_drop_impl` runs, since `ptr`/`len`
// are only valid while the `Arc` held in `data` keeps the owner alive.
unsafe fn arcproj_to_vec<T: Sync>(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
    let vec = slice::from_raw_parts(ptr, len).to_vec();
    arcproj_drop_impl::<T>(data);
    vec
}
1596
+
1597
// `to_mut` hook for Arc-projection `Bytes`: the owner's buffer can never be
// mutated in place (it is shared via the `Arc`), so always copy into a new
// `BytesMut`, then release this handle's reference to the owner.
//
// The copy must happen before `arcproj_drop_impl` runs, since `ptr`/`len`
// are only valid while the `Arc` held in `data` keeps the owner alive.
unsafe fn arcproj_to_mut<T: Sync>(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> BytesMut {
    let out = BytesMut::from(slice::from_raw_parts(ptr, len));
    arcproj_drop_impl::<T>(data);
    out
}
1602
+
1603
// `drop` hook for Arc-projection `Bytes`: releases this handle's strong
// reference to the owner (the buffer pointer itself owns nothing).
unsafe fn arcproj_drop<T: Sync>(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
    arcproj_drop_impl::<T>(data);
}
1606
+
1607
// Releases one strong reference to the projected owner by reconstructing
// and dropping the `Arc`.
//
// Replicate `Arc::decrement_strong_count`, which has an MSRV of 1.51.0.
// Relaxed is sufficient for the pointer load: `data` is written once at
// construction and never swapped for this vtable; `Arc::drop` performs the
// refcount synchronization itself.
unsafe fn arcproj_drop_impl<T: Sync>(data: &AtomicPtr<()>) {
    drop(Arc::from_raw(data.load(Ordering::Relaxed) as *const T));
}
1611
+
1492
1612
#[ cold]
1493
1613
unsafe fn shallow_clone_vec (
1494
1614
atom : & AtomicPtr < ( ) > ,
0 commit comments