@@ -2198,6 +2198,112 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
     }
 }

+impl<T: Clone, A: Allocator + Clone> Arc<[T], A> {
+    /// Makes a mutable reference into the given `Arc<[T]>`.
+    ///
+    /// This function has the same purpose and characteristics as [`make_mut`](Self::make_mut),
+    /// except that it applies to slices rather than [`Sized`] values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(make_mut_slice)]
+    /// use std::sync::Arc;
+    ///
+    /// let mut data: Arc<[i32]> = Arc::new([10, 20, 30]);
+    ///
+    /// Arc::make_mut_slice(&mut data)[0] += 1;         // Won't clone anything
+    /// let mut other_data = Arc::clone(&data);         // Won't clone inner data
+    /// Arc::make_mut_slice(&mut data)[1] += 1;         // Clones inner data
+    /// Arc::make_mut_slice(&mut data)[2] += 1;         // Won't clone anything
+    /// Arc::make_mut_slice(&mut other_data)[0] *= 10;  // Won't clone anything
+    ///
+    /// // Now `data` and `other_data` point to different allocations.
+    /// assert_eq!(*data, [11, 21, 31]);
+    /// assert_eq!(*other_data, [110, 20, 30]);
+    /// ```
+    ///
+    /// [`Weak`] pointers will be disassociated:
+    ///
+    /// ```
+    /// #![feature(make_mut_slice)]
+    /// use std::sync::Arc;
+    ///
+    /// let mut data: Arc<[i32]> = Arc::new([75]);
+    /// let weak = Arc::downgrade(&data);
+    ///
+    /// assert!([75] == *data);
+    /// assert!([75] == *weak.upgrade().unwrap());
+    ///
+    /// Arc::make_mut_slice(&mut data)[0] += 1;
+    ///
+    /// assert!([76] == *data);
+    /// assert!(weak.upgrade().is_none());
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    #[unstable(feature = "make_mut_slice", issue = "none")]
+    pub fn make_mut_slice(this: &mut Self) -> &mut [T] {
+        // This code is identical to `make_mut()` except that it clones `len()` items instead of 1.
+
+        let len = this.len();
+
+        // Note that we hold both a strong reference and a weak reference.
+        // Thus, releasing our strong reference only will not, by itself, cause
+        // the memory to be deallocated.
+        //
+        // Use Acquire to ensure that we see any writes to `weak` that happen
+        // before release writes (i.e., decrements) to `strong`. Since we hold a
+        // weak count, there's no chance the ArcInner itself could be
+        // deallocated.
+        if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
+            // Another strong pointer exists, so we must clone.
+            // Pre-allocate memory to allow writing the cloned values directly.
+            let mut arc = Self::new_uninit_slice_in(len, this.alloc.clone());
+            unsafe {
+                let data = Arc::get_mut_unchecked(&mut arc);
+                for i in 0..len {
+                    this[i].write_clone_into_raw(data[i].as_mut_ptr());
+                }
+                *this = arc.assume_init();
+            }
+        } else if this.inner().weak.load(Relaxed) != 1 {
+            // Relaxed suffices in the above because this is fundamentally an
+            // optimization: we are always racing with weak pointers being
+            // dropped. Worst case, we end up allocating a new Arc unnecessarily.
+
+            // We removed the last strong ref, but there are additional weak
+            // refs remaining. We'll move the contents to a new Arc, and
+            // invalidate the other weak refs.
+
+            // Note that it is not possible for the read of `weak` to yield
+            // usize::MAX (i.e., locked), since the weak count can only be
+            // locked by a thread with a strong reference.
+
+            // Materialize our own implicit weak pointer, so that it can clean
+            // up the ArcInner as needed.
+            let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() };
+
+            // Can just steal the data; all that's left is Weaks.
+            let mut arc = Self::new_uninit_slice_in(len, this.alloc.clone());
+            unsafe {
+                let data = Arc::get_mut_unchecked(&mut arc);
+                mem::MaybeUninit::slice_as_mut_ptr(data)
+                    .copy_from_nonoverlapping(this.as_ptr(), len);
+                ptr::write(this, arc.assume_init());
+            }
+        } else {
+            // We were the sole reference of either kind; bump back up the
+            // strong ref count.
+            this.inner().strong.store(1, Release);
+        }
+
+        // As with `get_mut()`, the unsafety is ok because our reference was
+        // either unique to begin with, or became one upon cloning the contents.
+        unsafe { Self::get_mut_unchecked(this) }
+    }
+}
+
 impl<T: ?Sized, A: Allocator> Arc<T, A> {
     /// Returns a mutable reference into the given `Arc`, if there are
     /// no other `Arc` or [`Weak`] pointers to the same allocation.
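
For context on why a dedicated slice method is needed: as the doc comment above notes, `Arc::make_mut` only applies to `Sized`, `Clone` pointees, so it cannot be called on an `Arc<[T]>`. Below is a minimal sketch of the clone-on-write dance callers have to write by hand today using only stable APIs; the `cow_slice` helper name is hypothetical and not part of this patch.

```rust
use std::sync::Arc;

/// Hypothetical stand-in for `Arc::make_mut_slice`, built from stable APIs.
fn cow_slice<T: Clone>(this: &mut Arc<[T]>) -> &mut [T] {
    if Arc::get_mut(this).is_none() {
        // Shared (or observed by `Weak`s): clone the elements into a fresh
        // allocation and swap it in.
        let fresh: Arc<[T]> = this.iter().cloned().collect();
        *this = fresh;
    }
    // The allocation is now uniquely owned, so `get_mut` succeeds.
    Arc::get_mut(this).expect("just made unique")
}

fn main() {
    let mut data: Arc<[i32]> = Arc::new([10, 20, 30]);
    let other = Arc::clone(&data);
    cow_slice(&mut data)[0] += 1; // clones, because `other` still shares the allocation
    assert_eq!(*data, [11, 20, 30]);
    assert_eq!(*other, [10, 20, 30]);
}
```

Note the behavioral difference: this fallback clones the elements whenever any `Weak` exists, whereas `make_mut_slice` (like `make_mut`) moves the data out and disassociates the `Weak`s when it holds the last strong reference.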
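
The control flow in the body mirrors `make_mut`: a provisional `strong` 1 -> 0 step distinguishes the three cases the comments describe (other strong refs exist, only `Weak`s are left, or we are the sole owner). The following is a stripped-down, self-contained model of just that check, with plain `AtomicUsize` values standing in for the `ArcInner` counters; it is illustration only, not the real layout or cleanup logic.

```rust
use std::sync::atomic::{
    AtomicUsize,
    Ordering::{Acquire, Relaxed, Release},
};

// `strong` / `weak` stand in for the ArcInner refcount fields.
fn classify(strong: &AtomicUsize, weak: &AtomicUsize) -> &'static str {
    // Provisionally move the strong count 1 -> 0. Acquire pairs with the
    // Release decrements performed when other clones are dropped, so their
    // writes are visible before we would hand out `&mut` access.
    if strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
        "shared: clone the elements into a new allocation"
    } else if weak.load(Relaxed) != 1 {
        "only Weaks left: move the elements out and disassociate the Weaks"
    } else {
        // Sole reference of either kind: undo the provisional step and
        // mutate in place.
        strong.store(1, Release);
        "unique: mutate in place"
    }
}

fn main() {
    // Sole owner: one strong reference plus the implicit weak it holds.
    let (strong, weak) = (AtomicUsize::new(1), AtomicUsize::new(1));
    assert_eq!(classify(&strong, &weak), "unique: mutate in place");

    // A second strong reference forces the clone path.
    strong.store(2, Relaxed);
    assert_eq!(classify(&strong, &weak), "shared: clone the elements into a new allocation");
}
```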