diff --git a/libs/simple/src/alloc.rs b/libs/simple/src/alloc.rs index b9c8c1b..0eed9d4 100644 --- a/libs/simple/src/alloc.rs +++ b/libs/simple/src/alloc.rs @@ -28,17 +28,31 @@ pub const MAXIMUM_SMALL_WORDS: usize = 32; /// The alignment of elements in the arena pub const ARENA_ELEMENT_ALIGN: usize = mem::align_of::(); -#[inline] -pub const fn small_object_size() -> usize { - let header_layout = Layout::new::(); - header_layout.size() + header_layout - .padding_needed_for(std::mem::align_of::()) - + mem::size_of::() +#[derive(Copy, Clone)] +pub struct SmallObjectSize { + pub val_size: usize, + pub val_align: usize } -#[inline] -pub const fn is_small_object() -> bool { - small_object_size::() <= MAXIMUM_SMALL_WORDS * 8 - && mem::align_of::() <= ARENA_ELEMENT_ALIGN +impl SmallObjectSize { + #[inline] + pub const fn of() -> SmallObjectSize { + SmallObjectSize { + val_size: std::mem::size_of::(), + val_align: std::mem::align_of::() + } + } + #[inline] + pub const fn is_small(&self) -> bool { + self.object_size() <= MAXIMUM_SMALL_WORDS * 8 + && self.val_align <= ARENA_ELEMENT_ALIGN + } + #[inline] + pub const fn object_size(&self) -> usize { + let header_layout = Layout::new::(); + header_layout.size() + header_layout + .padding_needed_for(self.val_align) + self.val_size + } + } pub(crate) struct Chunk { diff --git a/libs/simple/src/lib.rs b/libs/simple/src/lib.rs index 189600e..7e36b69 100644 --- a/libs/simple/src/lib.rs +++ b/libs/simple/src/lib.rs @@ -28,7 +28,7 @@ use std::sync::atomic::{AtomicUsize, AtomicBool, Ordering}; use std::sync::atomic::AtomicPtr; #[cfg(not(feature = "multiple-collectors"))] use std::marker::PhantomData; -use std::any::TypeId; +use std::any::{TypeId, Any}; use slog::{Logger, FnValue, debug}; @@ -49,11 +49,27 @@ use zerogc_context::{ mod alloc; #[cfg(not(feature = "small-object-arenas"))] mod alloc { - pub const fn is_small_object() -> bool { - false - } - pub const fn small_object_size() -> usize { - unimplemented!() + #[derive(Copy, 
assert_eq!(id, self.expected_collector);
+ * How do we handle dropping dynamically-dispatched types + */ + drop_func: Some(std::mem::transmute::< + unsafe fn(*mut dyn GcSafe), + unsafe fn(*mut c_void) + >(std::ptr::drop_in_place::)), + + } + ) + } else { + // Just ignore ids from other collectors + Ok(()) + } + } } impl MarkVisitor<'_> { /// Visit a GC type whose [::zerogc::CollectorId] matches our own @@ -710,7 +752,7 @@ impl MarkVisitor<'_> { // Verify this again (should be checked by caller) debug_assert_eq!(gc.collector_id(), self.expected_collector); let header = GcHeader::from_value_ptr( - gc.as_raw_ptr(), + gc.as_raw_ptr() as *mut u8, T::STATIC_TYPE ); self.visit_raw_gc(&mut *header, |obj, visitor| { @@ -804,7 +846,14 @@ impl StaticGcType for T { /// and fallback alloc vis `BigGcObject` #[repr(C)] struct GcHeader { - type_info: &'static GcType, + /// If this type is a `Sized` type whose info was + /// statically known at allocation, this type-info will be present + /// + /// Otherwise, if the object is a trait-object, a slice, or a `str` + /// this will be missing. The other field of the fat-pointer + /// will be one past the end. The data-pointer part of the fat-pointer + /// can be reconstructed from from [Gc::as_raw_ptr] + type_info: Option<&'static GcType>, /* * NOTE: State byte should come last * If the value is small `(u32)`, we could reduce @@ -829,8 +878,8 @@ impl GcHeader { } } #[inline] - pub unsafe fn from_value_ptr(ptr: *mut T, static_type: &GcType) -> *mut GcHeader { - (ptr as *mut u8).sub(static_type.value_offset).cast() + pub unsafe fn from_value_ptr(ptr: *mut u8, static_type: &GcType) -> *mut GcHeader { + ptr.sub(static_type.value_offset).cast() } #[inline] fn raw_state(&self) -> RawMarkState { diff --git a/src/cell.rs b/src/cell.rs index 85287e4..b8fdf79 100644 --- a/src/cell.rs +++ b/src/cell.rs @@ -15,13 +15,16 @@ //! and it'll generate a safe wrapper. 
use core::cell::Cell; -use crate::{GcSafe, Trace, GcVisitor, NullTrace, TraceImmutable, GcDirectBarrier,}; +use crate::prelude::*; /// A `Cell` pointing to a garbage collected object. /// -/// This only supports mutating `NullTrace` types, -/// becuase garbage collected pointers need write barriers. -#[derive(Default, Clone, Debug)] +/// This only supports mutating `NullTrace` types directly, +/// because garbage collected pointers need write barriers. +/// +/// However, other types can be mutated through auto-generated +/// accessors on the type (using the [GcDirectBarrier] trait). +#[derive(Default, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] #[repr(transparent)] pub struct GcCell(Cell); impl GcCell { @@ -61,14 +64,16 @@ impl GcCell { self.0.set(value) } } -unsafe impl<'gc, OwningRef, Value> GcDirectBarrier<'gc, OwningRef> for GcCell - where Value: GcDirectBarrier<'gc, OwningRef> + Copy { +/// Trigger a write barrier on the inner value +/// +/// We are a 'direct' write barrier because `Value` is stored inline +unsafe impl<'gc, OwningRef, Value> crate::GcDirectBarrier<'gc, OwningRef> for GcCell + where Value: crate::GcDirectBarrier<'gc, OwningRef> + Copy { #[inline] unsafe fn write_barrier( &self, owner: &OwningRef, field_offset: usize ) { - // NOTE: We are direct write because `Value` is stored inline self.get().write_barrier(owner, field_offset) } } @@ -80,8 +85,6 @@ unsafe impl<'gc, OwningRef, Value> GcDirectBarrier<'gc, OwningRef> for GcCell Trace for GcCell { - const NEEDS_TRACE: bool = T::NEEDS_TRACE; - #[inline] fn visit(&mut self, visitor: &mut V) -> Result<(), V::Err> { visitor.visit(self.get_mut()) @@ -98,9 +101,16 @@ unsafe impl TraceImmutable for GcCell { self.set(value); Ok(()) } + + #[inline] + fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + self.visit_immutable(visitor) + } } unsafe impl NullTrace for GcCell {} -unsafe impl GcSafe for GcCell { +unsafe impl GcSafe for GcCell {} +unsafe impl GcType 
for GcCell { + const NEEDS_TRACE: bool = T::NEEDS_TRACE; /// Since T is Copy, we shouldn't need to be dropped const NEEDS_DROP: bool = false; -} +} \ No newline at end of file diff --git a/src/dummy_impl.rs b/src/dummy_impl.rs index 68d6216..5801e2a 100644 --- a/src/dummy_impl.rs +++ b/src/dummy_impl.rs @@ -1,9 +1,6 @@ //! Dummy collector implementation for testing -use crate::{ - Trace, TraceImmutable, GcVisitor, NullTrace, CollectorId, - GcSafe, GcSystem, GcContext, -}; +use crate::{Trace, TraceImmutable, GcVisitor, NullTrace, CollectorId, GcSafe, GcSystem, GcContext, GcType, GcDynVisitError, GcDynVisitor}; use std::ptr::NonNull; /// Fake a [Gc] that points to the specified value @@ -103,8 +100,6 @@ pub struct DummyCollectorId { _priv: () } unsafe impl Trace for DummyCollectorId { - const NEEDS_TRACE: bool = false; - fn visit(&mut self, _visitor: &mut V) -> Result<(), ::Err> { Ok(()) } @@ -113,6 +108,15 @@ unsafe impl TraceImmutable for DummyCollectorId { fn visit_immutable(&self, _visitor: &mut V) -> Result<(), V::Err> { Ok(()) } + + #[inline] + fn visit_dyn_immutable(&self, _visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + Ok(()) + } +} +unsafe impl GcType for DummyCollectorId { + const NEEDS_TRACE: bool = false; + const NEEDS_DROP: bool = false; } unsafe impl NullTrace for DummyCollectorId {} diff --git a/src/lib.rs b/src/lib.rs index c4b024a..3378e6b 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,6 +1,7 @@ // NOTE: Both these features have accepted RFCs #![feature( const_panic, // RFC 2345 - Const asserts + min_specialization, // RFC 1210 - Required for our blanket implementations of `dyn_trace` )] #![deny(missing_docs)] #![cfg_attr(not(feature = "std"), no_std)] @@ -29,6 +30,7 @@ use core::ptr::NonNull; use core::marker::PhantomData; use core::hash::{Hash, Hasher}; use core::fmt::{self, Debug, Formatter}; +use std::any::Any; #[macro_use] mod manually_traced; @@ -371,13 +373,13 @@ impl FrozenContext { /// /// It should be safe to assume that a 
collector exists /// if any of its pointers still do! -pub unsafe trait CollectorId: Copy + Eq + Debug + NullTrace + 'static { +pub unsafe trait CollectorId: Copy + Eq + Debug + NullTrace + ::std::any::Any + 'static { /// The type of the garbage collector system type System: GcSystem; /// Perform a write barrier before writing to a garbage collected field /// /// ## Safety - /// Smilar to the [GcDirectBarrier] trait, it can be assumed that + /// Similar to the [GcDirectBarrier] trait, it can be assumed that /// the field offset is correct and the types match. unsafe fn gc_write_barrier<'gc, T: GcSafe + ?Sized + 'gc, V: GcSafe + ?Sized + 'gc>( owner: &Gc<'gc, T, Self>, @@ -481,35 +483,38 @@ impl<'gc, T: GcSafe + ?Sized + 'gc, Id: CollectorId> Gc<'gc, T, Id> { } /// Double-indirection is completely safe -unsafe impl<'gc, T: GcSafe + 'gc, Id: CollectorId> GcSafe for Gc<'gc, T, Id> { - const NEEDS_DROP: bool = true; // We are Copy -} +unsafe impl<'gc, T, Id> GcSafe for Gc<'gc, T, Id> + where T: GcSafe + ?Sized + 'gc, Id: CollectorId {} /// Rebrand unsafe impl<'gc, 'new_gc, T, Id> GcRebrand<'new_gc, Id> for Gc<'gc, T, Id> - where T: GcSafe + GcRebrand<'new_gc, Id>, + where T: GcSafe + GcRebrand<'new_gc, Id> + ?Sized, >::Branded: GcSafe, Id: CollectorId, { type Branded = Gc<'new_gc, >::Branded, Id>; } unsafe impl<'gc, 'a, T, Id> GcErase<'a, Id> for Gc<'gc, T, Id> - where T: GcSafe + GcErase<'a, Id>, + where T: GcSafe + GcErase<'a, Id> + ?Sized, >::Erased: GcSafe, Id: CollectorId, { type Erased = Gc<'a, >::Erased, Id>; } -unsafe impl<'gc, T: GcSafe + 'gc, Id: CollectorId> Trace for Gc<'gc, T, Id> { - // We always need tracing.... - const NEEDS_TRACE: bool = true; - +unsafe impl<'gc, T, Id> Trace for Gc<'gc, T, Id> + where T: GcSafe + ?Sized + 'gc, Id: CollectorId { #[inline] - fn visit(&mut self, visitor: &mut V) -> Result<(), V::Err> { + fn visit(&mut self, visitor: &mut V) -> Result<(), V::Err> where Self: Sized { unsafe { // We're doing this correctly! 
V::visit_gc(visitor, self) } } } -impl<'gc, T: GcSafe + 'gc, Id: CollectorId> Deref for Gc<'gc, T, Id> { +unsafe impl<'gc, T: GcSafe + ?Sized + 'gc, Id: CollectorId> GcType for Gc<'gc, T, Id> { + /// We always need tracing.... + const NEEDS_TRACE: bool = true; + /// We are Copy + const NEEDS_DROP: bool = false; +} +impl<'gc, T: GcSafe + ?Sized + 'gc, Id: CollectorId> Deref for Gc<'gc, T, Id> { type Target = &'gc T; #[inline(always)] @@ -518,7 +523,7 @@ impl<'gc, T: GcSafe + 'gc, Id: CollectorId> Deref for Gc<'gc, T, Id> { } } unsafe impl<'gc, O, V, Id> GcDirectBarrier<'gc, Gc<'gc, O, Id>> for Gc<'gc, V,Id> - where O: GcSafe + 'gc, V: GcSafe + 'gc, Id: CollectorId { + where O: GcSafe + ?Sized + 'gc, V: GcSafe + ?Sized + 'gc, Id: CollectorId { #[inline(always)] unsafe fn write_barrier(&self, owner: &Gc<'gc, O, Id>, field_offset: usize) { Id::gc_write_barrier(owner, self, field_offset) @@ -533,21 +538,23 @@ impl<'gc, T: GcSafe + ?Sized + 'gc, Id: CollectorId> Clone for Gc<'gc, T, Id> { } } // Delegating impls -impl<'gc, T: GcSafe + Hash + 'gc, Id: CollectorId> Hash for Gc<'gc, T, Id> { +impl<'gc, T, Id> Hash for Gc<'gc, T, Id> + where T: GcSafe + ?Sized + Hash + 'gc, Id: CollectorId { #[inline] fn hash(&self, state: &mut H) { self.value().hash(state) } } -impl<'gc, T: GcSafe + PartialEq + 'gc, Id: CollectorId> PartialEq for Gc<'gc, T, Id> { +impl<'gc, T, Id> PartialEq for Gc<'gc, T, Id> + where T: GcSafe + ?Sized + PartialEq + 'gc, Id: CollectorId { #[inline] fn eq(&self, other: &Self) -> bool { // NOTE: We compare by value, not identity self.value() == other.value() } } -impl<'gc, T: GcSafe + Eq + 'gc, Id: CollectorId> Eq for Gc<'gc, T, Id> {} -impl<'gc, T: GcSafe + PartialEq + 'gc, Id: CollectorId> PartialEq for Gc<'gc, T, Id> { +impl<'gc, T: GcSafe + ?Sized + Eq + 'gc, Id: CollectorId> Eq for Gc<'gc, T, Id> {} +impl<'gc, T: GcSafe + ?Sized + PartialEq + 'gc, Id: CollectorId> PartialEq for Gc<'gc, T, Id> { #[inline] fn eq(&self, other: &T) -> bool { self.value() == 
other @@ -713,20 +720,13 @@ pub unsafe trait GcDirectBarrier<'gc, OwningRef>: Trace { /// and avoids a second pass over the objects /// to check for resurrected objects. pub unsafe trait GcSafe: Trace { - /// If this type needs a destructor run - /// - /// This is usually equivalent to `std::mem::needs_drop`. - /// However procedurally derived code can sometimes provide - /// a no-op drop implementation (for safety), - /// which would lead to a false positive with `std::mem::needs_drop()` - const NEEDS_DROP: bool; - /// Assert this type is GC safe /// /// Only used by procedural derive #[doc(hidden)] - fn assert_gc_safe() {} + fn assert_gc_safe() where Self: Sized {} } + /// Assert that a type implements Copy /// /// Used by the derive code @@ -735,7 +735,7 @@ pub fn assert_copy() {} /// A wrapper type that assumes its contents don't need to be traced #[repr(transparent)] -#[derive(Copy, Clone, Debug)] +#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Default, Hash)] pub struct AssumeNotTraced(T); impl AssumeNotTraced { /// Assume the specified value doesn't need to be traced @@ -769,9 +769,8 @@ impl DerefMut for AssumeNotTraced { } unsafe impl Trace for AssumeNotTraced { - const NEEDS_TRACE: bool = false; #[inline(always)] // This method does nothing and is always a win to inline - fn visit(&mut self, _visitor: &mut V) -> Result<(), V::Err> { + fn visit(&mut self, _visitor: &mut V) -> Result<(), V::Err> { Ok(()) } } @@ -780,14 +779,22 @@ unsafe impl TraceImmutable for AssumeNotTraced { fn visit_immutable(&self, _visitor: &mut V) -> Result<(), V::Err> { Ok(()) } + + #[inline] + fn visit_dyn_immutable(&self, _visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + Ok(()) + } } unsafe impl NullTrace for AssumeNotTraced {} /// No tracing implies GcSafe -unsafe impl GcSafe for AssumeNotTraced { +unsafe impl GcSafe for AssumeNotTraced {} +unsafe impl GcType for AssumeNotTraced { + const NEEDS_TRACE: bool = false; // This is the whole point of the type 
be false. However, one does not necessarily imply the other.
/// /// If there are multiple types involved, you should check if any of them need tracing. @@ -844,23 +859,58 @@ pub unsafe trait Trace { /// The fields which don't need tracing will always ignored by `GarbageCollector::trace`, /// while the fields that do will be properly traced. /// + /// + /// ## Safety /// False negatives will always result in completely undefined behavior. /// False positives could result in un-necessary tracing, but are perfectly safe otherwise. /// Therefore, when in doubt you always assume this is true. /// /// If this is true `NullTrace` should (but doesn't have to) be implemented. + /// It is always correct to conservatively return `true`. const NEEDS_TRACE: bool; + /// If this type needs a destructor run + /// + /// This is usually equivalent to `std::mem::needs_drop`. + /// However procedurally derived code can sometimes provide + /// a no-op drop implementation (for safety), + /// which would lead to a false positive with `std::mem::needs_drop()` + /// + /// ## Safety + /// This is mostly an optimization. + /// As long as the drop code is [gc-safe](GcSafe), + /// then it's okay to conservatively return `true`. + /// + /// In the worst-case scenario the GC will dynamically call a + /// empty implementation, or it will leak some memory. + /// + /// However, if the destructor is *not* [gc-safe](GcSafe) + /// then *undefined behavior* will occur. + const NEEDS_DROP: bool; +} + +/// Indicates that a type can be traced by a garbage collector. +/// +/// This doesn't necessarily mean that the type is safe to +/// allocate in a garbage collector ([GcSafe]). +/// +/// +/// ## Safety +/// See the documentation of the [GcType::trace] method for more info. +/// Essentially, this object must faithfully trace anything that +/// could contain garbage collected pointers or other `Trace` items. +pub unsafe trait Trace { /// Visit each field in this type /// - /// Users should never invoke this method, and always call the `V::visit` instead. 
- /// Only the collector itself is premitted to call this method, - /// and **it is undefined behavior for the user to invoke this**. + /// Users should never invoke this method, and always call the [GcVisitor::visit] method + /// instead. + /// Only the collector itself is permitted to call this method, + /// and **it is undefined behavior for the user to invoke this directly**. /// /// Structures should trace each of their fields, /// and collections should trace each of their elements. /// - /// ### Safety - /// Some types (like `Gc`) need special actions taken when they're traced, + /// ## Safety + /// Some types (like [Gc]) need special actions taken when they're traced, /// but those are somewhat rare and are usually already provided by the garbage collector. /// /// Unless I explicitly document actions as legal I may decide to change i. @@ -868,17 +918,18 @@ pub unsafe trait Trace { /// if I explicitly document it as safe behavior in this method's documentation. /// If you try something that isn't explicitly documented here as permitted behavior, /// the collector may choose to override your memory with `0xDEADBEEF`. - /// ## Always Permitted + /// ### Always Permitted /// - Reading your own memory (includes iteration) - /// - Interior mutation is undefined behavior, even if you use `GcCell` - /// - Calling `GcVisitor::visit` with the specified collector - /// - `GarbageCollector::trace` already verifies that it owns the data, so you don't need to do that + /// - Interior mutation is undefined behavior, even if you use [crate::cell::GcCell] + /// - Calling [crate::GcVisitor::visit] with the specified collector + /// - [crate::GcVisitor::trace] already verifies that it owns the data, so you don't need to do that + /// - Other visitor methods can be invoked as appropriate /// - Panicking /// - This should be reserved for cases where you are seriously screwed up, /// and can't fulfill your contract to trace your interior properly. 
unless you are an unsized type.
+ fn visit_dynamically(&mut self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError>; +} + +unsafe impl DynTrace for T { + #[inline] + fn visit_dynamically(&mut self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + T::visit::(self, visitor) + } +} + /// A type that can be safely traced/relocated /// without having to use a mutable reference /// @@ -903,7 +975,9 @@ pub unsafe trait TraceImmutable: Trace { /// /// The visitor may want to relocate garbage collected pointers, /// which this type must support. - fn visit_immutable(&self, visitor: &mut V) -> Result<(), V::Err>; + fn visit_immutable(&self, visitor: &mut V) -> Result<(), V::Err> where Self: Sized; + /// A dynamically dispatched version of [TraceImmutable::visit_immutable] + fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError>; } /// Marker types for types that don't need to be traced @@ -914,26 +988,65 @@ pub unsafe trait NullTrace: Trace + TraceImmutable {} /// Visits garbage collected objects /// /// This should only be used by a [GcSystem] -pub unsafe trait GcVisitor: Sized { +pub unsafe trait GcVisitor { /// The type of errors returned by this visitor type Err: Debug; /// Visit a reference to the specified value #[inline(always)] - fn visit(&mut self, value: &mut T) -> Result<(), Self::Err> { + fn visit(&mut self, value: &mut T) -> Result<(), Self::Err> where Self: Sized { value.visit(self) } /// Visit a reference to the specified value #[inline(always)] - fn visit_immutable(&mut self, value: &T) -> Result<(), Self::Err> { + fn visit_immutable(&mut self, value: &T) -> Result<(), Self::Err> where Self: Sized { value.visit_immutable(self) } /// Visit a garbage collected pointer /// + /// The type `T` is statically known in this method, + /// unlike [GcVisitor::visit_dyn_gc]. + /// /// ## Safety /// Undefined behavior if the GC pointer isn't properly visited. 
- unsafe fn visit_gc<'gc, T: GcSafe + 'gc, Id: CollectorId>( + unsafe fn visit_gc<'gc, T: GcSafe + ?Sized + 'gc, Id: CollectorId>( &mut self, gc: &mut Gc<'gc, T, Id> + ) -> Result<(), Self::Err> where Self: Sized; + + /// Visit a garbage collected pointer, + /// whose type is not statically known + /// + /// ## Safety + /// Undefined behavior if the GC pointer isn't properly visited. + unsafe fn visit_dyn_gc( + &mut self, gc_ptr: NonNull, + collector_id: &dyn Any ) -> Result<(), Self::Err>; } + +/// An opaque-error type caused by using a [GcDynVisitor] +#[derive(Debug)] +pub struct GcDynVisitError(Box); + +/// A dynamically-dispatched [GcVisitor] +pub type GcDynVisitor = dyn GcVisitor; +/// Helper implementation that internally dispatches to +unsafe impl GcVisitor for &mut GcDynVisitor { + type Err = GcDynVisitError; + + #[inline] + unsafe fn visit_gc<'gc, T, Id>(&mut self, gc: &mut Gc<'gc, T, Id>) -> Result<(), Self::Err> + where Self: Sized, T: GcSafe + ?Sized + 'gc, Id: CollectorId { + let id = gc.collector_id(); + (**self).visit_dyn_gc( + NonNull::new_unchecked(gc.as_raw_ptr() as *mut dyn GcSafe), + &id as &dyn Any + ) + } + + #[inline] + unsafe fn visit_dyn_gc(&mut self, gc_ptr: NonNull, collector_id: &dyn Any) -> Result<(), Self::Err> { + (**self).visit_dyn_gc(gc_ptr, collector_id) + } +} diff --git a/src/manually_traced/core.rs b/src/manually_traced/core.rs index 34078cd..4f0510c 100644 --- a/src/manually_traced/core.rs +++ b/src/manually_traced/core.rs @@ -12,6 +12,16 @@ use crate::{GcDirectBarrier, CollectorId}; macro_rules! 
trace_tuple { { $($param:ident)* } => { unsafe impl<$($param),*> Trace for ($($param,)*) + where $($param: Trace),* { + #[inline] + fn visit(&mut self, #[allow(unused)] visitor: &mut Visit) -> Result<(), Visit::Err> { + #[allow(non_snake_case)] + let ($(ref mut $param,)*) = *self; + $(visitor.visit::<$param>($param)?;)* + Ok(()) + } + } + unsafe impl<$($param),*> GcType for ($($param,)*) where $($param: Trace),* { /* * HACK: Macros don't allow using `||` as separator, @@ -21,13 +31,7 @@ macro_rules! trace_tuple { * This also correctly handles the empty unit tuple by making it false */ const NEEDS_TRACE: bool = $($param::NEEDS_TRACE || )* false; - #[inline] - fn visit(&mut self, #[allow(unused)] visitor: &mut Visit) -> Result<(), Visit::Err> { - #[allow(non_snake_case)] - let ($(ref mut $param,)*) = *self; - $(visitor.visit::<$param>($param)?;)* - Ok(()) - } + const NEEDS_DROP: bool = $(<$param as GcType>::NEEDS_DROP ||)* false; } unsafe impl<$($param),*> TraceImmutable for ($($param,)*) where $($param: TraceImmutable),* { @@ -38,6 +42,10 @@ macro_rules! trace_tuple { $(visitor.visit_immutable::<$param>($param)?;)* Ok(()) } + #[inline] + fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + self.visit_immutable(visitor) + } } unsafe impl<$($param: NullTrace),*> NullTrace for ($($param,)*) {} unsafe impl<'new_gc, Id, $($param),*> $crate::GcRebrand<'new_gc, Id> for ($($param,)*) @@ -50,9 +58,7 @@ macro_rules! 
trace_tuple { $(<$param as $crate::GcErase<'a, Id>>::Erased: Sized,)* { type Erased = ($(<$param as $crate::GcErase<'a, Id>>::Erased,)*); } - unsafe impl<$($param: GcSafe),*> GcSafe for ($($param,)*) { - const NEEDS_DROP: bool = false $(|| <$param as GcSafe>::NEEDS_DROP)*; - } + unsafe impl<$($param: GcSafe),*> GcSafe for ($($param,)*) {} unsafe impl<'gc, OwningRef, $($param),*> $crate::GcDirectBarrier<'gc, OwningRef> for ($($param,)*) where $($param: $crate::GcDirectBarrier<'gc, OwningRef>),* { #[inline] @@ -110,22 +116,27 @@ trace_tuple! { A B C D E F G H I } macro_rules! trace_array { ($size:tt) => { unsafe impl Trace for [T; $size] { - const NEEDS_TRACE: bool = T::NEEDS_TRACE; #[inline] fn visit(&mut self, visitor: &mut V) -> Result<(), V::Err> { visitor.visit::<[T]>(self as &mut [T]) } } + unsafe impl GcType for [T; $size] { + const NEEDS_TRACE: bool = T::NEEDS_TRACE; + const NEEDS_DROP: bool = T::NEEDS_DROP; + } unsafe impl $crate::TraceImmutable for [T; $size] { #[inline] fn visit_immutable(&self, visitor: &mut V) -> Result<(), V::Err> { visitor.visit_immutable::<[T]>(self as &[T]) } + #[inline] + fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + visitor.visit_immutable::<[T]>(self as &[T]) + } } unsafe impl $crate::NullTrace for [T; $size] {} - unsafe impl GcSafe for [T; $size] { - const NEEDS_DROP: bool = core::mem::needs_drop::(); - } + unsafe impl GcSafe for [T; $size] {} unsafe impl<'new_gc, Id, T> $crate::GcRebrand<'new_gc, Id> for [T; $size] where Id: CollectorId, T: GcRebrand<'new_gc, Id>, >::Branded: Sized { @@ -148,22 +159,28 @@ trace_array! { /// /// The underlying data must support `TraceImmutable` since we /// only have an immutable reference. 
-unsafe impl<'a, T: TraceImmutable> Trace for &'a T { - const NEEDS_TRACE: bool = T::NEEDS_TRACE; +unsafe impl<'a, T: TraceImmutable + ?Sized> Trace for &'a T { #[inline(always)] - fn visit(&mut self, visitor: &mut V) -> Result<(), V::Err> { + fn visit(&mut self, visitor: &mut V) -> Result<(), V::Err> { visitor.visit_immutable::(*self) } } -unsafe impl<'a, T: TraceImmutable> TraceImmutable for &'a T { +unsafe impl<'a, T: TraceImmutable + ?Sized> TraceImmutable for &'a T { #[inline(always)] - fn visit_immutable(&self, visitor: &mut V) -> Result<(), V::Err> { + fn visit_immutable(&self, visitor: &mut V) -> Result<(), V::Err> { + visitor.visit_immutable::(*self) + } + + #[inline] + fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { visitor.visit_immutable::(*self) } } -unsafe impl<'a, T: NullTrace> NullTrace for &'a T {} -unsafe impl<'a, T: GcSafe + TraceImmutable> GcSafe for &'a T { - const NEEDS_DROP: bool = false; // References are safe :) +unsafe impl<'a, T: NullTrace + ?Sized> NullTrace for &'a T {} +unsafe impl<'a, T: GcSafe + TraceImmutable + ?Sized> GcSafe for &'a T {} +unsafe impl<'a, T: GcType + TraceImmutable> GcType for &'a T { + const NEEDS_TRACE: bool = T::NEEDS_TRACE; + const NEEDS_DROP: bool = false; // References never need to be dropped :) } /// TODO: Right now we require `NullTrace` /// @@ -185,7 +202,6 @@ unsafe impl<'a, Id, T> GcErase<'a, Id> for &'a T /// Implements tracing for mutable references. 
unsafe impl<'a, T: Trace> Trace for &'a mut T { - const NEEDS_TRACE: bool = T::NEEDS_TRACE; #[inline(always)] fn visit(&mut self, visitor: &mut V) -> Result<(), V::Err> { visitor.visit::(*self) @@ -196,10 +212,19 @@ unsafe impl<'a, T: TraceImmutable> TraceImmutable for &'a mut T { fn visit_immutable(&self, visitor: &mut V) -> Result<(), V::Err> { visitor.visit_immutable::(&**self) } + + #[inline] + fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + visitor.visit_immutable::(&**self) + } } unsafe impl<'a, T: NullTrace> NullTrace for &'a mut T {} -unsafe impl<'a, T: GcSafe> GcSafe for &'a mut T { - const NEEDS_DROP: bool = false; // References are Copy +unsafe impl<'a, T: GcSafe> GcSafe for &'a mut T {} +unsafe impl<'a, T: Trace> GcType for &'a mut T { + const NEEDS_TRACE: bool = T::NEEDS_TRACE; + /// Although not [Copy], mutable references + /// never need to be dropped + const NEEDS_DROP: bool = false; } /// TODO: We currently require NullTrace for `T` unsafe impl<'a, 'new_gc, Id, T> GcRebrand<'new_gc, Id> for &'a mut T @@ -212,10 +237,9 @@ unsafe impl<'a, Id, T> GcErase<'a, Id> for &'a mut T type Erased = &'a mut T; } -/// Implements tracing for slices, by tracing all the objects they refer to. +/// Implements tracing for slices, by tracing all +/// the objects they refer to. 
unsafe impl Trace for [T] { - const NEEDS_TRACE: bool = T::NEEDS_TRACE ; - #[inline] fn visit(&mut self, visitor: &mut V) -> Result<(), V::Err> { if !T::NEEDS_TRACE { return Ok(()) }; @@ -225,6 +249,15 @@ unsafe impl Trace for [T] { Ok(()) } } +unsafe impl crate::DynTrace for [T] { + fn visit_dynamically(&mut self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + if !T::NEEDS_TRACE { return Ok(()) }; + for value in self { + visitor.visit(value)?; + } + Ok(()) + } +} unsafe impl TraceImmutable for [T] { #[inline] fn visit_immutable(&self, visitor: &mut V) -> Result<(), ::Err> { @@ -234,15 +267,16 @@ unsafe impl TraceImmutable for [T] { } Ok(()) } + + #[inline] + fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + self.visit_immutable::(visitor) + } } unsafe impl NullTrace for [T] {} -unsafe impl GcSafe for [T] { - const NEEDS_DROP: bool = core::mem::needs_drop::(); -} +unsafe impl GcSafe for [T] {} unsafe impl Trace for Option { - const NEEDS_TRACE: bool = T::NEEDS_TRACE; - #[inline] fn visit(&mut self, visitor: &mut V) -> Result<(), V::Err> { match *self { @@ -259,9 +293,16 @@ unsafe impl TraceImmutable for Option { Some(ref value) => visitor.visit_immutable(value), } } + + #[inline] + fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + self.visit_immutable(visitor) + } } unsafe impl NullTrace for Option {} -unsafe impl GcSafe for Option { +unsafe impl GcSafe for Option {} +unsafe impl GcType for Option { + const NEEDS_TRACE: bool = T::NEEDS_TRACE; const NEEDS_DROP: bool = T::NEEDS_DROP; } unsafe impl<'gc, OwningRef, V> GcDirectBarrier<'gc, OwningRef> for Option @@ -293,4 +334,8 @@ unsafe impl TraceImmutable for Wrapping { fn visit_immutable(&self, visitor: &mut V) -> Result<(), V::Err> { visitor.visit_immutable(&self.0) } + + fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + self.visit_immutable(visitor) + } } \ No newline at 
end of file diff --git a/src/manually_traced/indexmap.rs b/src/manually_traced/indexmap.rs index 3993aae..ab68c2a 100644 --- a/src/manually_traced/indexmap.rs +++ b/src/manually_traced/indexmap.rs @@ -16,13 +16,15 @@ unsafe impl TraceImmutable for IndexMap } Ok(()) } + #[inline] + fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + self.visit_immutable::(visitor) + } } unsafe impl NullTrace for IndexMap where K: NullTrace + Eq + Hash, V: NullTrace {} unsafe impl Trace for IndexMap where K: TraceImmutable + Eq + Hash, V: Trace { - const NEEDS_TRACE: bool = K::NEEDS_TRACE || V::NEEDS_TRACE; - fn visit(&mut self, visitor: &mut Visit) -> Result<(), Visit::Err> { if !Self::NEEDS_TRACE { return Ok(()); }; for (key, value) in self.iter_mut() { @@ -33,9 +35,7 @@ unsafe impl Trace for IndexMap } } unsafe impl GcSafe for IndexMap where - K: GcSafe + TraceImmutable + Eq + Hash, V: GcSafe { - const NEEDS_DROP: bool = true; // IndexMap has internal memory -} + K: GcSafe + TraceImmutable + Eq + Hash, V: GcSafe {} unsafe impl<'new_gc, Id, K, V> GcRebrand<'new_gc, Id> for IndexMap where Id: CollectorId, K: TraceImmutable + Eq + Hash + GcRebrand<'new_gc, Id>, V: GcRebrand<'new_gc, Id>, @@ -47,6 +47,12 @@ unsafe impl<'new_gc, Id, K, V> GcRebrand<'new_gc, Id> for IndexMap >; } +unsafe impl GcType for IndexMap + where K: TraceImmutable + Eq + Hash, V: Trace { + /// IndexMap has internal memory + const NEEDS_TRACE: bool = K::NEEDS_TRACE | V::NEEDS_TRACE; + const NEEDS_DROP: bool = true; +} unsafe impl<'a, Id, K, V> GcErase<'a, Id> for IndexMap where Id: CollectorId, K: TraceImmutable + Eq + Hash + GcErase<'a, Id>, V: GcErase<'a, Id>, @@ -69,11 +75,14 @@ unsafe impl TraceImmutable for IndexSet } Ok(()) } + + #[inline] + fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + self.visit_immutable(visitor) + } } unsafe impl Trace for IndexSet where V: TraceImmutable + Eq + Hash { - const NEEDS_TRACE: bool = 
V::NEEDS_TRACE; - fn visit(&mut self, visitor: &mut Visit) -> Result<(), Visit::Err> { for value in self.iter() { visitor.visit_immutable(value)?; @@ -90,4 +99,10 @@ unsafe impl<'a, Id, V> GcErase<'a, Id> for IndexSet where Id: CollectorId, V: GcErase<'a, Id> + TraceImmutable + Eq + Hash, >::Erased: TraceImmutable + Eq + Hash, { type Erased = IndexSet<>::Erased>; -} \ No newline at end of file +} +unsafe impl GcType for IndexSet + where V: TraceImmutable + Eq + Hash { + const NEEDS_TRACE: bool = V::NEEDS_TRACE; + /// IndexSet has internal memory + const NEEDS_DROP: bool = true; +} diff --git a/src/manually_traced/mod.rs b/src/manually_traced/mod.rs index 2159869..ce2693e 100644 --- a/src/manually_traced/mod.rs +++ b/src/manually_traced/mod.rs @@ -153,6 +153,10 @@ macro_rules! unsafe_trace_deref { let extracted: &$target_type = &**self; visitor.visit_immutable(extracted) } + #[inline] + fn visit_dyn_immutable(&self, visitor: &mut $crate::GcDynVisitor) -> Result<(), $crate::GcDynVisitError> { + self.visit_immutable::<$crate::GcDynVisitor>(visitor) + } } unsafe_trace_deref!($target, $($param),*; immut = false; |value| { // I wish we had type ascription -_- @@ -164,8 +168,6 @@ macro_rules! unsafe_trace_deref { unsafe_gc_brand!($target, immut = required; $($param),*); unsafe impl<$($param),*> Trace for $target<$($param),*> where $($param: TraceImmutable),* { - - const NEEDS_TRACE: bool = $($param::NEEDS_TRACE || )* false; #[inline] fn visit(&mut self, visitor: &mut V) -> Result<(), V::Err> { let extracted = { @@ -175,6 +177,20 @@ macro_rules! unsafe_trace_deref { visitor.visit_immutable(extracted) } } + unsafe impl<$($param),*> GcType for $target<$($param),*> + where $($param: TraceImmutable),* { + const NEEDS_TRACE: bool = $($param::NEEDS_TRACE || )* false; + /* + * NOTE: Because we are a wrapper class, we may have a custom drop. + * Therefore we (more conservatively) base our drop decisions + * on [core::mem::needs_drop] instead of [crate::needs_gc_drop]. 
+ * + * This may result in false positives for types like + * Wrapper + * but it is acceptable for now. + */ + const NEEDS_DROP: bool = core::mem::needs_drop::(); + } unsafe impl<$($param),*> TraceImmutable for $target<$($param),*> where $($param: TraceImmutable),* { @@ -186,20 +202,21 @@ macro_rules! unsafe_trace_deref { }; visitor.visit_immutable(extracted) } + #[inline] + fn visit_dyn_immutable(&self, visitor: &mut $crate::GcDynVisitor) -> Result<(), $crate::GcDynVisitError> { + self.visit_immutable::<$crate::GcDynVisitor>(visitor) + } } unsafe impl<$($param: NullTrace),*> NullTrace for $target<$($param),*> {} - /// We trust ourselves to not do anything bad as long as our paramaters don't + /// We trust ourselves to not do anything bad + /// as long as our parameters don't unsafe impl<$($param),*> GcSafe for $target<$($param),*> - where $($param: GcSafe + TraceImmutable),* { - const NEEDS_DROP: bool = core::mem::needs_drop::(); - } + where $($param: GcSafe + TraceImmutable),* {} }; ($target:ident, $($param:ident),*; immut = false; |$value:ident| $extract:expr) => { unsafe_gc_brand!($target, $($param),*); unsafe impl<$($param),*> Trace for $target<$($param),*> where $($param: Trace),* { - - const NEEDS_TRACE: bool = $($param::NEEDS_TRACE || )* false; #[inline] fn visit(&mut self, visitor: &mut V) -> Result<(), V::Err> { let extracted = { @@ -212,7 +229,21 @@ macro_rules! unsafe_trace_deref { unsafe impl<$($param: NullTrace),*> NullTrace for $target<$($param),*> {} /// We trust ourselves to not do anything bad as long as our paramaters don't unsafe impl<$($param),*> GcSafe for $target<$($param),*> - where $($param: GcSafe),* { + where $($param: GcSafe),* {} + unsafe impl<$($param),*> GcType for $target<$($param),*> + where $($param: Trace),* { + const NEEDS_TRACE: bool = $($param::NEEDS_TRACE || )* false; + /* + * NOTE: Because we are a wrapper class, we may have a custom drop. 
+ * Therefore we (more conservatively) base our drop decisions + * on [core::mem::needs_drop] instead of [crate::needs_gc_drop]. + * + * This may result in false positives for types like + * Wrapper + * but it is acceptable for now. + * + * TODO: Duplicate comment!!! + */ const NEEDS_DROP: bool = core::mem::needs_drop::(); } }; @@ -272,6 +303,10 @@ macro_rules! unsafe_immutable_trace_iterable { } Ok(()) } + #[inline] + fn visit_dyn_immutable(&self, visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + self.visit_immutable::(self) + } } unsafe impl<$($param: $crate::NullTrace),*> NullTrace for $target<$($param),*> {} }; @@ -304,7 +339,6 @@ macro_rules! unsafe_trace_primitive { ($target:ty) => { unsafe_gc_brand!($target); unsafe impl Trace for $target { - const NEEDS_TRACE: bool = false; #[inline(always)] // This method does nothing and is always a win to inline fn visit(&mut self, _visitor: &mut V) -> Result<(), V::Err> { Ok(()) @@ -315,10 +349,16 @@ macro_rules! unsafe_trace_primitive { fn visit_immutable(&self, _visitor: &mut V) -> Result<(), V::Err> { Ok(()) } + #[inline] + fn visit_dyn_immutable(&self, _visitor: &mut GcDynVisitor) -> Result<(), GcDynVisitError> { + Ok(()) + } } unsafe impl $crate::NullTrace for $target {} /// No drop/custom behavior -> GcSafe - unsafe impl GcSafe for $target { + unsafe impl GcSafe for $target {} + unsafe impl GcType for $target { + const NEEDS_TRACE: bool = false; const NEEDS_DROP: bool = core::mem::needs_drop::<$target>(); } unsafe impl<'gc, OwningRef> $crate::GcDirectBarrier<'gc, OwningRef> for $target { diff --git a/src/manually_traced/stdlib.rs b/src/manually_traced/stdlib.rs index 3f9df70..a073aa2 100644 --- a/src/manually_traced/stdlib.rs +++ b/src/manually_traced/stdlib.rs @@ -10,8 +10,6 @@ use crate::CollectorId; unsafe_immutable_trace_iterable!(HashMap; element = { (&K, &V) }); unsafe impl Trace for HashMap { - const NEEDS_TRACE: bool = K::NEEDS_TRACE || V::NEEDS_TRACE; - fn visit(&mut self, visitor: &mut 
Visit) -> Result<(), Visit::Err> { if !Self::NEEDS_TRACE { return Ok(()); }; for (key, value) in self.iter_mut() { @@ -21,8 +19,10 @@ unsafe impl Trace for HashMap { Ok(()) } } -unsafe impl GcSafe for HashMap { - const NEEDS_DROP: bool = true; // HashMap has internal memory +unsafe impl GcSafe for HashMap {} +unsafe impl GcType for HashMap { + const NEEDS_TRACE: bool = K::NEEDS_TRACE || V::NEEDS_TRACE; + const NEEDS_DROP: bool = true; // HashMap has native-allocated internal memory } unsafe impl<'new_gc, Id, K, V> GcRebrand<'new_gc, Id> for HashMap where Id: CollectorId, K: TraceImmutable + GcRebrand<'new_gc, Id>, @@ -46,8 +46,6 @@ unsafe impl<'a, Id, K, V> GcErase<'a, Id> for HashMap } unsafe_immutable_trace_iterable!(HashSet; element = { &V }); unsafe impl Trace for HashSet { - const NEEDS_TRACE: bool = V::NEEDS_TRACE; - fn visit(&mut self, visitor: &mut Visit) -> Result<(), Visit::Err> { if !Self::NEEDS_TRACE { return Ok(()); }; for value in self.iter() { @@ -57,3 +55,7 @@ unsafe impl Trace for HashSet { } } unsafe_gc_brand!(HashSet, immut = required; V); +unsafe impl GcType for HashSet { + const NEEDS_TRACE: bool = V::NEEDS_TRACE; + const NEEDS_DROP: bool = true; // We have internal memory +} \ No newline at end of file diff --git a/src/prelude.rs b/src/prelude.rs index b66207b..abe53cf 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -15,11 +15,12 @@ pub use crate::{ // Basic collector types pub use crate::{ GcSystem, GcContext, GcSimpleAlloc, - Gc, GcHandle, GcVisitor + Gc, GcHandle, GcVisitor, GcDynVisitor, GcDynVisitError }; // Traits for user code to implement pub use crate::{ - GcSafe, GcErase, GcRebrand, Trace, TraceImmutable, NullTrace + GcSafe, GcErase, GcRebrand, Trace, TraceImmutable, + NullTrace, GcType }; // Hack traits pub use crate::{GcBindHandle};