diff --git a/gc/src/gc.rs b/gc/src/gc.rs index 58696a3..9dabddf 100644 --- a/gc/src/gc.rs +++ b/gc/src/gc.rs @@ -3,7 +3,7 @@ use std::cell::{Cell, RefCell}; use std::mem; use std::ptr::{self, NonNull}; -struct GcState { +pub(crate) struct GcState { stats: GcStats, config: GcConfig, boxes_start: Cell>>>, @@ -45,12 +45,19 @@ thread_local!(static GC_STATE: RefCell = RefCell::new(GcState { boxes_start: Cell::new(None), })); +pub enum GcBoxType { + Standard, + Weak, + Ephemeron, +} + const MARK_MASK: usize = 1 << (usize::BITS - 1); const ROOTS_MASK: usize = !MARK_MASK; const ROOTS_MAX: usize = ROOTS_MASK; // max allowed value of roots pub(crate) struct GcBoxHeader { roots: Cell, // high bit is used as mark flag + ephemeron_flag: Cell, next: Cell>>>, } @@ -59,6 +66,25 @@ impl GcBoxHeader { pub fn new(next: Option>>) -> Self { GcBoxHeader { roots: Cell::new(1), // unmarked and roots count = 1 + ephemeron_flag: Cell::new(false), + next: Cell::new(next), + } + } + + #[inline] + pub fn new_ephemeron(next: Option>>) -> Self { + GcBoxHeader { + roots: Cell::new(0), + ephemeron_flag: Cell::new(true), + next: Cell::new(next), + } + } + + #[inline] + pub fn new_weak(next: Option>>) -> Self { + GcBoxHeader { + roots: Cell::new(0), + ephemeron_flag: Cell::new(false), next: Cell::new(next), } } @@ -100,20 +126,73 @@ impl GcBoxHeader { pub fn unmark(&self) { self.roots.set(self.roots.get() & !MARK_MASK) } + + #[inline] + pub fn is_ephemeron(&self) -> bool { + self.ephemeron_flag.get() + } } #[repr(C)] // to justify the layout computation in Gc::from_raw -pub(crate) struct GcBox { +pub struct GcBox { header: GcBoxHeader, data: T, } +impl GcBox { + /// Returns `true` if the two references refer to the same `GcBox`. + pub(crate) fn ptr_eq(this: &GcBox, other: &GcBox) -> bool { + // Use .header to ignore fat pointer vtables, to work around + // https://github.com/rust-lang/rust/issues/46139 + ptr::eq(&this.header, &other.header) + } + + /// Marks this `GcBox` and marks through its data. + pub(crate) unsafe fn trace_inner(&self) { + if !self.header.is_marked() && !self.header.is_ephemeron() { + self.header.mark(); + self.data.trace(); + } + } + + /// Trace inner data + pub(crate) unsafe fn weak_trace_inner(&self, queue: &mut Vec>>) { + self.data.weak_trace(queue); + } + + /// Increases the root count on this `GcBox`. + /// Roots prevent the `GcBox` from being destroyed by the garbage collector. + pub(crate) unsafe fn root_inner(&self) { + self.header.inc_roots(); + } + + /// Decreases the root count on this `GcBox`. + /// Roots prevent the `GcBox` from being destroyed by the garbage collector. + pub(crate) unsafe fn unroot_inner(&self) { + self.header.dec_roots(); + } + + /// Returns a pointer to the `GcBox`'s value, without dereferencing it. + pub(crate) fn value_ptr(this: *const GcBox) -> *const T { + unsafe { ptr::addr_of!((*this).data) } + } + + /// Returns a reference to the `GcBox`'s value. + pub(crate) fn value(&self) -> &T { + &self.data + } + + pub(crate) fn is_marked(&self) -> bool { + self.header.is_marked() + } +} + impl GcBox { /// Allocates a garbage collected `GcBox` on the heap, /// and appends it to the thread-local `GcBox` chain. /// /// A `GcBox` allocated this way starts its life rooted. 
- pub(crate) fn new(value: T) -> NonNull { + pub(crate) fn new(value: T, box_type: GcBoxType) -> NonNull { GC_STATE.with(|st| { let mut st = st.borrow_mut(); @@ -132,8 +211,14 @@ impl GcBox { } } + let header = match box_type { + GcBoxType::Standard => GcBoxHeader::new(st.boxes_start.take()), + GcBoxType::Weak => GcBoxHeader::new_weak(st.boxes_start.take()), + GcBoxType::Ephemeron => GcBoxHeader::new_ephemeron(st.boxes_start.take()), + }; + let gcbox = Box::into_raw(Box::new(GcBox { - header: GcBoxHeader::new(st.boxes_start.take()), + header, data: value, })); @@ -149,105 +234,108 @@ impl GcBox { } } -impl GcBox { - /// Returns `true` if the two references refer to the same `GcBox`. - pub(crate) fn ptr_eq(this: &GcBox, other: &GcBox) -> bool { - // Use .header to ignore fat pointer vtables, to work around - // https://github.com/rust-lang/rust/issues/46139 - ptr::eq(&this.header, &other.header) - } - - /// Marks this `GcBox` and marks through its data. - pub(crate) unsafe fn trace_inner(&self) { - if !self.header.is_marked() { - self.header.mark(); - self.data.trace(); - } - } - - /// Increases the root count on this `GcBox`. - /// Roots prevent the `GcBox` from being destroyed by the garbage collector. - pub(crate) unsafe fn root_inner(&self) { - self.header.inc_roots(); - } - - /// Decreases the root count on this `GcBox`. - /// Roots prevent the `GcBox` from being destroyed by the garbage collector. - pub(crate) unsafe fn unroot_inner(&self) { - self.header.dec_roots(); - } - - /// Returns a pointer to the `GcBox`'s value, without dereferencing it. - pub(crate) fn value_ptr(this: *const GcBox) -> *const T { - unsafe { ptr::addr_of!((*this).data) } - } - - /// Returns a reference to the `GcBox`'s value. - pub(crate) fn value(&self) -> &T { - &self.data - } -} - /// Collects garbage. fn collect_garbage(st: &mut GcState) { st.stats.collections_performed += 1; - struct Unmarked<'a> { - incoming: &'a Cell>>>, - this: NonNull>, - } - unsafe fn mark(head: &Cell>>>) -> Vec> { + unsafe fn mark( + head: &Cell>>>, + ) -> Vec>> { // Walk the tree, tracing and marking the nodes - let mut mark_head = head.get(); - while let Some(node) = mark_head { - if (*node.as_ptr()).header.roots() > 0 { - (*node.as_ptr()).trace_inner(); + let mut finalize = Vec::new(); + let mut ephemeron_queue = Vec::new(); + let mut mark_head = head; + while let Some(node) = mark_head.get() { + if (*node.as_ptr()).header.is_ephemeron() { + ephemeron_queue.push(node); + } else { + if (*node.as_ptr()).header.roots() > 0 { + (*node.as_ptr()).trace_inner(); + } else { + finalize.push(node) + } } + mark_head = &(*node.as_ptr()).header.next; + } - mark_head = (*node.as_ptr()).header.next.get(); + // Ephemeron Evaluation + if !ephemeron_queue.is_empty() { + loop { + let mut reachable_nodes = Vec::new(); + let mut other_nodes = Vec::new(); + // iterate through ephemeron queue, sorting nodes by whether they + // are reachable or unreachable + for node in ephemeron_queue { + if (*node.as_ptr()).data.is_marked_ephemeron() { + (*node.as_ptr()).header.mark(); + reachable_nodes.push(node); + } else { + other_nodes.push(node); + } + } + // Replace the old queue with the unreachable + ephemeron_queue = other_nodes; + + // If reachable nodes is not empty, trace values. 
If it is empty, + // break from the loop + if !reachable_nodes.is_empty() { + // iterate through reachable nodes and trace their values, + // enqueuing any ephemeron that is found during the trace + for node in reachable_nodes { + (*node.as_ptr()).weak_trace_inner(&mut ephemeron_queue) + } + } else { + break; + } + } } - // Collect a vector of all of the nodes which were not marked, - // and unmark the ones which were. - let mut unmarked = Vec::new(); - let mut unmark_head = head; - while let Some(node) = unmark_head.get() { - if (*node.as_ptr()).header.is_marked() { - (*node.as_ptr()).header.unmark(); - } else { - unmarked.push(Unmarked { - incoming: unmark_head, - this: node, - }); + // Any left over nodes in the ephemeron queue at this point are + // unreachable and need to be notified/finalized. + finalize.extend(ephemeron_queue); + + finalize + } + + unsafe fn finalize(finalize_vec: Vec>>) { + for node in finalize_vec { + // We double check that the unreachable nodes are actually unreachable + // prior to finalization as they could have been marked by a different + // trace after initially being added to the queue + if !(*node.as_ptr()).header.is_marked() { + Trace::finalize_glue(&(*node.as_ptr()).data) } - unmark_head = &(*node.as_ptr()).header.next; } - unmarked } - unsafe fn sweep(finalized: Vec>, bytes_allocated: &mut usize) { + unsafe fn sweep(head: &Cell>>>, bytes_allocated: &mut usize) { let _guard = DropGuard::new(); - for node in finalized.into_iter().rev() { - if (*node.this.as_ptr()).header.is_marked() { - continue; + + let mut sweep_head = head; + while let Some(node) = sweep_head.get() { + if (*node.as_ptr()).header.is_marked() { + (*node.as_ptr()).header.unmark(); + sweep_head = &(*node.as_ptr()).header.next; + } else { + let unmarked_node = Box::from_raw(node.as_ptr()); + *bytes_allocated -= mem::size_of_val::>(&*unmarked_node); + sweep_head.set(unmarked_node.header.next.take()); } - let incoming = node.incoming; - let node = Box::from_raw(node.this.as_ptr()); - *bytes_allocated -= mem::size_of_val::>(&*node); - incoming.set(node.header.next.take()); } } unsafe { - let unmarked = mark(&st.boxes_start); - if unmarked.is_empty() { - return; - } - for node in &unmarked { - Trace::finalize_glue(&(*node.this.as_ptr()).data); - } - mark(&st.boxes_start); - sweep(unmarked, &mut st.stats.bytes_allocated); + // Run mark and return vector of nonreachable porperties + let unreachable_nodes = mark(&st.boxes_start); + // Finalize the unreachable properties + finalize(unreachable_nodes); + // Run mark again to mark any nodes that are resurrected by their finalizer + // + // At this point, _f should be filled with all nodes that are unreachable and + // have already been finalized, so they can be ignored. 
+ let _f = mark(&st.boxes_start); + // Run sweep: unmarking all marked nodes and freeing any unmarked nodes + sweep(&st.boxes_start, &mut st.stats.bytes_allocated); } } diff --git a/gc/src/lib.rs b/gc/src/lib.rs index 76a9e8c..ddff942 100644 --- a/gc/src/lib.rs +++ b/gc/src/lib.rs @@ -6,7 +6,7 @@ #![cfg_attr(feature = "nightly", feature(coerce_unsized, unsize))] -use crate::gc::{GcBox, GcBoxHeader}; +use crate::gc::{GcBox, GcBoxHeader, GcBoxType}; use std::alloc::Layout; use std::cell::{Cell, UnsafeCell}; use std::cmp::Ordering; @@ -27,10 +27,22 @@ mod gc; #[cfg(feature = "serde")] mod serde; mod trace; +pub mod weak; + +pub use weak::{WeakGc, WeakPair}; #[cfg(feature = "derive")] pub use gc_derive::{Finalize, Trace}; +/// `derive_prelude` is a quick prelude that imports +/// `Finalize`, `Trace`, and `GcPointer` for implementing +/// the derive +#[cfg(feature = "derive")] +pub mod derive_prelude { + pub use crate::GcPointer; + pub use gc_derive::{Finalize, Trace}; +} + // We re-export the Trace method, as well as some useful internal methods for // managing collections or configuring the garbage collector. pub use crate::gc::{finalizer_safe, force_collect}; @@ -41,6 +53,8 @@ pub use crate::gc::{configure, GcConfig}; #[cfg(feature = "unstable-stats")] pub use crate::gc::{stats, GcStats}; +pub type GcPointer = NonNull>; + //////// // Gc // //////// @@ -76,7 +90,7 @@ impl Gc { unsafe { // Allocate the memory for the object - let ptr = GcBox::new(value); + let ptr = GcBox::new(value, GcBoxType::Standard); // When we create a Gc, all pointers which have been moved to the // heap no longer need to be rooted, so we unroot them. @@ -99,7 +113,9 @@ impl Gc { } /// Returns the given pointer with its root bit cleared. -unsafe fn clear_root_bit(ptr: NonNull>) -> NonNull> { +pub(crate) unsafe fn clear_root_bit( + ptr: NonNull>, +) -> NonNull> { let ptr = ptr.as_ptr(); let data = ptr as *mut u8; let addr = data as isize; @@ -214,6 +230,23 @@ impl Gc { gc.set_root(); gc } + + #[inline] + pub fn clone_weak_gc(&self) -> WeakGc { + unsafe { + let weak_gc = WeakGc::from_gc_box(self.ptr_root.get()); + weak_gc + } + } + + #[inline] + pub fn create_weak_pair(&self, value: Option) -> WeakPair + where + V: Trace, + { + let weak_pair = WeakPair::from_gc_value_pair(self.ptr_root.get(), value); + weak_pair + } } impl Finalize for Gc {} @@ -224,6 +257,16 @@ unsafe impl Trace for Gc { self.inner().trace_inner(); } + #[inline] + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + + #[inline] + unsafe fn weak_trace(&self, queue: &mut Vec) { + self.inner().weak_trace_inner(queue); + } + #[inline] unsafe fn root(&self) { assert!(!self.rooted(), "Can't double-root a Gc"); @@ -639,6 +682,18 @@ unsafe impl Trace for GcCell { } #[inline] + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + + #[inline] + unsafe fn weak_trace(&self, queue: &mut Vec) { + match self.flags.get().borrowed() { + BorrowState::Writing => (), + _ => (*self.cell.get()).weak_trace(queue), + } + } + unsafe fn root(&self) { assert!(!self.flags.get().rooted(), "Can't root a GcCell twice!"); self.flags.set(self.flags.get().set_rooted(true)); @@ -980,7 +1035,7 @@ impl Debug for GcCell { // // For a slice/trait object, this sets the `data` field and leaves the rest // unchanged. For a sized raw pointer, this simply sets the pointer. 
-unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { +pub(crate) unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8); ptr } diff --git a/gc/src/trace.rs b/gc/src/trace.rs index c881bfc..d92d9fb 100644 --- a/gc/src/trace.rs +++ b/gc/src/trace.rs @@ -13,6 +13,7 @@ use std::sync::atomic::{ AtomicU64, AtomicU8, AtomicUsize, }; +pub use crate::GcPointer; /// The Finalize trait, which needs to be implemented on /// garbage-collected objects to define finalization logic. pub trait Finalize { @@ -24,6 +25,14 @@ pub unsafe trait Trace: Finalize { /// Marks all contained `Gc`s. unsafe fn trace(&self); + /// Checks if an ephemeron's key is marked. + /// + /// Note: value should always be implemented to return false + unsafe fn is_marked_ephemeron(&self) -> bool; + + /// Returns true if a marked `Gc` is found + unsafe fn weak_trace(&self, ephemeron_queue: &mut Vec); + /// Increments the root-count of all contained `Gc`s. unsafe fn root(&self); @@ -44,6 +53,12 @@ macro_rules! unsafe_empty_trace { #[inline] unsafe fn trace(&self) {} #[inline] + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + #[inline] + unsafe fn weak_trace(&self, _ephemeron_queue: &mut Vec) {} + #[inline] unsafe fn root(&self) {} #[inline] unsafe fn unroot(&self) {} @@ -61,7 +76,7 @@ macro_rules! unsafe_empty_trace { /// correct method on the argument. #[macro_export] macro_rules! custom_trace { - ($this:ident, $body:expr) => { + ($this:ident, $op:ident, $body:expr, $weak_body:expr) => { #[inline] unsafe fn trace(&self) { #[inline] @@ -72,6 +87,20 @@ macro_rules! custom_trace { $body } #[inline] + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + #[inline] + unsafe fn weak_trace(&self, queue: &mut Vec) { + #[inline] + unsafe fn mark(it: &T, queue: &mut Vec) { + $crate::Trace::weak_trace(it, queue) + } + let $this = self; + let $op = queue; + $weak_body + } + #[inline] unsafe fn root(&self) { #[inline] unsafe fn mark(it: &T) { @@ -166,11 +195,20 @@ simple_empty_finalize_trace![ impl Finalize for [T; N] {} unsafe impl Trace for [T; N] { - custom_trace!(this, { - for v in this { - mark(v); + custom_trace!( + this, + queue, + { + for v in this { + mark(v); + } + }, + { + for v in this { + mark(v, queue); + } } - }); + ); } macro_rules! fn_finalize_trace_one { @@ -201,12 +239,18 @@ macro_rules! 
tuple_finalize_trace { ($($args:ident),*) => { impl<$($args),*> Finalize for ($($args,)*) {} unsafe impl<$($args: $crate::Trace),*> Trace for ($($args,)*) { - custom_trace!(this, { + custom_trace!(this, queue, { #[allow(non_snake_case, unused_unsafe)] fn avoid_lints<$($args: $crate::Trace),*>(&($(ref $args,)*): &($($args,)*)) { unsafe { $(mark($args);)* } } avoid_lints(this) + }, { + #[allow(non_snake_case, unused_unsafe)] + fn avoid_lints<$($args: $crate::Trace),*>(&($(ref $args,)*): &($($args,)*), queue: &mut Vec) { + unsafe { $(mark($args, queue);)* } + } + avoid_lints(this, queue) }); } } @@ -239,118 +283,231 @@ type_arg_tuple_based_finalize_trace_impls![ impl Finalize for Rc {} unsafe impl Trace for Rc { - custom_trace!(this, { - mark(&**this); - }); + custom_trace!( + this, + queue, + { + mark(&**this); + }, + mark(&**this, queue) + ); } impl Finalize for Rc<[T]> {} unsafe impl Trace for Rc<[T]> { - custom_trace!(this, { - for e in this.iter() { - mark(e); + custom_trace!( + this, + queue, + { + for e in this.iter() { + mark(e); + } + }, + { + for e in this.iter() { + mark(e, queue); + } } - }); + ); } impl Finalize for Box {} unsafe impl Trace for Box { - custom_trace!(this, { - mark(&**this); - }); + custom_trace!( + this, + queue, + { + mark(&**this); + }, + mark(&**this, queue) + ); } impl Finalize for Box<[T]> {} unsafe impl Trace for Box<[T]> { - custom_trace!(this, { - for e in this.iter() { - mark(e); + custom_trace!( + this, + queue, + { + for e in this.iter() { + mark(e); + } + }, + { + for e in this.iter() { + mark(e, queue); + } } - }); + ); } impl Finalize for Vec {} unsafe impl Trace for Vec { - custom_trace!(this, { - for e in this { - mark(e); + custom_trace!( + this, + queue, + { + for e in this { + mark(e); + } + }, + { + for e in this { + mark(e, queue); + } } - }); + ); } impl Finalize for Option {} unsafe impl Trace for Option { - custom_trace!(this, { - if let Some(ref v) = *this { - mark(v); + custom_trace!( + this, + queue, + { + if let Some(ref v) = *this { + mark(v); + } + }, + { + if let Some(ref v) = *this { + mark(v, queue) + } } - }); + ); } impl Finalize for Result {} unsafe impl Trace for Result { - custom_trace!(this, { - match *this { - Ok(ref v) => mark(v), - Err(ref v) => mark(v), + custom_trace!( + this, + queue, + { + match *this { + Ok(ref v) => mark(v), + Err(ref v) => mark(v), + } + }, + { + let marked = match *this { + Ok(ref v) => mark(v, queue), + Err(ref v) => mark(v, queue), + }; + marked } - }); + ); } impl Finalize for BinaryHeap {} unsafe impl Trace for BinaryHeap { - custom_trace!(this, { - for v in this.iter() { - mark(v); + custom_trace!( + this, + queue, + { + for v in this.iter() { + mark(v); + } + }, + { + for e in this.iter() { + mark(e, queue); + } } - }); + ); } impl Finalize for BTreeMap {} unsafe impl Trace for BTreeMap { - custom_trace!(this, { - for (k, v) in this { - mark(k); - mark(v); + custom_trace!( + this, + queue, + { + for (k, v) in this { + mark(k); + mark(v); + } + }, + { + for (k, v) in this { + mark(k, queue); + mark(v, queue); + } } - }); + ); } impl Finalize for BTreeSet {} unsafe impl Trace for BTreeSet { - custom_trace!(this, { - for v in this { - mark(v); + custom_trace!( + this, + queue, + { + for v in this { + mark(v); + } + }, + { + for v in this { + mark(v, queue); + } } - }); + ); } impl Finalize for HashMap {} unsafe impl Trace for HashMap { - custom_trace!(this, { - for (k, v) in this.iter() { - mark(k); - mark(v); + custom_trace!( + this, + queue, + { + for (k, v) in this.iter() { + mark(k); + 
mark(v); + } + }, + { + for (k, v) in this.iter() { + mark(k, queue); + mark(v, queue); + } } - }); + ); } impl Finalize for HashSet {} unsafe impl Trace for HashSet { - custom_trace!(this, { - for v in this.iter() { - mark(v); + custom_trace!( + this, + queue, + { + for v in this.iter() { + mark(v); + } + }, + { + for v in this.iter() { + mark(v, queue); + } } - }); + ); } impl Finalize for LinkedList {} unsafe impl Trace for LinkedList { - custom_trace!(this, { - for v in this.iter() { - mark(v); + custom_trace!( + this, + queue, + { + for v in this.iter() { + mark(v); + } + }, + { + for v in this.iter() { + mark(v, queue); + } } - }); + ); } impl Finalize for PhantomData {} @@ -360,11 +517,20 @@ unsafe impl Trace for PhantomData { impl Finalize for VecDeque {} unsafe impl Trace for VecDeque { - custom_trace!(this, { - for v in this.iter() { - mark(v); + custom_trace!( + this, + queue, + { + for v in this.iter() { + mark(v); + } + }, + { + for v in this.iter() { + mark(v, queue); + } } - }); + ); } impl<'a, T: ToOwned + Trace + ?Sized> Finalize for Cow<'a, T> {} @@ -372,9 +538,18 @@ unsafe impl<'a, T: ToOwned + Trace + ?Sized> Trace for Cow<'a, T> where T::Owned: Trace, { - custom_trace!(this, { - if let Cow::Owned(ref v) = this { - mark(v); + custom_trace!( + this, + queue, + { + if let Cow::Owned(ref v) = this { + mark(v); + } + }, + { + if let Cow::Owned(ref v) = this { + mark(v, queue) + } } - }); + ); } diff --git a/gc/src/weak/ephemeron.rs b/gc/src/weak/ephemeron.rs new file mode 100644 index 0000000..8033d81 --- /dev/null +++ b/gc/src/weak/ephemeron.rs @@ -0,0 +1,236 @@ +//! This module will implement the internal types GcBox and Ephemeron +use crate::gc::{finalizer_safe, GcBox, GcBoxType}; +use crate::trace::Trace; +use crate::{clear_root_bit, set_data_ptr, Finalize, GcPointer}; +use std::cell::Cell; +use std::mem; +use std::ptr::NonNull; + +/// Implementation of an Ephemeron structure +/// +/// An Ephemeron can be either a WeakPair (Ephemeron) or a WeakBox (Ephemeron) +/// +/// +/// # Tracing with Ephemerons +/// +/// Tracing with ephemerons requires a 3 phase approach: +/// - Phase One: Trace everything up to an ephemeron (queue found ephemerons) +/// - Phase Two: Trace keys of queued ephemerons. 
If reachable, +/// +/// [Reference]: https://docs.racket-lang.org/reference/ephemerons.html#%28tech._ephemeron%29 +pub struct Ephemeron { + key: Cell>>, + value: Cell>>>, +} + +impl Ephemeron { + pub(crate) fn new_weak(value: K) -> Ephemeron { + assert!(mem::align_of::>() > 1); + + unsafe { + let ptr = GcBox::new(value, GcBoxType::Weak); + + let ephem = Ephemeron { + key: Cell::new(NonNull::new_unchecked(ptr.as_ptr())), + value: Cell::new(None) + }; + ephem.set_root(); + ephem + } + } + + pub(crate) fn new_weak_pair(key: K, value: Option) -> Ephemeron { + assert!(mem::align_of::>() > 1); + + unsafe { + let key_ptr = GcBox::new(key, GcBoxType::Weak); + let value = if let Some(v) = value { + let new_gc_box = GcBox::new(v, GcBoxType::Weak); + Cell::new(Some(NonNull::new_unchecked(new_gc_box.as_ptr()))) + } else { + Cell::new(None) + }; + + let ephem = Ephemeron { + key: Cell::new(NonNull::new_unchecked(key_ptr.as_ptr())), + value, + }; + ephem.set_root(); + ephem + } + } + + #[inline] + pub fn set_value(&self, value: V) { + unsafe { + let new_value = GcBox::new(value, GcBoxType::Weak); + self.value.set(Some(NonNull::new_unchecked(new_value.as_ptr()))); + } + } + +} + +impl Ephemeron { + + #[inline] + pub(crate) fn new_pair_from_gc_pointers( + key: NonNull>, + value: Option>>, + ) -> Ephemeron { + unsafe { + let value = if let Some(v) = value { + Cell::new(Some(NonNull::new_unchecked(v.as_ptr()))) + } else { + Cell::new(None) + }; + + let ephem = Ephemeron { + key: Cell::new(NonNull::new_unchecked(key.as_ptr())), + value, + }; + ephem.set_root(); + ephem + } + } + + #[inline] + pub(crate) fn weak_from_gc_box(value: NonNull>) -> Ephemeron { + unsafe { + let ephem = Ephemeron { + key: Cell::new(NonNull::new_unchecked(value.as_ptr())), + value: Cell::new(None), + }; + ephem.set_root(); + ephem + } + } + + fn rooted(&self) -> bool { + self.key.get().as_ptr() as *mut u8 as usize & 1 != 0 + } + + unsafe fn set_root(&self) { + let ptr = self.key.get().as_ptr(); + let data = ptr as *mut u8; + let addr = data as isize; + let ptr = set_data_ptr(ptr, data.wrapping_offset((addr | 1) - addr)); + self.key.set(NonNull::new_unchecked(ptr)); + } + + unsafe fn clear_root(&self) { + self.key.set(clear_root_bit(self.key.get())); + } + + #[inline] + pub(crate) fn is_marked(&self) -> bool { + self.inner_key().is_marked() + } + + #[inline] + fn inner_key_ptr(&self) -> *mut GcBox { + assert!(finalizer_safe()); + unsafe { clear_root_bit(self.key.get()).as_ptr() } + } + + #[inline] + fn inner_value_ptr(&self) -> Option<*mut GcBox> { + assert!(finalizer_safe()); + + if let Some(gc_box) = self.value.get() { + let val = gc_box.as_ptr(); + Some(val) + } else { + None + } + } + + #[inline] + fn inner_key(&self) -> &GcBox { + unsafe { &*self.inner_key_ptr() } + } + + #[inline] + fn inner_value(&self) -> Option<&GcBox> { + unsafe { + if let Some(inner_value) = self.inner_value_ptr() { + Some(&*inner_value) + } else { + None + } + } + } + + #[inline] + pub fn key_value(&self) -> &K { + self.inner_key().value() + } + + #[inline] + pub fn value(&self) -> Option<&V> { + if let Some(gcbox) = self.inner_value() { + Some(gcbox.value()) + } else { + None + } + } + + #[inline] + unsafe fn weak_trace_key(&self, queue: &mut Vec) { + self.inner_key().weak_trace_inner(queue) + } + + #[inline] + unsafe fn weak_trace_value(&self, queue: &mut Vec) { + if let Some(gcbox) = self.inner_value() { + gcbox.weak_trace_inner(queue); + } + } +} + +impl Finalize for Ephemeron { + #[inline] + fn finalize(&self) { + self.value.set(None) + } +} + +unsafe 
impl Trace for Ephemeron { + #[inline] + unsafe fn trace(&self) { + /* An ephemeron is never traced with Phase One Trace */ + /* May be traced in phase 3, so this still may need to be implemented */ + } + + #[inline] + unsafe fn is_marked_ephemeron(&self) -> bool { + self.is_marked() + } + + #[inline] + unsafe fn weak_trace(&self, queue: &mut Vec) { + if self.is_marked() { + self.weak_trace_key(queue); + self.weak_trace_value(queue); + } + } + + #[inline] + unsafe fn root(&self) { + // An ephemeron is never rooted in the GcBoxHeader + assert!(!self.rooted(), "Can't double-root an Ephemeron"); + + self.set_root() + } + + #[inline] + unsafe fn unroot(&self) { + // An ephemeron is never rotted in the GcBoxHeader + assert!(self.rooted(), "Can't double-unroot an Ephemeron"); + self.clear_root(); + } + + #[inline] + fn finalize_glue(&self) { + Finalize::finalize(self) + } +} diff --git a/gc/src/weak/mod.rs b/gc/src/weak/mod.rs new file mode 100644 index 0000000..c5f4cde --- /dev/null +++ b/gc/src/weak/mod.rs @@ -0,0 +1,23 @@ +use crate::gc::GcBox; +pub use crate::gc::{finalizer_safe, force_collect}; +use crate::set_data_ptr; +pub use crate::trace::{Finalize, Trace}; +use std::ptr::NonNull; + +pub(crate) mod ephemeron; +pub mod pair; +pub mod weak_gc; + +pub(crate) use ephemeron::Ephemeron; +pub use pair::WeakPair; +pub use weak_gc::WeakGc; + +pub(crate) unsafe fn clear_root_bit( + ptr: NonNull>>, +) -> NonNull>> { + let ptr = ptr.as_ptr(); + let data = ptr as *mut u8; + let addr = data as isize; + let ptr = set_data_ptr(ptr, data.wrapping_offset((addr & !1) - addr)); + NonNull::new_unchecked(ptr) +} diff --git a/gc/src/weak/pair.rs b/gc/src/weak/pair.rs new file mode 100644 index 0000000..075b401 --- /dev/null +++ b/gc/src/weak/pair.rs @@ -0,0 +1,226 @@ +pub use crate::gc::{finalizer_safe, force_collect}; +use crate::gc::{GcBox, GcBoxType}; +pub use crate::trace::{Finalize, Trace}; +use crate::weak::{clear_root_bit, Ephemeron}; +use crate::{set_data_ptr, GcPointer}; +use std::cell::Cell; +use std::fmt; +use std::mem; +use std::ops::Deref; +use std::ptr::NonNull; + +////////////// +// WeakPair // +////////////// + +// The WeakPair struct is a garbage collected pointer to an Ephemeron +pub struct WeakPair { + ptr_root: Cell>>>, +} + +impl WeakPair { + /// Crate a new Weak type Gc + /// + /// This method can trigger a collection + pub fn new(key: K, value: Option) -> Self { + assert!(mem::align_of::>() > 1); + + unsafe { + // Allocate the memory for the object + let eph_value = Ephemeron::new_weak_pair(key, value); + let ptr = GcBox::new(eph_value, GcBoxType::Ephemeron); + + (*ptr.as_ptr()).value().unroot(); + let weak_gc = WeakPair { + ptr_root: Cell::new(NonNull::new_unchecked(ptr.as_ptr())), + }; + weak_gc.set_root(); + weak_gc + } + } + + #[inline] + pub fn set_value(&self, value: V) { + self.inner().value().set_value(value) + } +} + +impl WeakPair { + fn rooted(&self) -> bool { + self.ptr_root.get().as_ptr() as *mut u8 as usize & 1 != 0 + } + + unsafe fn set_root(&self) { + let ptr = self.ptr_root.get().as_ptr(); + let data = ptr as *mut u8; + let addr = data as isize; + let ptr = set_data_ptr(ptr, data.wrapping_offset((addr | 1) - addr)); + self.ptr_root.set(NonNull::new_unchecked(ptr)); + } + + unsafe fn clear_root(&self) { + self.ptr_root.set(clear_root_bit(self.ptr_root.get())); + } + + #[inline] + fn inner_ptr(&self) -> *mut GcBox> { + // If we are currently in the dropping phase of garbage collection, + // it would be undefined behavior to dereference this pointer. 
+ // By opting into `Trace` you agree to not dereference this pointer + // within your drop method, meaning that it should be safe. + // + // This assert exists just in case. + assert!(finalizer_safe()); + + unsafe { clear_root_bit(self.ptr_root.get()).as_ptr() } + } + + #[inline] + fn inner(&self) -> &GcBox> { + unsafe { &*self.inner_ptr() } + } +} + +impl WeakPair { + #[inline] + pub fn key_value(&self) -> &K { + self.inner().value().key_value() + } + + #[inline] + pub fn value(&self) -> Option<&V> { + self.inner().value().value() + } + + #[inline] + pub fn value_tuple(&self) -> (&K, Option<&V>) { + (self.key_value(), self.value()) + } + + #[inline] + pub fn from_gc_pair(key: NonNull>, value: Option>>) -> Self { + unsafe { + let eph = Ephemeron::new_pair_from_gc_pointers(key, value); + let ptr = GcBox::new(eph, GcBoxType::Ephemeron); + + let weak_gc = WeakPair { + ptr_root: Cell::new(NonNull::new_unchecked(ptr.as_ptr())), + }; + weak_gc.set_root(); + weak_gc + } + } +} + +impl WeakPair { + #[inline] + pub(crate) fn from_gc_value_pair(key:NonNull>, value: Option) -> Self { + unsafe { + let value_ptr = if let Some(v) = value { + let gcbox = GcBox::new(v, GcBoxType::Weak); + Some(NonNull::new_unchecked(gcbox.as_ptr())) + } else { + None + }; + + let eph = Ephemeron::new_pair_from_gc_pointers(key, value_ptr); + let ptr = GcBox::new(eph, GcBoxType::Ephemeron); + + let weak_pair = WeakPair { + ptr_root: Cell::new(NonNull::new_unchecked(ptr.as_ptr())), + }; + weak_pair.set_root(); + weak_pair + } + } +} + +impl Finalize for WeakPair {} + +unsafe impl Trace for WeakPair { + #[inline] + unsafe fn trace(&self) { + // Set the strong reference here to false in the case that a trace has run and no + // strong refs exist. + self.inner().trace_inner(); + } + + unsafe fn is_marked_ephemeron(&self) -> bool { + // This is technically an Ephemeron wrapper. + // Returning false to ensure that only an Ephemeron returns true + false + } + + unsafe fn weak_trace(&self, queue: &mut Vec) { + // WeakPair is an Ephemeron wrapper, so we know the inner GcBox must contain an + // an Ephemeron. 
So we push the Ephemeron onto the Ephemeron queue to be checked + // by the collector + queue.push(self.ptr_root.get()) + } + + #[inline] + unsafe fn root(&self) { + assert!(!self.rooted(), "Can't double-root a WeakPair"); + self.set_root(); + } + + #[inline] + unsafe fn unroot(&self) { + assert!(self.rooted(), "Can't double-unroot a WeakPair"); + self.clear_root(); + } + + #[inline] + fn finalize_glue(&self) { + Finalize::finalize(self) + } +} + +impl Clone for WeakPair { + #[inline] + fn clone(&self) -> Self { + unsafe { + let weak_gc = WeakPair { + ptr_root: Cell::new(self.ptr_root.get()), + }; + weak_gc.set_root(); + weak_gc + } + } +} + +impl Deref for WeakPair { + type Target = K; + + #[inline] + fn deref(&self) -> &K { + &self.inner().value().key_value() + } +} + +impl Default for WeakPair { + #[inline] + fn default() -> Self { + Self::new(Default::default(), Default::default()) + } +} + +impl fmt::Pointer for WeakPair { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Pointer::fmt(&self.inner(), f) + } +} + +// TODO: implement FROM trait for WeakPair + +impl std::borrow::Borrow for WeakPair { + fn borrow(&self) -> &K { + &**self + } +} + +impl std::convert::AsRef for WeakPair { + fn as_ref(&self) -> &K { + &**self + } +} diff --git a/gc/src/weak/weak_gc.rs b/gc/src/weak/weak_gc.rs new file mode 100644 index 0000000..6aa2e82 --- /dev/null +++ b/gc/src/weak/weak_gc.rs @@ -0,0 +1,258 @@ +pub use crate::gc::{finalizer_safe, force_collect}; +use crate::gc::{GcBox, GcBoxType}; +pub use crate::trace::{Finalize, Trace}; +use crate::weak::{clear_root_bit, Ephemeron}; +use crate::{set_data_ptr, GcPointer}; +use std::cell::Cell; +use std::cmp::Ordering; +use std::fmt::{self, Debug, Display}; +use std::hash::{Hash, Hasher}; +use std::mem; +use std::ops::Deref; +use std::ptr::NonNull; + +//////////// +// WeakGc // +//////////// + +/// A weak Garbage Collected pointer for an immutable value +/// +/// This implementation uses an Ephemeron as a generalized weak +/// box to trace and sweep the inner values. +pub struct WeakGc { + ptr_root: Cell>>>, +} + +impl WeakGc { + /// Crate a new Weak type Gc + /// + /// This method can trigger a collection + pub fn new(value: T) -> Self { + assert!(mem::align_of::>() > 1); + + unsafe { + // Allocate the memory for the object + let eph_value = Ephemeron::::new_weak(value); + let ptr = GcBox::new(eph_value, GcBoxType::Ephemeron); + + (*ptr.as_ptr()).value().unroot(); + let weak_gc = WeakGc { + ptr_root: Cell::new(NonNull::new_unchecked(ptr.as_ptr())), + }; + weak_gc.set_root(); + weak_gc + } + } +} + +impl WeakGc { + fn rooted(&self) -> bool { + self.ptr_root.get().as_ptr() as *mut u8 as usize & 1 != 0 + } + + unsafe fn set_root(&self) { + let ptr = self.ptr_root.get().as_ptr(); + let data = ptr as *mut u8; + let addr = data as isize; + let ptr = set_data_ptr(ptr, data.wrapping_offset((addr | 1) - addr)); + self.ptr_root.set(NonNull::new_unchecked(ptr)); + } + + unsafe fn clear_root(&self) { + self.ptr_root.set(clear_root_bit(self.ptr_root.get())); + } + + #[inline] + fn inner_ptr(&self) -> *mut GcBox> { + // If we are currently in the dropping phase of garbage collection, + // it would be undefined behavior to dereference this pointer. + // By opting into `Trace` you agree to not dereference this pointer + // within your drop method, meaning that it should be safe. + // + // This assert exists just in case. 
+ assert!(finalizer_safe()); + + unsafe { clear_root_bit(self.ptr_root.get()).as_ptr() } + } + + #[inline] + fn inner(&self) -> &GcBox> { + unsafe { &*self.inner_ptr() } + } +} + +impl WeakGc { + #[inline] + pub fn value(&self) -> &T { + self.inner().value().key_value() + } + + #[inline] + pub(crate) fn from_gc_box(gc_box: NonNull>) -> Self { + unsafe { + let eph = Ephemeron::::weak_from_gc_box(gc_box); + let ptr = GcBox::new(eph, GcBoxType::Ephemeron); + + let weak_gc = WeakGc { + ptr_root: Cell::new(NonNull::new_unchecked(ptr.as_ptr())), + }; + weak_gc.set_root(); + weak_gc + } + } +} + +impl Finalize for WeakGc {} + +unsafe impl Trace for WeakGc { + #[inline] + unsafe fn trace(&self) { + // Set the strong reference here to false in the case that a trace has run and no + // strong refs exist. + self.inner().trace_inner(); + } + + unsafe fn is_marked_ephemeron(&self) -> bool { + // This is technically an Ephemeron wrapper. + // Returning false to ensure that only an Ephemeron returns true + false + } + + unsafe fn weak_trace(&self, queue: &mut Vec) { + // WeakGc is an Ephemeron wrapper, so we know the inner GcBox must contain an + // an Ephemeron. So we push the Ephemeron onto the Ephemeron queue to be checked + // by the collector + queue.push(self.ptr_root.get()) + } + + #[inline] + unsafe fn root(&self) { + assert!(!self.rooted(), "Can't double-root a WeakGc"); + self.set_root(); + } + + #[inline] + unsafe fn unroot(&self) { + assert!(self.rooted(), "Can't double-unroot a WeakGc"); + self.clear_root(); + } + + #[inline] + fn finalize_glue(&self) { + Finalize::finalize(self) + } +} + +impl Clone for WeakGc { + #[inline] + fn clone(&self) -> Self { + unsafe { + let weak_gc = WeakGc { + ptr_root: Cell::new(self.ptr_root.get()), + }; + weak_gc.set_root(); + weak_gc + } + } +} + +impl Deref for WeakGc { + type Target = T; + + #[inline] + fn deref(&self) -> &T { + &self.inner().value().key_value() + } +} + +impl Default for WeakGc { + #[inline] + fn default() -> Self { + Self::new(Default::default()) + } +} + +impl PartialEq for WeakGc { + #[inline(always)] + fn eq(&self, other: &Self) -> bool { + **self == **other + } +} + +impl Eq for WeakGc {} + +impl PartialOrd for WeakGc { + #[inline(always)] + fn partial_cmp(&self, other: &Self) -> Option { + (**self).partial_cmp(&**other) + } + + #[inline(always)] + fn lt(&self, other: &Self) -> bool { + **self < **other + } + + #[inline(always)] + fn le(&self, other: &Self) -> bool { + **self <= **other + } + + #[inline(always)] + fn gt(&self, other: &Self) -> bool { + **self > **other + } + + #[inline(always)] + fn ge(&self, other: &Self) -> bool { + **self >= **other + } +} + +impl Ord for WeakGc { + #[inline] + fn cmp(&self, other: &Self) -> Ordering { + (**self).cmp(&**other) + } +} + +impl Hash for WeakGc { + fn hash(&self, state: &mut H) { + (**self).hash(state); + } +} + +impl Display for WeakGc { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Display::fmt(&**self, f) + } +} + +impl Debug for WeakGc { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Debug::fmt(&**self, f) + } +} + +impl fmt::Pointer for WeakGc { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Pointer::fmt(&self.inner(), f) + } +} + +impl From for WeakGc { + fn from(t: T) -> Self { + Self::new(t) + } +} + +impl std::borrow::Borrow for WeakGc { + fn borrow(&self) -> &T { + &**self + } +} + +impl std::convert::AsRef for WeakGc { + fn as_ref(&self) -> &T { + &**self + } +} diff --git a/gc/tests/derive_bounds.rs 
b/gc/tests/derive_bounds.rs index 9552545..c35042b 100644 --- a/gc/tests/derive_bounds.rs +++ b/gc/tests/derive_bounds.rs @@ -1,5 +1,5 @@ +use gc::{Gc, GcPointer}; use gc_derive::{Finalize, Trace}; -use gc::Gc; // This impl should *not* require T: Trace. #[derive(Finalize, Trace)] diff --git a/gc/tests/finalize.rs b/gc/tests/finalize.rs index 5b668a6..f90d6af 100644 --- a/gc/tests/finalize.rs +++ b/gc/tests/finalize.rs @@ -1,4 +1,4 @@ -use gc::{Finalize, Trace}; +use gc::{Finalize, GcPointer, Trace}; use gc_derive::{Finalize, Trace}; use std::cell::Cell; diff --git a/gc/tests/gc_semantics.rs b/gc/tests/gc_semantics.rs index 83fcb90..3d734ee 100644 --- a/gc/tests/gc_semantics.rs +++ b/gc/tests/gc_semantics.rs @@ -1,4 +1,4 @@ -use gc::{force_collect, Finalize, Gc, GcCell, Trace}; +use gc::{force_collect, Finalize, Gc, GcCell, GcPointer, Trace}; use gc_derive::{Finalize, Trace}; use std::cell::Cell; use std::thread::LocalKey; @@ -7,6 +7,7 @@ use std::thread::LocalKey; #[derive(PartialEq, Eq, Debug, Clone, Copy)] struct GcWatchFlags { trace: i32, + weak_trace: i32, root: i32, unroot: i32, drop: i32, @@ -14,9 +15,17 @@ struct GcWatchFlags { } impl GcWatchFlags { - fn new(trace: i32, root: i32, unroot: i32, drop: i32, finalize: i32) -> GcWatchFlags { + fn new( + trace: i32, + weak_trace: i32, + root: i32, + unroot: i32, + drop: i32, + finalize: i32, + ) -> GcWatchFlags { GcWatchFlags { trace, + weak_trace, root, unroot, drop, @@ -27,6 +36,7 @@ impl GcWatchFlags { fn zero() -> Cell { Cell::new(GcWatchFlags { trace: 0, + weak_trace: 0, root: 0, unroot: 0, drop: 0, @@ -65,6 +75,16 @@ unsafe impl Trace for GcWatch { f.set(of); }); } + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + unsafe fn weak_trace(&self, _queue: &mut Vec) { + self.0.with(|f| { + let mut of = f.get(); + of.weak_trace += 1; + f.set(of) + }); + } unsafe fn root(&self) { self.0.with(|f| { let mut of = f.get(); @@ -98,14 +118,14 @@ fn basic_allocate() { { let _gced_val = Gc::new(GcWatch(&FLAGS)); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 1, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 1, 0, 0))); force_collect(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 0, 1, 0, 0))); } - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 0, 1, 0, 0))); force_collect(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 1, 1))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 0, 1, 1, 1))); } #[test] @@ -119,49 +139,49 @@ fn basic_cycle_allocate() { watch: GcWatch(&FLAGS1), cycle: GcCell::new(None), }); - FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 1, 0, 0))); + FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 1, 0, 0))); let node2 = Gc::new(GcWatchCycle { watch: GcWatch(&FLAGS2), cycle: GcCell::new(Some(node1.clone())), }); - FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 1, 0, 0))); - FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 1, 0, 0))); + FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 1, 0, 0))); + FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 1, 0, 0))); force_collect(); - FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0, 0))); - FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0, 0))); + FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 0, 1, 0, 0))); + 
FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 0, 1, 0, 0))); // Move node2 into the cycleref { *node1.cycle.borrow_mut() = Some(node2); - FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0, 0))); - FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0, 0))); + FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 0, 1, 0, 0))); + FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 0, 1, 0, 0))); force_collect(); - FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 0, 0))); - FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 0, 0))); + FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 0, 1, 0, 0))); + FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 0, 1, 0, 0))); } - FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 0, 0))); - FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 0, 0))); + FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 0, 1, 0, 0))); + FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 0, 1, 0, 0))); force_collect(); - FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 0, 0))); - FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 0, 0))); + FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 0, 1, 0, 0))); + FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 0, 1, 0, 0))); } - FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 0, 0))); - FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 0, 0))); + FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 0, 1, 0, 0))); + FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 0, 1, 0, 0))); force_collect(); - FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 1, 1))); - FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 1, 1))); + FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 0, 1, 1, 1))); + FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 0, 1, 1, 1))); } #[test] @@ -171,74 +191,74 @@ fn gccell_rooting() { { let cell = GcCell::new(GcWatch(&FLAGS)); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 0, 0, 0))); { // Borrow it let _borrowed = cell.borrow(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 0, 0, 0))); // Shared borrows can happen multiple times in one scope let _borrowed2 = cell.borrow(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 0, 0, 0))); } - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 0, 0, 0))); { // Borrow it mutably now let _borrowed = cell.borrow_mut(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 0, 0, 0))); } - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 0, 0, 0))); // Put it in a gc (should unroot the GcWatch) let gc_wrapper = Gc::new(cell); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 1, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 0, 1, 0, 0))); // It should be traced by the GC force_collect(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0, 
0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 0, 1, 0, 0))); { // Borrow it let _borrowed = gc_wrapper.borrow(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 0, 1, 0, 0))); // Shared borrows can happen multiple times in one scope let _borrowed2 = gc_wrapper.borrow(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 0, 1, 0, 0))); // It should be traced by the GC force_collect(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 0, 1, 0, 0))); } - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 0, 1, 0, 0))); { // Borrow it mutably now - this should root the GcWatch let _borrowed = gc_wrapper.borrow_mut(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 1, 1, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 1, 0, 0))); // It shouldn't be traced by the GC (as it's owned by the GcCell) // If it had rootable members, they would be traced by the GC force_collect(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 1, 1, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 1, 0, 0))); } // Dropping the borrow should unroot it again - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 1, 2, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 2, 0, 0))); // It should be traced by the GC force_collect(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 1, 2, 0, 0))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 2, 0, 0))); } // It should be collected by the GC force_collect(); - FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 1, 2, 1, 1))); + FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 2, 1, 1))); } #[cfg(feature = "nightly")] diff --git a/gc/tests/gymnastics_cycle.rs b/gc/tests/gymnastics_cycle.rs index 54e7618..8a4cc6a 100644 --- a/gc/tests/gymnastics_cycle.rs +++ b/gc/tests/gymnastics_cycle.rs @@ -1,4 +1,4 @@ -use gc::{force_collect, Gc, GcCell}; +use gc::{force_collect, Gc, GcCell, GcPointer}; use gc_derive::Trace; use std::cell::Cell; diff --git a/gc/tests/resurrections.rs b/gc/tests/resurrections.rs new file mode 100644 index 0000000..8d2a095 --- /dev/null +++ b/gc/tests/resurrections.rs @@ -0,0 +1,37 @@ +use gc::{force_collect, Finalize, Gc, GcCell, GcPointer}; +use gc_derive::{Finalize, Trace}; + +#[derive(Finalize, Trace)] +struct Foo { + bar: GcCell>>, +} + +#[derive(Trace)] +struct Bar { + string: String, + foo: Gc, + this: GcCell>>, +} + +impl Finalize for Bar { + fn finalize(&self) { + println!("Bar's finalizer has run :)"); + *self.foo.bar.borrow_mut() = self.this.borrow().clone(); + } +} + +#[test] +fn resurrection_by_finalizer() { + let foo = Gc::new(Foo { + bar: GcCell::new(None), + }); + let bar = Gc::new(Bar { + string: "Hello, world!".to_string(), + foo: foo.clone(), + this: GcCell::new(None), + }); + *bar.this.borrow_mut() = Some(bar.clone()); + drop(bar); + force_collect(); + assert_eq!(foo.bar.borrow().as_ref().unwrap().string, "Hello, world!"); +} diff --git a/gc/tests/trace_impl.rs b/gc/tests/trace_impl.rs index c59783f..39943bc 100644 --- a/gc/tests/trace_impl.rs +++ b/gc/tests/trace_impl.rs @@ -4,7 +4,7 @@ use std::rc::Rc; thread_local!(static X: RefCell = RefCell::new(0)); 
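Note: the new tests below cover WeakGc, but nothing in this patch exercises WeakPair directly. A companion test is sketched here; it is not part of the diff, the test name is hypothetical, and it only uses methods the patch itself introduces (WeakPair::new, key_value, value, set_value, value_tuple, Gc::create_weak_pair). Like the existing weak_gc.rs tests it assumes no collection fires in the middle of constructing a pair, and it deliberately asserts nothing about a weakly held key or value after a collection, since those may be freed.

use gc::{force_collect, Gc, GcCell, WeakPair};

#[test]
fn weak_pair_basic_usage() {
    // Build a key/value ephemeron pair directly.
    let pair = WeakPair::new(GcCell::new(1), Some(String::from("value")));
    assert_eq!(pair.key_value(), &GcCell::new(1));
    assert_eq!(pair.value(), Some(&String::from("value")));

    // The value slot can be replaced after construction.
    pair.set_value(String::from("replacement"));
    assert_eq!(pair.value(), Some(&String::from("replacement")));

    // A pair can also be hung off an existing, rooted Gc key.
    let key = Gc::new(GcCell::new(2));
    let pair2 = key.create_weak_pair(Some(3_i32));
    assert_eq!(pair2.value_tuple(), (&GcCell::new(2), Some(&3)));

    // Only checks that a collection with live pairs around does not crash.
    force_collect();
}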
-use gc::Trace; +use gc::{GcPointer, Trace}; #[derive(Copy, Clone, Finalize)] struct Foo; @@ -16,6 +16,10 @@ unsafe impl Trace for Foo { *m += 1; }) } + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + unsafe fn weak_trace(&self, _queue: &mut Vec) {} unsafe fn root(&self) {} unsafe fn unroot(&self) {} fn finalize_glue(&self) {} diff --git a/gc/tests/unsized.rs b/gc/tests/unsized.rs index 329aa40..3c00706 100644 --- a/gc/tests/unsized.rs +++ b/gc/tests/unsized.rs @@ -1,4 +1,4 @@ -use gc::{Gc, Trace}; +use gc::{Gc, GcPointer, Trace}; use gc_derive::{Finalize, Trace}; trait Foo: Trace {} diff --git a/gc/tests/weak_gc.rs b/gc/tests/weak_gc.rs new file mode 100644 index 0000000..c2c2edb --- /dev/null +++ b/gc/tests/weak_gc.rs @@ -0,0 +1,26 @@ +use gc::{Gc, GcCell, WeakGc}; + +#[test] +fn weak_gc_try_deref_some_value() { + let weak = WeakGc::new(GcCell::new(1)); + assert_eq!(weak.value(), &(GcCell::new(1))); +} + +#[test] +fn weak_gc_from_existing() { + let gc = Gc::new(GcCell::new(1)); + let weak_gc = gc.clone_weak_gc(); + assert_eq!(weak_gc.value(), &(GcCell::new(1))); +} + +#[test] +fn weak_gc_different_copies() { + let gc = Gc::new(GcCell::new(1)); + let weak_gc1 = gc.clone_weak_gc(); + let _weak_gc2 = weak_gc1.clone(); + + { + let _weak_gc3 = WeakGc::new(GcCell::new(2)); + gc::force_collect(); + } +} \ No newline at end of file diff --git a/gc_derive/Cargo.toml b/gc_derive/Cargo.toml index 7834db3..3b8f73f 100644 --- a/gc_derive/Cargo.toml +++ b/gc_derive/Cargo.toml @@ -18,4 +18,4 @@ proc-macro = true syn = "1.0" proc-macro2 = "1.0" quote = "1.0" -synstructure = "0.12" +synstructure = "0.12" \ No newline at end of file diff --git a/gc_derive/src/lib.rs b/gc_derive/src/lib.rs index bedc924..23b16ce 100644 --- a/gc_derive/src/lib.rs +++ b/gc_derive/src/lib.rs @@ -11,6 +11,7 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream { .any(|attr| attr.path.is_ident("unsafe_ignore_trace")) }); let trace_body = s.each(|bi| quote!(mark(#bi))); + let weak_trace_body = s.each(|bi| quote!(mark(#bi, queue))); s.add_bounds(AddBounds::Fields); let trace_impl = s.unsafe_bound_impl( @@ -24,6 +25,17 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream { } match *self { #trace_body } } + #[inline] unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + #[inline] unsafe fn weak_trace(&self, queue: &mut Vec) { + #[allow(dead_code, unreachable_code)] + #[inline] + unsafe fn mark(it: &T, queue: &mut Vec<::gc::GcPointer>) { + ::gc::Trace::weak_trace(it, queue) + } + match *self { #weak_trace_body } + } #[inline] unsafe fn root(&self) { #[allow(dead_code)] #[inline]
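For downstream code that implements Trace by hand (as gc/tests/trace_impl.rs does above), the two new trait methods also have to be written out. The sketch below shows the expected shape for a struct with a single traced field; Wrapper is a hypothetical example type, and this mirrors the pattern the derive and the manual test impls use rather than the derive's literal expansion.

use gc::{Finalize, Gc, GcPointer, Trace};

struct Wrapper {
    inner: Gc<i32>,
}

impl Finalize for Wrapper {}

unsafe impl Trace for Wrapper {
    unsafe fn trace(&self) {
        Trace::trace(&self.inner);
    }
    // Only `Ephemeron` answers true here; every other impl returns false.
    unsafe fn is_marked_ephemeron(&self) -> bool {
        false
    }
    // Forward the collector's ephemeron queue into each traced field.
    unsafe fn weak_trace(&self, queue: &mut Vec<GcPointer>) {
        Trace::weak_trace(&self.inner, queue);
    }
    unsafe fn root(&self) {
        Trace::root(&self.inner);
    }
    unsafe fn unroot(&self) {
        Trace::unroot(&self.inner);
    }
    fn finalize_glue(&self) {
        Finalize::finalize(self);
        Trace::finalize_glue(&self.inner);
    }
}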