diff --git a/gc/src/gc.rs b/gc/src/gc.rs index 3e1f8f9..2e1a66f 100644 --- a/gc/src/gc.rs +++ b/gc/src/gc.rs @@ -52,7 +52,8 @@ thread_local!(static GC_STATE: RefCell = RefCell::new(GcState { })); const MARK_MASK: usize = 1 << (usize::BITS - 1); -const ROOTS_MASK: usize = !MARK_MASK; +const FINALIZED_MASK: usize = 1 << (usize::BITS - 2); +const ROOTS_MASK: usize = !(MARK_MASK | FINALIZED_MASK); const ROOTS_MAX: usize = ROOTS_MASK; // max allowed value of roots pub(crate) struct GcBoxHeader { @@ -81,7 +82,7 @@ impl GcBoxHeader { // abort if the count overflows to prevent `mem::forget` loops // that could otherwise lead to erroneous drops if (roots & ROOTS_MASK) < ROOTS_MAX { - self.roots.set(roots + 1); // we checked that this wont affect the high bit + self.roots.set(roots + 1); // we checked that this won't affect the high bits } else { panic!("roots counter overflow"); } @@ -89,7 +90,7 @@ impl GcBoxHeader { #[inline] pub fn dec_roots(&self) { - self.roots.set(self.roots.get() - 1); // no underflow check + self.roots.set(self.roots.get() - 1); // no underflow check, always nonzero number of roots } #[inline] @@ -97,6 +98,16 @@ impl GcBoxHeader { self.roots.get() & MARK_MASK != 0 } + #[inline] + pub fn is_finalized(&self) -> bool { + self.roots.get() & FINALIZED_MASK != 0 + } + + #[inline] + pub fn set_finalized(&self) { + self.roots.set(self.roots.get() | FINALIZED_MASK) + } + #[inline] pub fn mark(&self) { self.roots.set(self.roots.get() | MARK_MASK); @@ -298,9 +309,13 @@ fn collect_garbage(st: &mut GcState) { unsafe fn sweep(finalized: Vec>, bytes_allocated: &mut usize) { let _guard = DropGuard::new(); for node in finalized.into_iter().rev() { - if node.this.as_ref().header.is_marked() { - continue; - } + // sanity check. If this trips we have violated an unsafe invariant. + // This won't catch all UB, just direct reclamation of roots!! 
+ assert_eq!( + node.this.as_ref().header.roots(), + 0, + "Reclaimed node should not be rooted" + ); let incoming = node.incoming; let node = Box::from_raw(node.this.as_ptr()); *bytes_allocated -= mem::size_of_val::>(&*node); @@ -316,10 +331,16 @@ fn collect_garbage(st: &mut GcState) { if unmarked.is_empty() { return; } - for node in &unmarked { - Trace::finalize_glue(&node.this.as_ref().data); + // finalize unmarked nodes + for node in unmarked { + if !node.this.as_ref().header.is_finalized() { + Trace::finalize_glue(&node.this.as_ref().data); + node.this.as_ref().header.set_finalized(); + } } - mark(head); + // rerun mark phase and reclaim unmarked finalized nodes + let mut unmarked = mark(head); + unmarked.retain(|node| node.this.as_ref().header.is_finalized()); sweep(unmarked, &mut st.stats.bytes_allocated); } } diff --git a/gc/tests/finalize.rs b/gc/tests/finalize.rs index f717f84..c5ac451 100644 --- a/gc/tests/finalize.rs +++ b/gc/tests/finalize.rs @@ -1,4 +1,4 @@ -use gc::{Finalize, Trace}; +use gc::{Finalize, Gc, GcCell, Trace}; use std::cell::Cell; #[derive(PartialEq, Eq, Debug, Clone, Copy)] @@ -46,3 +46,31 @@ fn drop_triggers_finalize() { } FLAGS.with(|f| assert_eq!(f.get(), Flags(1, 1))); } + +#[derive(Trace)] +struct Resurrection { + escape: Gc>>, + value: Gc, +} + +impl Finalize for Resurrection { + fn finalize(&self) { + *self.escape.borrow_mut() = self.value.clone(); + } +} + +// run this with miri to detect UB +// cargo +nightly miri test -p gc --test finalize +#[test] +fn finalizer_can_resurrect() { + let escape = Gc::new(GcCell::new(Gc::new(String::new()))); + let value = Gc::new(GcCell::new(Resurrection { + escape: escape.clone(), + value: Gc::new(String::from("Hello world")), + })); + drop(value); + + gc::force_collect(); + + assert_eq!(&**escape.borrow(), "Hello world"); +}