
Use `Vec<T>` for keeping track of gc objects (#3493)

* Use `Vec<T>` for keeping track of gc objects

* Apply review
Haled Odat committed 11 months ago (via GitHub)
commit 6506f6520f
Changed files:

  1. boa_gc/src/internals/ephemeron_box.rs (2 changes)
  2. boa_gc/src/internals/gc_box.rs (8 changes)
  3. boa_gc/src/internals/weak_map_box.rs (9 changes)
  4. boa_gc/src/lib.rs (128 changes)
  5. boa_gc/src/test/mod.rs (3 changes)
  6. boa_gc/src/test/weak.rs (4 changes)
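
In short, the commit swaps the intrusive singly-linked lists that were threaded through every GC box header for flat `Vec`s of erased pointers owned by the collector itself. A minimal sketch of the before/after layout (type names simplified; not the crate's actual definitions):

```rust
use std::cell::Cell;
use std::ptr::NonNull;

// Before: each header carried an intrusive `next` link, and the heap was
// a linked list whose head lived in the collector.
struct OldHeader {
    ref_count: Cell<u32>,
    next: Cell<Option<NonNull<OldHeader>>>, // extra pointer per allocation
}

struct OldHeap {
    strong_start: Cell<Option<NonNull<OldHeader>>>,
}

// After: headers shrink, and the collector owns a contiguous vector of
// pointers that can be iterated, `retain`ed, and shrunk like any `Vec`.
struct NewHeader {
    ref_count: Cell<u32>,
}

struct NewHeap {
    strongs: Vec<NonNull<NewHeader>>,
}
```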

boa_gc/src/internals/ephemeron_box.rs (2 changes)

@@ -19,7 +19,6 @@ const NON_ROOTS_MAX: u32 = NON_ROOTS_MASK;
 pub(crate) struct EphemeronBoxHeader {
     ref_count: Cell<u32>,
     non_root_count: Cell<u32>,
-    pub(crate) next: Cell<Option<NonNull<dyn ErasedEphemeronBox>>>,
 }

 impl EphemeronBoxHeader {
@@ -28,7 +27,6 @@ impl EphemeronBoxHeader {
         Self {
             ref_count: Cell::new(1),
             non_root_count: Cell::new(0),
-            next: Cell::new(None),
         }
     }
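
Dropping the `next` field is also a per-allocation size win: for the type-erased lists, the removed field was a fat pointer (data plus vtable). A quick check with stand-in types (hypothetical names; sizes assume a typical 64-bit target):

```rust
use std::cell::Cell;
use std::mem::size_of;
use std::ptr::NonNull;

trait Erased {}

// Stand-in for the old header layout, with the intrusive fat-pointer link.
struct HeaderWithNext {
    ref_count: Cell<u32>,
    non_root_count: Cell<u32>,
    next: Cell<Option<NonNull<dyn Erased>>>, // 16 bytes: data + vtable pointers
}

// Stand-in for the new header layout.
struct HeaderWithoutNext {
    ref_count: Cell<u32>,
    non_root_count: Cell<u32>,
}

fn main() {
    // Typically prints 24 and 8 on 64-bit targets (the pointer field also
    // bumps the struct's alignment from 4 to 8).
    println!("with next:    {} bytes", size_of::<HeaderWithNext>());
    println!("without next: {} bytes", size_of::<HeaderWithoutNext>());
}
```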

boa_gc/src/internals/gc_box.rs (8 changes)

@@ -1,9 +1,5 @@
 use crate::Trace;
-use std::{
-    cell::Cell,
-    fmt,
-    ptr::{self, NonNull},
-};
+use std::{cell::Cell, fmt, ptr};

 const MARK_MASK: u32 = 1 << (u32::BITS - 1);
 const NON_ROOTS_MASK: u32 = !MARK_MASK;
@@ -20,7 +16,6 @@ const NON_ROOTS_MAX: u32 = NON_ROOTS_MASK;
 pub(crate) struct GcBoxHeader {
     ref_count: Cell<u32>,
     non_root_count: Cell<u32>,
-    pub(crate) next: Cell<Option<NonNull<GcBox<dyn Trace>>>>,
 }

 impl GcBoxHeader {
@@ -29,7 +24,6 @@ impl GcBoxHeader {
         Self {
             ref_count: Cell::new(1),
             non_root_count: Cell::new(0),
-            next: Cell::new(None),
         }
     }

boa_gc/src/internals/weak_map_box.rs (9 changes)

@@ -1,10 +1,8 @@
 use crate::{pointers::RawWeakMap, GcRefCell, Trace, WeakGc};
-use std::{cell::Cell, ptr::NonNull};

 /// A box that is used to track [`WeakMap`][`crate::WeakMap`]s.
 pub(crate) struct WeakMapBox<K: Trace + Sized + 'static, V: Trace + Sized + 'static> {
     pub(crate) map: WeakGc<GcRefCell<RawWeakMap<K, V>>>,
-    pub(crate) next: Cell<Option<NonNull<dyn ErasedWeakMapBox>>>,
 }

 /// A trait that is used to erase the type of a [`WeakMapBox`].
@@ -12,9 +10,6 @@ pub(crate) trait ErasedWeakMapBox {
     /// Clear dead entries from the [`WeakMapBox`].
     fn clear_dead_entries(&self);

-    /// A pointer to the next [`WeakMapBox`].
-    fn next(&self) -> &Cell<Option<NonNull<dyn ErasedWeakMapBox>>>;
-
     /// Returns `true` if the [`WeakMapBox`] is live.
     fn is_live(&self) -> bool;
@@ -31,10 +26,6 @@ impl<K: Trace, V: Trace + Clone> ErasedWeakMapBox for WeakMapBox<K, V> {
         }
     }

-    fn next(&self) -> &Cell<Option<NonNull<dyn ErasedWeakMapBox>>> {
-        &self.next
-    }
-
     fn is_live(&self) -> bool {
         self.map.upgrade().is_some()
     }
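
With the list gone, `ErasedWeakMapBox` shrinks to just the operations the collector actually needs; traversal order now comes from the `Vec` that stores the erased pointers. A self-contained sketch of that storage pattern (toy trait and types, not the crate's API):

```rust
use std::ptr::NonNull;

trait ErasedBox {
    fn describe(&self) -> String;
}

struct TypedBox<T: std::fmt::Debug> {
    value: T,
}

impl<T: std::fmt::Debug> ErasedBox for TypedBox<T> {
    fn describe(&self) -> String {
        format!("{:?}", self.value)
    }
}

fn main() {
    // Differently-typed boxes coexist in one vector once erased.
    let mut boxes: Vec<NonNull<dyn ErasedBox>> = Vec::new();

    let a: NonNull<dyn ErasedBox> =
        NonNull::from(Box::leak(Box::new(TypedBox { value: 42u32 })));
    let b: NonNull<dyn ErasedBox> =
        NonNull::from(Box::leak(Box::new(TypedBox { value: "hi" })));
    boxes.push(a);
    boxes.push(b);

    for p in &boxes {
        // SAFETY: every pointer in `boxes` came from `Box::leak` above.
        println!("{}", unsafe { p.as_ref() }.describe());
    }

    // Reclaim the leaked boxes exactly once each.
    for p in boxes {
        // SAFETY: created from a `Box`, never freed before this point.
        drop(unsafe { Box::from_raw(p.as_ptr()) });
    }
}
```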

boa_gc/src/lib.rs (128 changes)

@@ -46,9 +46,9 @@ thread_local!(static GC_DROPPING: Cell<bool> = Cell::new(false));
 thread_local!(static BOA_GC: RefCell<BoaGc> = RefCell::new( BoaGc {
     config: GcConfig::default(),
     runtime: GcRuntimeData::default(),
-    strong_start: Cell::new(None),
-    weak_start: Cell::new(None),
-    weak_map_start: Cell::new(None),
+    strongs: Vec::default(),
+    weaks: Vec::default(),
+    weak_maps: Vec::default(),
 }));

 #[derive(Debug, Clone, Copy)]
@@ -79,9 +79,9 @@ struct GcRuntimeData {
 struct BoaGc {
     config: GcConfig,
     runtime: GcRuntimeData,
-    strong_start: Cell<Option<GcPointer>>,
-    weak_start: Cell<Option<EphemeronPointer>>,
-    weak_map_start: Cell<Option<ErasedWeakMapBoxPointer>>,
+    strongs: Vec<GcPointer>,
+    weaks: Vec<EphemeronPointer>,
+    weak_maps: Vec<ErasedWeakMapBoxPointer>,
 }

 impl Drop for BoaGc {
@@ -133,12 +133,11 @@ impl Allocator {
         let mut gc = st.borrow_mut();

         Self::manage_state(&mut gc);
-        value.header.next.set(gc.strong_start.take());

         // Safety: value cannot be a null pointer, since `Box` cannot return null pointers.
         let ptr = unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(value))) };
         let erased: NonNull<GcBox<dyn Trace>> = ptr;
-        gc.strong_start.set(Some(erased));
+        gc.strongs.push(erased);

         gc.runtime.bytes_allocated += element_size;

         ptr
@@ -154,12 +153,11 @@ impl Allocator {
         let mut gc = st.borrow_mut();

         Self::manage_state(&mut gc);
-        value.header.next.set(gc.weak_start.take());

         // Safety: value cannot be a null pointer, since `Box` cannot return null pointers.
         let ptr = unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(value))) };
         let erased: NonNull<dyn ErasedEphemeronBox> = ptr;
-        gc.weak_start.set(Some(erased));
+        gc.weaks.push(erased);

         gc.runtime.bytes_allocated += element_size;

         ptr
@@ -175,18 +173,15 @@ impl Allocator {
         let weak = WeakGc::new(&weak_map.inner);

         BOA_GC.with(|st| {
-            let gc = st.borrow_mut();
+            let mut gc = st.borrow_mut();

-            let weak_box = WeakMapBox {
-                map: weak,
-                next: Cell::new(gc.weak_map_start.take()),
-            };
+            let weak_box = WeakMapBox { map: weak };

             // Safety: value cannot be a null pointer, since `Box` cannot return null pointers.
             let ptr = unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(weak_box))) };
             let erased: ErasedWeakMapBoxPointer = ptr;
-            gc.weak_map_start.set(Some(erased));
+            gc.weak_maps.push(erased);

             weak_map
         })
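
Allocation now ends with a plain `push` instead of threading the new box into a list head. A hedged sketch of the shape of that path (simplified types; the real `Allocator` also erases to `dyn Trace` and runs collection heuristics first):

```rust
use std::ptr::NonNull;

struct GcBox<T> {
    marked: bool,
    value: T,
}

struct Heap {
    strongs: Vec<NonNull<GcBox<u32>>>,
    bytes_allocated: usize,
}

impl Heap {
    fn alloc(&mut self, value: u32) -> NonNull<GcBox<u32>> {
        let size = std::mem::size_of::<GcBox<u32>>();
        // `NonNull::from(Box::leak(..))` is the safe spelling of
        // `NonNull::new_unchecked(Box::into_raw(..))`.
        let ptr = NonNull::from(Box::leak(Box::new(GcBox { marked: false, value })));
        self.strongs.push(ptr); // amortized O(1); no header `next` to fix up
        self.bytes_allocated += size;
        ptr
    }
}

fn main() {
    let mut heap = Heap { strongs: Vec::new(), bytes_allocated: 0 };
    let p = heap.alloc(7);
    // SAFETY: just allocated above and not yet freed.
    assert_eq!(unsafe { p.as_ref() }.value, 7);

    // Tear down everything we leaked.
    for node in heap.strongs.drain(..) {
        // SAFETY: each pointer came from `Box::leak` and is freed once.
        drop(unsafe { Box::from_raw(node.as_ptr()) });
    }
}
```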
@@ -233,7 +228,7 @@ impl Collector {
         Self::trace_non_roots(gc);

-        let unreachables = Self::mark_heap(&gc.strong_start, &gc.weak_start, &gc.weak_map_start);
+        let unreachables = Self::mark_heap(&gc.strongs, &gc.weaks, &gc.weak_maps);

         // Only finalize if there are any unreachable nodes.
         if !unreachables.strong.is_empty() || !unreachables.weak.is_empty() {
@@ -241,64 +236,63 @@ impl Collector {
             // SAFETY: All passed pointers are valid, since we won't deallocate until `Self::sweep`.
             unsafe { Self::finalize(unreachables) };

-            let _final_unreachables =
-                Self::mark_heap(&gc.strong_start, &gc.weak_start, &gc.weak_map_start);
+            let _final_unreachables = Self::mark_heap(&gc.strongs, &gc.weaks, &gc.weak_maps);
         }

         // SAFETY: The head of our linked list is always valid per the invariants of our GC.
         unsafe {
             Self::sweep(
-                &gc.strong_start,
-                &gc.weak_start,
+                &mut gc.strongs,
+                &mut gc.weaks,
                 &mut gc.runtime.bytes_allocated,
             );
         }

         // Weak maps have to be cleared after the sweep, since the process dereferences GcBoxes.
-        let mut weak_map = &gc.weak_map_start;
-        while let Some(w) = weak_map.get() {
+        gc.weak_maps.retain(|w| {
             // SAFETY: The caller must ensure the validity of every node of `heap_start`.
             let node_ref = unsafe { w.as_ref() };

             if node_ref.is_live() {
                 node_ref.clear_dead_entries();
-                weak_map = node_ref.next();
+                true
             } else {
-                weak_map.set(node_ref.next().take());
                 // SAFETY:
                 // The `Allocator` must always ensure its start node is a valid, non-null pointer that
                 // was allocated by `Box::from_raw(Box::new(..))`.
                 let _unmarked_node = unsafe { Box::from_raw(w.as_ptr()) };
+                false
             }
-        }
+        });
+
+        gc.strongs.shrink_to(gc.strongs.len() >> 2);
+        gc.weaks.shrink_to(gc.weaks.len() >> 2);
+        gc.weak_maps.shrink_to(gc.weak_maps.len() >> 2);
     }
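
Two `Vec` idioms carry this hunk: `retain` filters in place in a single ordered pass (the closure is free to drop the dead boxes as a side effect), and `shrink_to` hands excess capacity back after a big sweep. One caveat worth knowing: `shrink_to` never shrinks capacity below the current length, so `len >> 2` acts as a lower bound rather than a target. A small demonstration of both:

```rust
fn main() {
    let mut v: Vec<u32> = (0..1024).collect();

    // Keep every 16th element. `retain` preserves order and is O(n),
    // unlike repeated `remove` calls, which would be O(n^2).
    v.retain(|n| n % 16 == 0);
    assert_eq!(v.len(), 64);
    assert!(v.capacity() >= 1024); // `retain` never touches capacity

    // Shrink with a lower bound: the resulting capacity is at least
    // `max(v.len(), v.len() >> 2)`, i.e. never below the length.
    v.shrink_to(v.len() >> 2);
    assert!(v.capacity() >= v.len());
    assert!(v.capacity() < 1024);

    println!("len = {}, capacity = {}", v.len(), v.capacity());
}
```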
     fn trace_non_roots(gc: &BoaGc) {
         // Count all the handles located in GC heap.
         // Then, we can find whether there is a reference from other places, and they are the roots.
-        let mut strong = &gc.strong_start;
-        while let Some(node) = strong.get() {
+        for node in &gc.strongs {
             // SAFETY: node must be valid as this phase cannot drop any node.
             let node_ref = unsafe { node.as_ref() };
             node_ref.value().trace_non_roots();
-            strong = &node_ref.header.next;
         }

-        let mut weak = &gc.weak_start;
-        while let Some(eph) = weak.get() {
+        for eph in &gc.weaks {
             // SAFETY: node must be valid as this phase cannot drop any node.
             let eph_ref = unsafe { eph.as_ref() };
             eph_ref.trace_non_roots();
-            weak = &eph_ref.header().next;
         }
     }

     /// Walk the heap and mark any nodes deemed reachable
     fn mark_heap(
-        mut strong: &Cell<Option<NonNull<GcBox<dyn Trace>>>>,
-        mut weak: &Cell<Option<NonNull<dyn ErasedEphemeronBox>>>,
-        mut weak_map: &Cell<Option<ErasedWeakMapBoxPointer>>,
+        strongs: &[GcPointer],
+        weaks: &[EphemeronPointer],
+        weak_maps: &[ErasedWeakMapBoxPointer],
     ) -> Unreachables {
         let _timer = Profiler::global().start_event("Gc Marking", "gc");
@@ -309,7 +303,7 @@ impl Collector {
         // === Preliminary mark phase ===
         //
         // 0. Get the naive list of possibly dead nodes.
-        while let Some(node) = strong.get() {
+        for node in strongs {
             // SAFETY: node must be valid as this phase cannot drop any node.
             let node_ref = unsafe { node.as_ref() };
             if node_ref.get_non_root_count() < node_ref.get_ref_count() {
@@ -318,13 +312,12 @@ impl Collector {
                     node_ref.mark_and_trace();
                 }
             } else if !node_ref.is_marked() {
-                strong_dead.push(node);
+                strong_dead.push(*node);
             }
-            strong = &node_ref.header.next;
         }

         // 0.1. Early return if there are no ephemerons in the GC
-        if weak.get().is_none() {
+        if weaks.is_empty() {
             strong_dead.retain_mut(|node| {
                 // SAFETY: node must be valid as this phase cannot drop any node.
                 unsafe { !node.as_ref().is_marked() }
@@ -341,7 +334,7 @@ impl Collector {
         // 1. Get the naive list of ephemerons that are supposedly dead or their key is dead and
         //    trace all the ephemerons that have roots and their keys are live. Also remove from
         //    this list the ephemerons that are marked but their value is dead.
-        while let Some(eph) = weak.get() {
+        for eph in weaks {
             // SAFETY: node must be valid as this phase cannot drop any node.
             let eph_ref = unsafe { eph.as_ref() };
             let header = eph_ref.header();
@@ -350,20 +343,17 @@ impl Collector {
             }
             // SAFETY: the garbage collector ensures `eph_ref` always points to valid data.
             if unsafe { !eph_ref.trace() } {
-                pending_ephemerons.push(eph);
+                pending_ephemerons.push(*eph);
             }
-            weak = &header.next;
         }

         // 2. Trace all the weak pointers in the live weak maps to make sure they do not get swept.
-        while let Some(w) = weak_map.get() {
+        for w in weak_maps {
             // SAFETY: node must be valid as this phase cannot drop any node.
             let node_ref = unsafe { w.as_ref() };

             // SAFETY: The garbage collector ensures that all nodes are valid.
             unsafe { node_ref.trace() };
-            weak_map = node_ref.next();
         }

         // 3. Iterate through all pending ephemerons, removing the ones which have been successfully
@@ -423,81 +413,79 @@ impl Collector {
     /// - Providing a list of pointers that weren't allocated by `Box::into_raw(Box::new(..))`
     ///   will result in Undefined Behaviour.
     unsafe fn sweep(
-        mut strong: &Cell<Option<NonNull<GcBox<dyn Trace>>>>,
-        mut weak: &Cell<Option<NonNull<dyn ErasedEphemeronBox>>>,
+        strong: &mut Vec<GcPointer>,
+        weak: &mut Vec<EphemeronPointer>,
         total_allocated: &mut usize,
     ) {
         let _timer = Profiler::global().start_event("Gc Sweeping", "gc");
         let _guard = DropGuard::new();

-        while let Some(node) = strong.get() {
+        strong.retain(|node| {
             // SAFETY: The caller must ensure the validity of every node of `heap_start`.
             let node_ref = unsafe { node.as_ref() };
             if node_ref.is_marked() {
                 node_ref.header.unmark();
                 node_ref.reset_non_root_count();
-                strong = &node_ref.header.next;
+                true
             } else {
                 // SAFETY: The algorithm ensures only unmarked/unreachable pointers are dropped.
                 // The caller must ensure all pointers were allocated by `Box::into_raw(Box::new(..))`.
                 let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
                 let unallocated_bytes = mem::size_of_val(&*unmarked_node);
                 *total_allocated -= unallocated_bytes;
-                strong.set(unmarked_node.header.next.take());
+                false
             }
-        }
+        });

-        while let Some(eph) = weak.get() {
+        weak.retain(|eph| {
             // SAFETY: The caller must ensure the validity of every node of `heap_start`.
             let eph_ref = unsafe { eph.as_ref() };
             let header = eph_ref.header();
             if header.is_marked() {
                 header.unmark();
                 header.reset_non_root_count();
-                weak = &header.next;
+                true
             } else {
                 // SAFETY: The algorithm ensures only unmarked/unreachable pointers are dropped.
                 // The caller must ensure all pointers were allocated by `Box::into_raw(Box::new(..))`.
                 let unmarked_eph = unsafe { Box::from_raw(eph.as_ptr()) };
                 let unallocated_bytes = mem::size_of_val(&*unmarked_eph);
                 *total_allocated -= unallocated_bytes;
-                weak.set(unmarked_eph.header().next.take());
+                false
             }
-        }
+        });
     }
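
Both unlink loops in `sweep` collapse into the same `retain` shape: survivors get their mark bits reset and return `true`; dead nodes are reboxed, their size subtracted, and return `false`. A compact generic sketch of that pattern (hypothetical helper with simplified bookkeeping, not the crate's code):

```rust
use std::ptr::NonNull;

struct Node {
    marked: bool,
    bytes: usize,
}

// One ordered pass: free unmarked nodes, unmark survivors.
fn sweep(nodes: &mut Vec<NonNull<Node>>, total_allocated: &mut usize) {
    nodes.retain(|node| {
        // SAFETY: every pointer is valid until this pass frees it.
        if unsafe { node.as_ref() }.marked {
            // Survivor: clear the mark for the next collection cycle.
            unsafe { (*node.as_ptr()).marked = false };
            true
        } else {
            // SAFETY: allocated from a `Box`, freed exactly once here.
            let dead = unsafe { Box::from_raw(node.as_ptr()) };
            *total_allocated -= dead.bytes;
            false
        }
    });
}

fn main() {
    let mut total = 0;
    let mut nodes: Vec<NonNull<Node>> = (0..4)
        .map(|i| {
            total += 8;
            NonNull::from(Box::leak(Box::new(Node { marked: i % 2 == 0, bytes: 8 })))
        })
        .collect();

    sweep(&mut nodes, &mut total);
    assert_eq!((nodes.len(), total), (2, 16));

    // Tear down the survivors.
    for node in nodes {
        // SAFETY: survivors are still live and freed exactly once.
        drop(unsafe { Box::from_raw(node.as_ptr()) });
    }
}
```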
     // Clean up the heap when BoaGc is dropped
-    fn dump(gc: &BoaGc) {
+    fn dump(gc: &mut BoaGc) {
         // Weak maps have to be dropped first, since the process dereferences GcBoxes.
         // This can be done without initializing a dropguard since no GcBox's are being dropped.
-        let weak_map_head = &gc.weak_map_start;
-        while let Some(node) = weak_map_head.get() {
+        for node in std::mem::take(&mut gc.weak_maps) {
             // SAFETY:
             // The `Allocator` must always ensure its start node is a valid, non-null pointer that
             // was allocated by `Box::from_raw(Box::new(..))`.
-            let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
-            weak_map_head.set(unmarked_node.next().take());
+            let _unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
         }

         // Not initializing a dropguard since this should only be invoked when BOA_GC is being dropped.
         let _guard = DropGuard::new();

-        let strong_head = &gc.strong_start;
-        while let Some(node) = strong_head.get() {
+        for node in std::mem::take(&mut gc.strongs) {
             // SAFETY:
             // The `Allocator` must always ensure its start node is a valid, non-null pointer that
             // was allocated by `Box::from_raw(Box::new(..))`.
-            let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
-            strong_head.set(unmarked_node.header.next.take());
+            let _unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
         }

-        let eph_head = &gc.weak_start;
-        while let Some(node) = eph_head.get() {
+        for node in std::mem::take(&mut gc.weaks) {
             // SAFETY:
             // The `Allocator` must always ensure its start node is a valid, non-null pointer that
             // was allocated by `Box::from_raw(Box::new(..))`.
-            let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
-            eph_head.set(unmarked_node.header().next.take());
+            let _unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
         }
     }
 }
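
`dump` uses `std::mem::take` to move each vector out of the (now `&mut`) `BoaGc` while leaving an empty `Vec` behind, so the pointers can be consumed by value without fighting the borrow checker. A brief demonstration of the idiom:

```rust
struct Gc {
    strongs: Vec<Box<u32>>, // plain `Box` stands in for the raw GC pointers
}

fn dump(gc: &mut Gc) {
    // `take` swaps in `Vec::default()` and returns the old vector by value,
    // so the loop owns (and drops) every element while `gc` stays borrowed.
    for node in std::mem::take(&mut gc.strongs) {
        drop(node);
    }
}

fn main() {
    let mut gc = Gc { strongs: vec![Box::new(1), Box::new(2)] };
    dump(&mut gc);
    assert!(gc.strongs.is_empty());
}
```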
@@ -523,6 +511,6 @@ pub fn has_weak_maps() -> bool {
     BOA_GC.with(|current| {
         let gc = current.borrow();

-        gc.weak_map_start.get().is_some()
+        !gc.weak_maps.is_empty()
     })
 }

boa_gc/src/test/mod.rs (3 changes)

@@ -21,7 +21,7 @@ impl Harness {
         BOA_GC.with(|current| {
             let gc = current.borrow();

-            assert!(gc.strong_start.get().is_none());
+            assert!(gc.strongs.is_empty());
             assert!(gc.runtime.bytes_allocated == 0);
         });
     }
@@ -43,6 +43,7 @@ impl Harness {
         }
     }

+#[track_caller]
 fn run_test(test: impl FnOnce() + Send + 'static) {
     let handle = std::thread::spawn(test);
     handle.join().unwrap();
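
The new `#[track_caller]` attribute makes panics that bubble out of `run_test` point at the failing test's call site rather than at the helper itself. The attribute chains: a `#[track_caller]` call made from inside another `#[track_caller]` function reports the outermost caller. A minimal illustration:

```rust
#[track_caller]
fn assert_even(n: u32) {
    // Because this function is `#[track_caller]`, the panic below is
    // reported at the caller's line, not at this one.
    assert!(n % 2 == 0, "expected an even number, got {n}");
}

fn main() {
    assert_even(4); // fine
    // assert_even(3); // would panic, citing *this* line in the report
}
```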

boa_gc/src/test/weak.rs (4 changes)

@@ -164,7 +164,7 @@ fn eph_self_referential() {
         *root.inner.inner.borrow_mut() = Some(eph.clone());

         assert!(eph.value().is_some());
-        Harness::assert_exact_bytes_allocated(80);
+        Harness::assert_exact_bytes_allocated(48);
     }

     *root.inner.inner.borrow_mut() = None;
@@ -210,7 +210,7 @@ fn eph_self_referential_chain() {
         assert!(eph_start.value().is_some());
         assert!(eph_chain2.value().is_some());

-        Harness::assert_exact_bytes_allocated(232);
+        Harness::assert_exact_bytes_allocated(132);
     }

     *root.borrow_mut() = None;
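
The smaller expected byte counts follow from the header diet: each type-erased `next` field was a fat pointer, 16 bytes on 64-bit targets, and the first test's drop (80 to 48) matches two such fields exactly. The second test's larger drop plausibly also includes padding changes, since removing the pointer lowers the headers' alignment. A tiny check of the field size (assuming a 64-bit target; `dyn Any` stands in for the crate's erased traits):

```rust
use std::any::Any;
use std::cell::Cell;
use std::mem::size_of;
use std::ptr::NonNull;

fn main() {
    // A type-erased pointer is "fat": one data pointer plus one vtable
    // pointer, so the whole `Cell<Option<...>>` field is 16 bytes on 64-bit.
    println!("{}", size_of::<Cell<Option<NonNull<dyn Any>>>>());
}
```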
