
Find roots when running GC rather than runtime (#3109)

* Find roots when running GC

Attempts to address issue #2773.

The existing implementation had the expensive overhead of managing root
counts, especially for mutable borrows of GcRefCell.

Instead of managing root counts, this change counts the number of Gc/WeakGc
handles located inside GC heap objects (the non-root count) and the total
number of handles (the ref count). Comparing those two numbers tells us
whether any handle lives outside the heap, i.e. whether there is a root;
see the sketch below, after the commit list.

* Fix clippy errors

* Keep reference counts in Box

* Addressing comment

* Fix clippy errors

* Fix typo

* non_root_count includes mark bit

* give a space
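
A minimal sketch of the counting idea, using toy types rather than the actual boa_gc internals: every live handle contributes to ref_count, the pre-collection pass bumps non_root_count for each handle found stored inside another heap object, and any surplus handle must live outside the heap, i.e. it is a root.

// Toy header mirroring the ref_count/non_root_count fields this commit
// adds to GcBoxHeader and EphemeronBoxHeader.
struct Header {
    ref_count: u32,      // total number of live handles to this allocation
    non_root_count: u32, // handles discovered inside other heap objects
}

impl Header {
    fn is_rooted(&self) -> bool {
        // Fewer in-heap handles than total handles means at least one
        // handle lives on the stack or in a global: a root.
        self.non_root_count < self.ref_count
    }
}

fn main() {
    let header = Header { ref_count: 2, non_root_count: 1 };
    assert!(header.is_rooted()); // one handle exists outside the heap
}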
Choongwoo Han committed 6a7862917a (via GitHub)
16 changed files (changed-line counts in parentheses):
  1. boa_engine/src/builtins/array/mod.rs (18)
  2. boa_engine/src/lib.rs (3)
  3. boa_engine/src/native_function.rs (10)
  4. boa_gc/src/cell.rs (84)
  5. boa_gc/src/internals/ephemeron_box.rs (104)
  6. boa_gc/src/internals/gc_box.rs (100)
  7. boa_gc/src/lib.rs (37)
  8. boa_gc/src/pointers/ephemeron.rs (94)
  9. boa_gc/src/pointers/gc.rs (80)
  10. boa_gc/src/pointers/mod.rs (1)
  11. boa_gc/src/pointers/rootable.rs (87)
  12. boa_gc/src/pointers/weak.rs (3)
  13. boa_gc/src/pointers/weak_map.rs (2)
  14. boa_gc/src/test/weak.rs (4)
  15. boa_gc/src/trace.rs (37)
  16. boa_macros/src/lib.rs (14)

boa_engine/src/builtins/array/mod.rs (18 changed lines)

@@ -1064,19 +1064,21 @@ impl Array {
// d. Let lowerExists be ? HasProperty(O, lowerP).
let lower_exists = o.has_property(lower, context)?;
// e. If lowerExists is true, then
let mut lower_value = JsValue::undefined();
if lower_exists {
let lower_value = if lower_exists {
// i. Let lowerValue be ? Get(O, lowerP).
lower_value = o.get(lower, context)?;
}
o.get(lower, context)?
} else {
JsValue::undefined()
};
// f. Let upperExists be ? HasProperty(O, upperP).
let upper_exists = o.has_property(upper, context)?;
// g. If upperExists is true, then
let mut upper_value = JsValue::undefined();
if upper_exists {
let upper_value = if upper_exists {
// i. Let upperValue be ? Get(O, upperP).
upper_value = o.get(upper, context)?;
}
o.get(upper, context)?
} else {
JsValue::undefined()
};
match (lower_exists, upper_exists) {
// h. If lowerExists is true and upperExists is true, then
(true, true) => {

boa_engine/src/lib.rs (3 changed lines)

@@ -118,7 +118,8 @@
clippy::cast_possible_truncation,
clippy::cast_sign_loss,
clippy::cast_precision_loss,
clippy::cast_possible_wrap
clippy::cast_possible_wrap,
clippy::must_use_candidate,
)]
extern crate static_assertions as sa;

boa_engine/src/native_function.rs (10 changed lines)

@@ -69,7 +69,7 @@ where
/// to use. All other closures can also be stored in a `NativeFunction`, albeit by using an `unsafe`
/// API, but note that passing closures implicitly capturing traceable types could cause
/// **Undefined Behaviour**.
#[derive(Clone)]
#[derive(Clone, Finalize)]
pub struct NativeFunction {
inner: Inner,
}
@@ -80,14 +80,6 @@ enum Inner {
Closure(Gc<dyn TraceableClosure>),
}
impl Finalize for NativeFunction {
fn finalize(&self) {
if let Inner::Closure(c) = &self.inner {
c.finalize();
}
}
}
// Manual implementation because deriving `Trace` triggers the `single_use_lifetimes` lint.
// SAFETY: Only closures can contain `Trace` captures, so this implementation is safe.
unsafe impl Trace for NativeFunction {

boa_gc/src/cell.rs (84 changed lines)

@@ -26,40 +26,27 @@ enum BorrowState {
Unused,
}
const ROOT: usize = 1;
const WRITING: usize = !1;
const WRITING: usize = !0;
const UNUSED: usize = 0;
/// The base borrowflag init is rooted, and has no outstanding borrows.
const BORROWFLAG_INIT: BorrowFlag = BorrowFlag(ROOT);
const BORROWFLAG_INIT: BorrowFlag = BorrowFlag(UNUSED);
impl BorrowFlag {
/// Check the current `BorrowState` of `BorrowFlag`.
const fn borrowed(self) -> BorrowState {
match self.0 & !ROOT {
match self.0 {
UNUSED => BorrowState::Unused,
WRITING => BorrowState::Writing,
_ => BorrowState::Reading,
}
}
/// Check whether the borrow bit is flagged.
const fn rooted(self) -> bool {
self.0 & ROOT > 0
}
/// Set the `BorrowFlag`'s state to writing.
const fn set_writing(self) -> Self {
// Set every bit other than the root bit, which is preserved
Self(self.0 | WRITING)
}
/// Remove the root flag on `BorrowFlag`
const fn set_unused(self) -> Self {
// Clear every bit other than the root bit, which is preserved
Self(self.0 & ROOT)
}
/// Increments the counter for a new borrow.
///
/// # Panic
@@ -67,12 +54,7 @@ impl BorrowFlag {
/// - This method will panic after incrementing if the borrow count overflows.
fn add_reading(self) -> Self {
assert!(self.borrowed() != BorrowState::Writing);
// Add 1 to the integer starting at the second binary digit. As our
// borrowstate is not writing, we know that overflow cannot happen, so
// this is equivalent to the following, more complicated, expression:
//
// BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) + 1) << 1))
let flags = Self(self.0 + 0b10);
let flags = Self(self.0 + 1);
// This will fail if the borrow count overflows, which shouldn't happen,
// but let's be safe
@@ -88,26 +70,13 @@ impl BorrowFlag {
/// - This method will panic if the current `BorrowState` is not reading.
fn sub_reading(self) -> Self {
assert!(self.borrowed() == BorrowState::Reading);
// Subtract 1 from the integer starting at the second binary digit. As
// our borrowstate is not writing or unused, we know that overflow or
// underflow cannot happen, so this is equivalent to the following, more
// complicated, expression:
//
// BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) - 1) << 1))
Self(self.0 - 0b10)
}
/// Set the root flag on the `BorrowFlag`.
fn set_rooted(self, rooted: bool) -> Self {
// Preserve the non-root bits
Self((self.0 & !ROOT) | (usize::from(rooted)))
Self(self.0 - 1)
}
}
impl Debug for BorrowFlag {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("BorrowFlag")
.field("Rooted", &self.rooted())
.field("State", &self.borrowed())
.finish()
}
@@ -214,12 +183,6 @@ impl<T: Trace + ?Sized> GcRefCell<T> {
// SAFETY: This is safe as the value is rooted if it was not previously rooted,
// so it cannot be dropped.
unsafe {
// Force the val_ref's contents to be rooted for the duration of the
// mutable borrow
if !self.flags.get().rooted() {
(*self.cell.get()).root();
}
Ok(GcRefMut {
gc_cell: self,
value: &mut *self.cell.get(),
@@ -263,25 +226,11 @@ unsafe impl<T: Trace + ?Sized> Trace for GcRefCell<T> {
}
}
unsafe fn root(&self) {
assert!(!self.flags.get().rooted(), "Can't root a GcCell twice!");
self.flags.set(self.flags.get().set_rooted(true));
match self.flags.get().borrowed() {
BorrowState::Writing => (),
// SAFETY: Please see GcCell's Trace impl Safety note.
_ => unsafe { (*self.cell.get()).root() },
}
}
unsafe fn unroot(&self) {
assert!(self.flags.get().rooted(), "Can't unroot a GcCell twice!");
self.flags.set(self.flags.get().set_rooted(false));
fn trace_non_roots(&self) {
match self.flags.get().borrowed() {
BorrowState::Writing => (),
// SAFETY: Please see GcCell's Trace impl Safety note.
_ => unsafe { (*self.cell.get()).unroot() },
_ => unsafe { (*self.cell.get()).trace_non_roots() },
}
}
@@ -407,12 +356,12 @@ impl<T: ?Sized + Display> Display for GcRef<'_, T> {
}
/// A wrapper type for a mutably borrowed value from a `GcCell<T>`.
pub struct GcRefMut<'a, T: Trace + ?Sized + 'static, U: ?Sized = T> {
pub struct GcRefMut<'a, T: ?Sized + 'static, U: ?Sized = T> {
pub(crate) gc_cell: &'a GcRefCell<T>,
pub(crate) value: &'a mut U,
}
impl<'a, T: Trace + ?Sized, U: ?Sized> GcRefMut<'a, T, U> {
impl<'a, T: ?Sized, U: ?Sized> GcRefMut<'a, T, U> {
/// Makes a new `GcCellRefMut` for a component of the borrowed data, e.g., an enum
/// variant.
///
@@ -457,21 +406,10 @@ impl<T: Trace + ?Sized, U: ?Sized> DerefMut for GcRefMut<'_, T, U> {
}
}
impl<T: Trace + ?Sized, U: ?Sized> Drop for GcRefMut<'_, T, U> {
impl<T: ?Sized, U: ?Sized> Drop for GcRefMut<'_, T, U> {
fn drop(&mut self) {
debug_assert!(self.gc_cell.flags.get().borrowed() == BorrowState::Writing);
// Restore the rooted state of the GcCell's contents to the state of the GcCell.
// During the lifetime of the GcCellRefMut, the GcCell's contents are rooted.
if !self.gc_cell.flags.get().rooted() {
// SAFETY: If `GcCell` is no longer rooted, then unroot it. This should be safe
// as the internal `GcBox` should be guaranteed to have at least 1 root.
unsafe {
(*self.gc_cell.cell.get()).unroot();
}
}
self.gc_cell
.flags
.set(self.gc_cell.flags.get().set_unused());
self.gc_cell.flags.set(BorrowFlag(UNUSED));
}
}
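
With the root bit gone, a mutable borrow is pure flag bookkeeping: borrow_mut only moves the flag to WRITING, and the guard's Drop resets it to UNUSED instead of re-rooting the cell's contents. A small usage sketch, assuming Vec<i32>: Trace as provided by boa_gc:

use boa_gc::{Gc, GcRefCell};

fn main() {
    let cell = Gc::new(GcRefCell::new(vec![1, 2, 3]));
    {
        // Under the old scheme this borrow had to root the contents and
        // the guard's Drop had to unroot them; now it only sets the flag.
        let mut contents = cell.borrow_mut();
        contents.push(4);
    } // guard dropped here: flag reset to UNUSED, nothing to unroot
    assert_eq!(cell.borrow().len(), 4);
}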

boa_gc/src/internals/ephemeron_box.rs (104 changed lines)

@@ -4,22 +4,21 @@ use std::{
ptr::{self, NonNull},
};
// Age and Weak Flags
const MARK_MASK: usize = 1 << (usize::BITS - 1);
const ROOTS_MASK: usize = !MARK_MASK;
const ROOTS_MAX: usize = ROOTS_MASK;
const MARK_MASK: u32 = 1 << (u32::BITS - 1);
const NON_ROOTS_MASK: u32 = !MARK_MASK;
const NON_ROOTS_MAX: u32 = NON_ROOTS_MASK;
/// The `EphemeronBoxHeader` contains the `EphemeronBox`'s current state for the `Collector`'s
/// Mark/Sweep as well as a pointer to the next ephemeron in the heap.
///
/// These flags include:
/// - Root Count
/// - Mark Flag Bit
/// `ref_count` is the number of Gc instances, and `non_root_count` is the number of
/// Gc instances in the heap. `non_root_count` also includes Mark Flag bit.
///
/// The next node is set by the `Allocator` during initialization and by the
/// `Collector` during the sweep phase.
pub(crate) struct EphemeronBoxHeader {
roots: Cell<usize>,
ref_count: Cell<u32>,
non_root_count: Cell<u32>,
pub(crate) next: Cell<Option<NonNull<dyn ErasedEphemeronBox>>>,
}
@@ -27,57 +26,64 @@ impl EphemeronBoxHeader {
/// Creates a new `EphemeronBoxHeader` with a root of 1 and next set to None.
pub(crate) fn new() -> Self {
Self {
roots: Cell::new(1),
ref_count: Cell::new(1),
non_root_count: Cell::new(0),
next: Cell::new(None),
}
}
/// Returns the `EphemeronBoxHeader`'s current root count
pub(crate) fn roots(&self) -> usize {
self.roots.get() & ROOTS_MASK
/// Returns the `EphemeronBoxHeader`'s current ref count
pub(crate) fn get_ref_count(&self) -> u32 {
self.ref_count.get()
}
/// Increments `EphemeronBoxHeader`'s root count.
pub(crate) fn inc_roots(&self) {
let roots = self.roots.get();
/// Returns a count for non-roots.
pub(crate) fn get_non_root_count(&self) -> u32 {
self.non_root_count.get() & NON_ROOTS_MASK
}
/// Increments `EphemeronBoxHeader`'s non-roots count.
pub(crate) fn inc_non_root_count(&self) {
let non_root_count = self.non_root_count.get();
if (roots & ROOTS_MASK) < ROOTS_MAX {
self.roots.set(roots + 1);
if (non_root_count & NON_ROOTS_MASK) < NON_ROOTS_MAX {
self.non_root_count.set(non_root_count.wrapping_add(1));
} else {
// TODO: implement a better way to handle root overload.
panic!("roots counter overflow");
panic!("non roots counter overflow");
}
}
/// Decreases `EphemeronBoxHeader`'s current root count.
pub(crate) fn dec_roots(&self) {
// Underflow check as a stop gap for current issue when dropping.
if self.roots.get() > 0 {
self.roots.set(self.roots.get() - 1);
}
/// Reset non-roots count to zero.
pub(crate) fn reset_non_root_count(&self) {
self.non_root_count
.set(self.non_root_count.get() & !NON_ROOTS_MASK);
}
/// Returns a bool for whether `EphemeronBoxHeader`'s mark bit is 1.
/// Returns a bool for whether `GcBoxHeader`'s mark bit is 1.
pub(crate) fn is_marked(&self) -> bool {
self.roots.get() & MARK_MASK != 0
self.non_root_count.get() & MARK_MASK != 0
}
/// Sets `EphemeronBoxHeader`'s mark bit to 1.
/// Sets `GcBoxHeader`'s mark bit to 1.
pub(crate) fn mark(&self) {
self.roots.set(self.roots.get() | MARK_MASK);
self.non_root_count
.set(self.non_root_count.get() | MARK_MASK);
}
/// Sets `EphemeronBoxHeader`'s mark bit to 0.
/// Sets `GcBoxHeader`'s mark bit to 0.
pub(crate) fn unmark(&self) {
self.roots.set(self.roots.get() & !MARK_MASK);
self.non_root_count
.set(self.non_root_count.get() & !MARK_MASK);
}
}
impl core::fmt::Debug for EphemeronBoxHeader {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_struct("EphemeronBoxHeader")
.field("roots", &self.roots())
.field("marked", &self.is_marked())
.field("ref_count", &self.get_ref_count())
.field("non_root_count", &self.get_non_root_count())
.finish()
}
}
@@ -138,18 +144,19 @@ impl<K: Trace + ?Sized, V: Trace> EphemeronBox<K, V> {
self.header.mark();
}
/// Increases the root count on this `EphemeronBox`.
///
/// Roots prevent the `EphemeronBox` from being destroyed by the garbage collector.
pub(crate) fn root(&self) {
self.header.inc_roots();
#[inline]
pub(crate) fn inc_ref_count(&self) {
self.header.ref_count.set(self.header.ref_count.get() + 1);
}
/// Decreases the root count on this `EphemeronBox`.
///
/// Roots prevent the `EphemeronBox` from being destroyed by the garbage collector.
pub(crate) fn unroot(&self) {
self.header.dec_roots();
#[inline]
pub(crate) fn dec_ref_count(&self) {
self.header.ref_count.set(self.header.ref_count.get() - 1);
}
#[inline]
pub(crate) fn inc_non_root_count(&self) {
self.header.inc_non_root_count();
}
}
@@ -163,6 +170,8 @@ pub(crate) trait ErasedEphemeronBox {
/// "successfully traced".
unsafe fn trace(&self) -> bool;
fn trace_non_roots(&self);
/// Runs the finalization logic of the `EphemeronBox`'s held value, if the key is still live,
/// and clears its contents.
fn finalize_and_clear(&self);
@@ -199,12 +208,21 @@ impl<K: Trace + ?Sized, V: Trace> ErasedEphemeronBox for EphemeronBox<K, V> {
is_key_marked
}
fn trace_non_roots(&self) {
let Some(data) = self.data.get() else {
return;
};
// SAFETY: `data` comes from a `Box`, so it is safe to dereference.
unsafe {
data.as_ref().value.trace_non_roots();
};
}
fn finalize_and_clear(&self) {
if let Some(data) = self.data.take() {
// SAFETY: `data` comes from an `into_raw` call, so this pointer is safe to pass to
// `from_raw`.
let contents = unsafe { Box::from_raw(data.as_ptr()) };
Trace::run_finalizer(&contents.value);
let _contents = unsafe { Box::from_raw(data.as_ptr()) };
}
}
}
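
As the commit message notes, non_root_count also carries the mark bit: both headers pack the flag into the top bit of the u32, so marking never disturbs the count, and reset_non_root_count clears only the low bits. A standalone sketch of the masking, using the same constants as the diff:

const MARK_MASK: u32 = 1 << (u32::BITS - 1);
const NON_ROOTS_MASK: u32 = !MARK_MASK;

fn main() {
    let mut bits: u32 = 5;                // non_root_count = 5, unmarked
    bits |= MARK_MASK;                    // mark()
    assert_eq!(bits & NON_ROOTS_MASK, 5); // marking leaves the count intact
    bits &= !NON_ROOTS_MASK;              // reset_non_root_count()
    assert_eq!(bits & NON_ROOTS_MASK, 0); // count cleared...
    assert_ne!(bits & MARK_MASK, 0);      // ...mark bit preserved
}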

boa_gc/src/internals/gc_box.rs (100 changed lines)

@@ -5,22 +5,21 @@ use std::{
ptr::{self, NonNull},
};
// Age and Weak Flags
const MARK_MASK: usize = 1 << (usize::BITS - 1);
const ROOTS_MASK: usize = !MARK_MASK;
const ROOTS_MAX: usize = ROOTS_MASK;
const MARK_MASK: u32 = 1 << (u32::BITS - 1);
const NON_ROOTS_MASK: u32 = !MARK_MASK;
const NON_ROOTS_MAX: u32 = NON_ROOTS_MASK;
/// The `GcBoxHeader` contains the `GcBox`'s current state for the `Collector`'s
/// Mark/Sweep as well as a pointer to the next node in the heap.
///
/// These flags include:
/// - Root Count
/// - Mark Flag Bit
/// `ref_count` is the number of Gc instances, and `non_root_count` is the number of
/// Gc instances in the heap. `non_root_count` also includes Mark Flag bit.
///
/// The next node is set by the `Allocator` during initialization and by the
/// `Collector` during the sweep phase.
pub(crate) struct GcBoxHeader {
roots: Cell<usize>,
ref_count: Cell<u32>,
non_root_count: Cell<u32>,
pub(crate) next: Cell<Option<NonNull<GcBox<dyn Trace>>>>,
}
@@ -28,57 +27,59 @@ impl GcBoxHeader {
/// Creates a new `GcBoxHeader` with a root of 1 and next set to None.
pub(crate) fn new() -> Self {
Self {
roots: Cell::new(1),
ref_count: Cell::new(1),
non_root_count: Cell::new(0),
next: Cell::new(None),
}
}
/// Returns the `GcBoxHeader`'s current root count
pub(crate) fn roots(&self) -> usize {
self.roots.get() & ROOTS_MASK
/// Returns the `GcBoxHeader`'s current non-roots count
pub(crate) fn get_non_root_count(&self) -> u32 {
self.non_root_count.get() & NON_ROOTS_MASK
}
/// Increments `GcBoxHeader`'s root count.
pub(crate) fn inc_roots(&self) {
let roots = self.roots.get();
/// Increments `GcBoxHeader`'s non-roots count.
pub(crate) fn inc_non_root_count(&self) {
let non_root_count = self.non_root_count.get();
if (roots & ROOTS_MASK) < ROOTS_MAX {
self.roots.set(roots + 1);
if (non_root_count & NON_ROOTS_MASK) < NON_ROOTS_MAX {
self.non_root_count.set(non_root_count.wrapping_add(1));
} else {
// TODO: implement a better way to handle root overload.
panic!("roots counter overflow");
panic!("non-roots counter overflow");
}
}
/// Decreases `GcBoxHeader`'s current root count.
pub(crate) fn dec_roots(&self) {
// Underflow check as a stop gap for current issue when dropping.
if self.roots.get() > 0 {
self.roots.set(self.roots.get() - 1);
}
/// Decreases `GcBoxHeader`'s current non-roots count.
pub(crate) fn reset_non_root_count(&self) {
self.non_root_count
.set(self.non_root_count.get() & !NON_ROOTS_MASK);
}
/// Returns a bool for whether `GcBoxHeader`'s mark bit is 1.
pub(crate) fn is_marked(&self) -> bool {
self.roots.get() & MARK_MASK != 0
self.non_root_count.get() & MARK_MASK != 0
}
/// Sets `GcBoxHeader`'s mark bit to 1.
pub(crate) fn mark(&self) {
self.roots.set(self.roots.get() | MARK_MASK);
self.non_root_count
.set(self.non_root_count.get() | MARK_MASK);
}
/// Sets `GcBoxHeader`'s mark bit to 0.
pub(crate) fn unmark(&self) {
self.roots.set(self.roots.get() & !MARK_MASK);
self.non_root_count
.set(self.non_root_count.get() & !MARK_MASK);
}
}
impl fmt::Debug for GcBoxHeader {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("GcBoxHeader")
.field("roots", &self.roots())
.field("marked", &self.is_marked())
.field("ref_count", &self.ref_count.get())
.field("non_root_count", &self.get_non_root_count())
.finish()
}
}
@@ -122,20 +123,6 @@ impl<T: Trace + ?Sized> GcBox<T> {
}
}
/// Increases the root count on this `GcBox`.
///
/// Roots prevent the `GcBox` from being destroyed by the garbage collector.
pub(crate) fn root(&self) {
self.header.inc_roots();
}
/// Decreases the root count on this `GcBox`.
///
/// Roots prevent the `GcBox` from being destroyed by the garbage collector.
pub(crate) fn unroot(&self) {
self.header.dec_roots();
}
/// Returns a reference to the `GcBox`'s value.
pub(crate) const fn value(&self) -> &T {
&self.value
@@ -145,4 +132,33 @@ impl<T: Trace + ?Sized> GcBox<T> {
pub(crate) fn is_marked(&self) -> bool {
self.header.is_marked()
}
#[inline]
pub(crate) fn get_ref_count(&self) -> u32 {
self.header.ref_count.get()
}
#[inline]
pub(crate) fn inc_ref_count(&self) {
self.header.ref_count.set(self.header.ref_count.get() + 1);
}
#[inline]
pub(crate) fn dec_ref_count(&self) {
self.header.ref_count.set(self.header.ref_count.get() - 1);
}
#[inline]
pub(crate) fn get_non_root_count(&self) -> u32 {
self.header.get_non_root_count()
}
#[inline]
pub(crate) fn inc_non_root_count(&self) {
self.header.inc_non_root_count();
}
pub(crate) fn reset_non_root_count(&self) {
self.header.reset_non_root_count();
}
}

boa_gc/src/lib.rs (37 changed lines)

@@ -283,6 +283,9 @@ impl Collector {
fn collect(gc: &mut BoaGc) {
let _timer = Profiler::global().start_event("Gc Full Collection", "gc");
gc.runtime.collections += 1;
Self::trace_non_roots(gc);
let unreachables = Self::mark_heap(&gc.strong_start, &gc.weak_start, &gc.weak_map_start);
// Only finalize if there are any unreachable nodes.
@@ -324,6 +327,26 @@ impl Collector {
}
}
fn trace_non_roots(gc: &mut BoaGc) {
// Count all the handles located in GC heap.
// Then, we can find whether there is a reference from other places, and they are the roots.
let mut strong = &gc.strong_start;
while let Some(node) = strong.get() {
// SAFETY: node must be valid as this phase cannot drop any node.
let node_ref = unsafe { node.as_ref() };
node_ref.value().trace_non_roots();
strong = &node_ref.header.next;
}
let mut weak = &gc.weak_start;
while let Some(eph) = weak.get() {
// SAFETY: node must be valid as this phase cannot drop any node.
let eph_ref = unsafe { eph.as_ref() };
eph_ref.trace_non_roots();
weak = &eph_ref.header().next;
}
}
/// Walk the heap and mark any nodes deemed reachable
fn mark_heap(
mut strong: &Cell<Option<NonNull<GcBox<dyn Trace>>>>,
@@ -331,6 +354,7 @@ impl Collector {
mut weak_map: &Cell<Option<ErasedWeakMapBoxPointer>>,
) -> Unreachables {
let _timer = Profiler::global().start_event("Gc Marking", "gc");
// Walk the list, tracing and marking the nodes
let mut strong_dead = Vec::new();
let mut pending_ephemerons = Vec::new();
@@ -341,9 +365,8 @@ impl Collector {
while let Some(node) = strong.get() {
// SAFETY: node must be valid as this phase cannot drop any node.
let node_ref = unsafe { node.as_ref() };
if node_ref.header.roots() != 0 {
// SAFETY: the reference to node must be valid as it is rooted. Passing
// invalid references can result in Undefined Behavior
if node_ref.get_non_root_count() < node_ref.get_ref_count() {
// SAFETY: the gc heap object should be alive if there is a root.
unsafe {
node_ref.mark_and_trace();
}
@@ -375,7 +398,7 @@ impl Collector {
// SAFETY: node must be valid as this phase cannot drop any node.
let eph_ref = unsafe { eph.as_ref() };
let header = eph_ref.header();
if header.roots() != 0 {
if header.get_non_root_count() < header.get_ref_count() {
header.mark();
}
// SAFETY: the garbage collector ensures `eph_ref` always points to valid data.
@@ -463,8 +486,9 @@ impl Collector {
while let Some(node) = strong.get() {
// SAFETY: The caller must ensure the validity of every node of `heap_start`.
let node_ref = unsafe { node.as_ref() };
if node_ref.header.roots() > 0 || node_ref.is_marked() {
if node_ref.is_marked() {
node_ref.header.unmark();
node_ref.reset_non_root_count();
strong = &node_ref.header.next;
} else {
// SAFETY: The algorithm ensures only unmarked/unreachable pointers are dropped.
@@ -480,8 +504,9 @@ impl Collector {
// SAFETY: The caller must ensure the validity of every node of `heap_start`.
let eph_ref = unsafe { eph.as_ref() };
let header = eph_ref.header();
if header.roots() > 0 || header.is_marked() {
if header.is_marked() {
header.unmark();
header.reset_non_root_count();
weak = &header.next;
} else {
// SAFETY: The algorithm ensures only unmarked/unreachable pointers are dropped.
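
End to end, a collection now runs trace_non_roots first and then treats any node with non_root_count < ref_count as a root while marking. A usage sketch, assuming boa_gc::force_collect as the manual collection entry point:

use boa_gc::{force_collect, Gc};

fn main() {
    let a = Gc::new(42_i32);
    let b = a.clone(); // ref_count = 2, both handles on the stack
    drop(b);           // ref_count = 1

    // trace_non_roots finds no handle to `a` inside the heap, so
    // non_root_count (0) < ref_count (1): `a` is a root and survives.
    force_collect();
    assert_eq!(*a, 42);
}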

boa_gc/src/pointers/ephemeron.rs (94 changed lines)

@@ -4,9 +4,7 @@ use crate::{
trace::{Finalize, Trace},
Allocator, Gc,
};
use std::{cell::Cell, ptr::NonNull};
use super::rootable::Rootable;
use std::ptr::NonNull;
/// A key-value pair where the value becomes inaccessible when the key is garbage collected.
///
@@ -18,7 +16,7 @@ use super::rootable::Rootable;
/// [acm]: https://dl.acm.org/doi/10.1145/263700.263733
#[derive(Debug)]
pub struct Ephemeron<K: Trace + ?Sized + 'static, V: Trace + 'static> {
inner_ptr: Cell<Rootable<EphemeronBox<K, V>>>,
inner_ptr: NonNull<EphemeronBox<K, V>>,
}
impl<K: Trace + ?Sized, V: Trace + Clone> Ephemeron<K, V> {
@@ -26,62 +24,38 @@ impl<K: Trace + ?Sized, V: Trace + Clone> Ephemeron<K, V> {
///
/// This needs to return a clone of the value because holding a reference to it between
/// garbage collection passes could drop the underlying allocation, causing a Use After Free.
#[must_use]
pub fn value(&self) -> Option<V> {
// SAFETY: this is safe because `Ephemeron` is tracked to always point to a valid pointer
// `inner_ptr`.
unsafe { self.inner_ptr.get().as_ref().value().cloned() }
unsafe { self.inner_ptr.as_ref().value().cloned() }
}
/// Checks if the [`Ephemeron`] has a value.
#[must_use]
pub fn has_value(&self) -> bool {
// SAFETY: this is safe because `Ephemeron` is tracked to always point to a valid pointer
// `inner_ptr`.
unsafe { self.inner_ptr.get().as_ref().value().is_some() }
unsafe { self.inner_ptr.as_ref().value().is_some() }
}
}
impl<K: Trace + ?Sized, V: Trace> Ephemeron<K, V> {
/// Creates a new `Ephemeron`.
pub fn new(key: &Gc<K>, value: V) -> Self {
// SAFETY: `value` comes from the stack and should be rooted, meaning unrooting
// it to pass it to the underlying `EphemeronBox` is safe.
unsafe {
value.unroot();
}
// SAFETY: EphemeronBox is at least 2 bytes in size, and so its alignment is always a
// multiple of 2.
unsafe {
Self {
inner_ptr: Cell::new(
Rootable::new_unchecked(Allocator::alloc_ephemeron(EphemeronBox::new(
key, value,
)))
.rooted(),
),
}
}
let inner_ptr = Allocator::alloc_ephemeron(EphemeronBox::new(key, value));
Self { inner_ptr }
}
/// Returns `true` if the two `Ephemeron`s point to the same allocation.
#[must_use]
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
EphemeronBox::ptr_eq(this.inner(), other.inner())
}
fn is_rooted(&self) -> bool {
self.inner_ptr.get().is_rooted()
}
fn root_ptr(&self) {
self.inner_ptr.set(self.inner_ptr.get().rooted());
}
fn unroot_ptr(&self) {
self.inner_ptr.set(self.inner_ptr.get().unrooted());
}
pub(crate) fn inner_ptr(&self) -> NonNull<EphemeronBox<K, V>> {
assert!(finalizer_safe() || self.is_rooted());
self.inner_ptr.get().as_ptr()
assert!(finalizer_safe());
self.inner_ptr
}
fn inner(&self) -> &EphemeronBox<K, V> {
@@ -96,18 +70,21 @@ impl<K: Trace + ?Sized, V: Trace> Ephemeron<K, V> {
/// This function is unsafe because improper use may lead to memory corruption, double-free,
/// or misbehaviour of the garbage collector.
#[must_use]
unsafe fn from_raw(ptr: NonNull<EphemeronBox<K, V>>) -> Self {
// SAFETY: it is the caller's job to ensure the safety of this operation.
const unsafe fn from_raw(inner_ptr: NonNull<EphemeronBox<K, V>>) -> Self {
Self { inner_ptr }
}
}
impl<K: Trace + ?Sized, V: Trace> Finalize for Ephemeron<K, V> {
fn finalize(&self) {
// SAFETY: inner_ptr should be alive when calling finalize.
// We don't call inner_ptr() to avoid overhead of calling finalizer_safe().
unsafe {
Self {
inner_ptr: Cell::new(Rootable::new_unchecked(ptr).rooted()),
}
self.inner_ptr.as_ref().dec_ref_count();
}
}
}
impl<K: Trace + ?Sized, V: Trace> Finalize for Ephemeron<K, V> {}
// SAFETY: `Ephemeron`s trace implementation only marks its inner box because we want to stop
// tracing through weakly held pointers.
unsafe impl<K: Trace + ?Sized, V: Trace> Trace for Ephemeron<K, V> {
@@ -119,22 +96,8 @@ unsafe impl<K: Trace + ?Sized, V: Trace> Trace for Ephemeron<K, V> {
}
}
unsafe fn root(&self) {
assert!(!self.is_rooted(), "Can't double-root a Gc<T>");
// Try to get inner before modifying our state. Inner may be
// inaccessible due to this method being invoked during the sweeping
// phase, and we don't want to modify our state before panicking.
self.inner().root();
self.root_ptr();
}
unsafe fn unroot(&self) {
assert!(self.is_rooted(), "Can't double-unroot a Gc<T>");
// Try to get inner before modifying our state. Inner may be
// inaccessible due to this method being invoked during the sweeping
// phase, and we don't want to modify our state before panicking.
self.inner().unroot();
self.unroot_ptr();
fn trace_non_roots(&self) {
self.inner().inc_non_root_count();
}
fn run_finalizer(&self) {
@@ -145,11 +108,7 @@ unsafe impl<K: Trace + ?Sized, V: Trace> Trace for Ephemeron<K, V> {
impl<K: Trace + ?Sized, V: Trace> Clone for Ephemeron<K, V> {
fn clone(&self) -> Self {
let ptr = self.inner_ptr();
// SAFETY: since an `Ephemeron` is always valid, its `inner_ptr` must also be always a valid
// pointer.
unsafe {
ptr.as_ref().root();
}
self.inner().inc_ref_count();
// SAFETY: `&self` is a valid Ephemeron pointer.
unsafe { Self::from_raw(ptr) }
}
@@ -157,9 +116,8 @@ impl<K: Trace + ?Sized, V: Trace> Clone for Ephemeron<K, V> {
impl<K: Trace + ?Sized, V: Trace> Drop for Ephemeron<K, V> {
fn drop(&mut self) {
// If this pointer was a root, we should unroot it.
if self.is_rooted() {
self.inner().unroot();
if finalizer_safe() {
Finalize::finalize(self);
}
}
}

boa_gc/src/pointers/gc.rs (80 changed lines)

@@ -5,7 +5,6 @@ use crate::{
Allocator,
};
use std::{
cell::Cell,
cmp::Ordering,
fmt::{self, Debug, Display},
hash::{Hash, Hasher},
@@ -15,11 +14,9 @@ use std::{
rc::Rc,
};
use super::rootable::Rootable;
/// A garbage-collected pointer type over an immutable value.
pub struct Gc<T: Trace + ?Sized + 'static> {
pub(crate) inner_ptr: Cell<Rootable<GcBox<T>>>,
pub(crate) inner_ptr: NonNull<GcBox<T>>,
pub(crate) marker: PhantomData<Rc<T>>,
}
@@ -31,14 +28,8 @@ impl<T: Trace> Gc<T> {
// Note: Allocator can cause Collector to run
let inner_ptr = Allocator::alloc_gc(GcBox::new(value));
// SAFETY: inner_ptr was just allocated, so it must be a valid value that implements [`Trace`]
unsafe { (*inner_ptr.as_ptr()).value().unroot() }
// SAFETY: inner_ptr is 2-byte aligned.
let inner_ptr = unsafe { Rootable::new_unchecked(inner_ptr) };
Self {
inner_ptr: Cell::new(inner_ptr.rooted()),
inner_ptr,
marker: PhantomData,
}
}
@@ -46,6 +37,7 @@ impl<T: Trace> Gc<T> {
/// Consumes the `Gc`, returning a wrapped raw pointer.
///
/// To avoid a memory leak, the pointer must be converted back to a `Gc` using [`Gc::from_raw`].
#[must_use]
pub fn into_raw(this: Self) -> NonNull<GcBox<T>> {
let ptr = this.inner_ptr();
std::mem::forget(this);
@@ -55,6 +47,7 @@ impl<T: Trace> Gc<T> {
impl<T: Trace + ?Sized> Gc<T> {
/// Returns `true` if the two `Gc`s point to the same allocation.
#[must_use]
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
GcBox::ptr_eq(this.inner(), other.inner())
}
@@ -69,33 +62,18 @@ impl<T: Trace + ?Sized> Gc<T> {
/// This function is unsafe because improper use may lead to memory corruption, double-free,
/// or misbehaviour of the garbage collector.
#[must_use]
pub unsafe fn from_raw(ptr: NonNull<GcBox<T>>) -> Self {
// SAFETY: it is the caller's job to ensure the safety of this operation.
unsafe {
Self {
inner_ptr: Cell::new(Rootable::new_unchecked(ptr).rooted()),
marker: PhantomData,
}
pub const unsafe fn from_raw(inner_ptr: NonNull<GcBox<T>>) -> Self {
Self {
inner_ptr,
marker: PhantomData,
}
}
}
impl<T: Trace + ?Sized> Gc<T> {
fn is_rooted(&self) -> bool {
self.inner_ptr.get().is_rooted()
}
fn root_ptr(&self) {
self.inner_ptr.set(self.inner_ptr.get().rooted());
}
fn unroot_ptr(&self) {
self.inner_ptr.set(self.inner_ptr.get().unrooted());
}
pub(crate) fn inner_ptr(&self) -> NonNull<GcBox<T>> {
assert!(finalizer_safe() || self.is_rooted());
self.inner_ptr.get().as_ptr()
assert!(finalizer_safe());
self.inner_ptr
}
fn inner(&self) -> &GcBox<T> {
@@ -104,7 +82,15 @@ impl<T: Trace + ?Sized> Gc<T> {
}
}
impl<T: Trace + ?Sized> Finalize for Gc<T> {}
impl<T: Trace + ?Sized> Finalize for Gc<T> {
fn finalize(&self) {
// SAFETY: inner_ptr should be alive when calling finalize.
// We don't call inner_ptr() to avoid overhead of calling finalizer_safe().
unsafe {
self.inner_ptr.as_ref().dec_ref_count();
}
}
}
// SAFETY: `Gc` maintains its own rootedness and implements all methods of
// Trace. It is not possible to root an already rooted `Gc` and vice versa.
@@ -116,22 +102,8 @@ unsafe impl<T: Trace + ?Sized> Trace for Gc<T> {
}
}
unsafe fn root(&self) {
assert!(!self.is_rooted(), "Can't double-root a Gc<T>");
// Try to get inner before modifying our state. Inner may be
// inaccessible due to this method being invoked during the sweeping
// phase, and we don't want to modify our state before panicking.
self.inner().root();
self.root_ptr();
}
unsafe fn unroot(&self) {
assert!(self.is_rooted(), "Can't double-unroot a Gc<T>");
// Try to get inner before modifying our state. Inner may be
// inaccessible due to this method being invoked during the sweeping
// phase, and we don't want to modify our state before panicking.
self.inner().unroot();
self.unroot_ptr();
fn trace_non_roots(&self) {
self.inner().inc_non_root_count();
}
fn run_finalizer(&self) {
@@ -142,10 +114,7 @@ unsafe impl<T: Trace + ?Sized> Trace for Gc<T> {
impl<T: Trace + ?Sized> Clone for Gc<T> {
fn clone(&self) -> Self {
let ptr = self.inner_ptr();
// SAFETY: since a `Gc` is always valid, its `inner_ptr` must also be always a valid pointer.
unsafe {
ptr.as_ref().root();
}
self.inner().inc_ref_count();
// SAFETY: though `ptr` doesn't come from a `into_raw` call, it essentially does the same,
// but it skips the call to `std::mem::forget` since we have a reference instead of an owned
// value.
@@ -163,9 +132,8 @@ impl<T: Trace + ?Sized> Deref for Gc<T> {
impl<T: Trace + ?Sized> Drop for Gc<T> {
fn drop(&mut self) {
// If this pointer was a root, we should unroot it.
if self.is_rooted() {
self.inner().unroot();
if finalizer_safe() {
Finalize::finalize(self);
}
}
}
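
Clone and Drop now form a plain reference-counting pair: cloning increments ref_count, and dropping decrements it through Finalize::finalize, guarded by finalizer_safe() so that handle drops triggered while the collector is tearing nodes down don't touch headers that may already be gone. A sketch of the invariant, with a toy counter standing in for GcBoxHeader:

use std::cell::Cell;

// Outside of sweeps, ref_count always equals the number of live handles.
struct ToyHeader {
    ref_count: Cell<u32>,
}

impl ToyHeader {
    fn clone_handle(&self) {
        // What Gc::clone does via inc_ref_count.
        self.ref_count.set(self.ref_count.get() + 1);
    }
    fn drop_handle(&self) {
        // What Gc::drop does via Finalize::finalize; the real impl skips
        // this while finalizer_safe() is false.
        self.ref_count.set(self.ref_count.get() - 1);
    }
}

fn main() {
    let header = ToyHeader { ref_count: Cell::new(1) };
    header.clone_handle();
    header.drop_handle();
    assert_eq!(header.ref_count.get(), 1);
}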

boa_gc/src/pointers/mod.rs (1 changed line)

@@ -2,7 +2,6 @@
mod ephemeron;
mod gc;
mod rootable;
mod weak;
mod weak_map;

boa_gc/src/pointers/rootable.rs (87 changed lines)

@@ -1,87 +0,0 @@
use std::ptr::{self, addr_of_mut, NonNull};
/// A [`NonNull`] pointer with a `rooted` tag.
///
/// This pointer can be created only from pointers that are 2-byte aligned. In other words,
/// the pointer must point to an address that is a multiple of 2.
pub(crate) struct Rootable<T: ?Sized> {
ptr: NonNull<T>,
}
impl<T: ?Sized> Copy for Rootable<T> {}
impl<T: ?Sized> Clone for Rootable<T> {
fn clone(&self) -> Self {
Self { ptr: self.ptr }
}
}
impl<T: ?Sized> std::fmt::Debug for Rootable<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Rootable")
.field("ptr", &self.as_ptr())
.field("is_rooted", &self.is_rooted())
.finish()
}
}
impl<T: ?Sized> Rootable<T> {
/// Creates a new `Rootable` without checking if the [`NonNull`] is properly aligned.
///
/// # Safety
///
/// `ptr` must be 2-byte aligned.
pub(crate) const unsafe fn new_unchecked(ptr: NonNull<T>) -> Self {
Self { ptr }
}
/// Returns `true` if the pointer is rooted.
pub(crate) fn is_rooted(self) -> bool {
self.ptr.as_ptr().cast::<u8>() as usize & 1 != 0
}
/// Returns a pointer with the same address as `self` but rooted.
pub(crate) fn rooted(self) -> Self {
let ptr = self.ptr.as_ptr();
let data = ptr.cast::<u8>();
let addr = data as isize;
let ptr = set_data_ptr(ptr, data.wrapping_offset((addr | 1) - addr));
// SAFETY: ptr must be a non null value.
unsafe { Self::new_unchecked(NonNull::new_unchecked(ptr)) }
}
/// Returns a pointer with the same address as `self` but unrooted.
pub(crate) fn unrooted(self) -> Self {
let ptr = self.ptr.as_ptr();
let data = ptr.cast::<u8>();
let addr = data as isize;
let ptr = set_data_ptr(ptr, data.wrapping_offset((addr & !1) - addr));
// SAFETY: ptr must be a non null value
unsafe { Self::new_unchecked(NonNull::new_unchecked(ptr)) }
}
/// Acquires the underlying `NonNull` pointer.
pub(crate) fn as_ptr(self) -> NonNull<T> {
self.unrooted().ptr
}
/// Returns a shared reference to the pointee.
///
/// # Safety
///
/// See [`NonNull::as_ref`].
pub(crate) unsafe fn as_ref(&self) -> &T {
// SAFETY: it is the caller's job to ensure the safety of this operation.
unsafe { self.as_ptr().as_ref() }
}
}
// Technically, this function is safe, since we're just modifying the address of a pointer without
// dereferencing it.
fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
// SAFETY: this should be safe as ptr must be a valid nonnull
unsafe {
ptr::write(addr_of_mut!(ptr).cast::<*mut u8>(), data.cast::<u8>());
}
ptr
}

boa_gc/src/pointers/weak.rs (3 changed lines)

@@ -13,6 +13,7 @@ pub struct WeakGc<T: Trace + ?Sized + 'static> {
impl<T: Trace> WeakGc<T> {
/// Creates a new weak pointer for a garbage collected value.
#[must_use]
pub fn new(value: &Gc<T>) -> Self {
Self {
inner: Ephemeron::new(value, value.clone()),
@@ -20,11 +21,13 @@ impl<T: Trace> WeakGc<T> {
}
/// Upgrade returns a `Gc` pointer for the internal value if valid, or None if the value was already garbage collected.
#[must_use]
pub fn upgrade(&self) -> Option<Gc<T>> {
self.inner.value()
}
/// Check if the [`WeakGc`] can be upgraded.
#[must_use]
pub fn is_upgradable(&self) -> bool {
self.inner.has_value()
}
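
For completeness, a usage sketch of the WeakGc API shown above, assuming String: Trace and boa_gc::force_collect:

use boa_gc::{force_collect, Gc, WeakGc};

fn main() {
    let strong = Gc::new(String::from("hello"));
    let weak = WeakGc::new(&strong);
    assert!(weak.is_upgradable());

    drop(strong);
    // No handle to the key remains outside the heap, so the collector
    // reclaims it and the underlying ephemeron loses its value.
    force_collect();
    assert!(weak.upgrade().is_none());
}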

boa_gc/src/pointers/weak_map.rs (2 changed lines)

@@ -28,12 +28,14 @@ impl<K: Trace, V: Trace + Clone> WeakMap<K, V> {
}
/// Returns `true` if the map contains a value for the specified key.
#[must_use]
#[inline]
pub fn contains_key(&self, key: &Gc<K>) -> bool {
self.inner.borrow().contains_key(&WeakGc::new(key))
}
/// Returns a reference to the value corresponding to the key.
#[must_use]
#[inline]
pub fn get(&self, key: &Gc<K>) -> Option<V> {
self.inner.borrow().get(&WeakGc::new(key)).cloned()

boa_gc/src/test/weak.rs (4 changed lines)

@@ -164,7 +164,7 @@ fn eph_self_referential() {
*root.inner.inner.borrow_mut() = Some(eph.clone());
assert!(eph.value().is_some());
Harness::assert_exact_bytes_allocated(80);
Harness::assert_exact_bytes_allocated(72);
}
*root.inner.inner.borrow_mut() = None;
@@ -210,7 +210,7 @@ fn eph_self_referential_chain() {
assert!(eph_start.value().is_some());
assert!(eph_chain2.value().is_some());
Harness::assert_exact_bytes_allocated(240);
Harness::assert_exact_bytes_allocated(216);
}
*root.borrow_mut() = None;

boa_gc/src/trace.rs (37 changed lines)

@@ -39,19 +39,8 @@ pub unsafe trait Trace: Finalize {
/// See [`Trace`].
unsafe fn trace(&self);
/// Increments the root-count of all contained `Gc`s.
///
/// # Safety
///
/// See [`Trace`].
unsafe fn root(&self);
/// Decrements the root-count of all contained `Gc`s.
///
/// # Safety
///
/// See [`Trace`].
unsafe fn unroot(&self);
/// Trace handles located in GC heap, and mark them as non root.
fn trace_non_roots(&self);
/// Runs [`Finalize::finalize`] on this object and all
/// contained subobjects.
@@ -67,9 +56,7 @@ macro_rules! empty_trace {
#[inline]
unsafe fn trace(&self) {}
#[inline]
unsafe fn root(&self) {}
#[inline]
unsafe fn unroot(&self) {}
fn trace_non_roots(&self) {}
#[inline]
fn run_finalizer(&self) {
$crate::Finalize::finalize(self)
@@ -101,23 +88,9 @@ macro_rules! custom_trace {
$body
}
#[inline]
unsafe fn root(&self) {
fn trace_non_roots(&self) {
fn mark<T: $crate::Trace + ?Sized>(it: &T) {
// SAFETY: The implementor must ensure that `root` is correctly implemented.
unsafe {
$crate::Trace::root(it);
}
}
let $this = self;
$body
}
#[inline]
unsafe fn unroot(&self) {
fn mark<T: $crate::Trace + ?Sized>(it: &T) {
// SAFETY: The implementor must ensure that `unroot` is correctly implemented.
unsafe {
$crate::Trace::unroot(it);
}
$crate::Trace::trace_non_roots(it);
}
let $this = self;
$body
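
With root/unroot gone from the trait, a manual Trace implementation only has to route mark through its traceable fields; custom_trace! then generates trace, trace_non_roots, and run_finalizer from the one body. A sketch of a hand-written impl, assuming the macro's two-argument form shown above:

use boa_gc::{custom_trace, Finalize, Gc, Trace};

struct Pair {
    left: Gc<i32>,
    right: Gc<i32>,
}

impl Finalize for Pair {}

// SAFETY: every traceable field is passed to `mark`, as the macro requires.
unsafe impl Trace for Pair {
    custom_trace!(this, {
        mark(&this.left);
        mark(&this.right);
    });
}

fn main() {
    let pair = Pair { left: Gc::new(1), right: Gc::new(2) };
    assert_eq!(*pair.left + *pair.right, 3);
}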

boa_macros/src/lib.rs (14 changed lines)

@@ -251,21 +251,11 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream {
match *self { #trace_body }
}
#[inline]
unsafe fn root(&self) {
fn trace_non_roots(&self) {
#[allow(dead_code)]
fn mark<T: ::boa_gc::Trace + ?Sized>(it: &T) {
unsafe {
::boa_gc::Trace::root(it);
}
}
match *self { #trace_body }
}
#[inline]
unsafe fn unroot(&self) {
#[allow(dead_code)]
fn mark<T: ::boa_gc::Trace + ?Sized>(it: &T) {
unsafe {
::boa_gc::Trace::unroot(it);
::boa_gc::Trace::trace_non_roots(it);
}
}
match *self { #trace_body }
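
On the derive side, #[derive(Trace, Finalize)] now emits this trace_non_roots body automatically, so handles stored in user types are counted with no manual impl. A sketch, assuming boa_gc re-exports the derive macros as boa_engine's own usage suggests:

use boa_gc::{Finalize, Gc, Trace};

#[derive(Trace, Finalize)]
struct Node {
    child: Gc<i32>, // a handle stored inside the GC heap
}

fn main() {
    let child = Gc::new(1_i32);
    let _node = Gc::new(Node { child: child.clone() });
    // child: ref_count = 2; trace_non_roots finds the in-heap copy, so
    // non_root_count = 1 < 2 and the stack handle keeps `child` rooted.
    assert_eq!(*child, 1);
}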
