
Non recursive gc trace (#3508)

* Implement non-recursive GC tracing

* Mark `Trace::trace_non_roots()` as unsafe

* Apply review
Haled Odat authored 12 months ago; committed by GitHub
commit f923830031 (branch: pull/3511/head)
32 changed files, with the number of changed lines per file:

  1. core/engine/src/builtins/function/mod.rs (2 changes)
  2. core/engine/src/builtins/generator/mod.rs (2 changes)
  3. core/engine/src/builtins/intl/collator/mod.rs (2 changes)
  4. core/engine/src/builtins/map/ordered_map.rs (2 changes)
  5. core/engine/src/builtins/promise/mod.rs (6 changes)
  6. core/engine/src/builtins/set/ordered_set.rs (2 changes)
  7. core/engine/src/environments/runtime/declarative/function.rs (2 changes)
  8. core/engine/src/error.rs (6 changes)
  9. core/engine/src/module/source.rs (4 changes)
  10. core/engine/src/native_function.rs (4 changes)
  11. core/engine/src/object/mod.rs (14 changes)
  12. core/engine/src/object/property_map.rs (2 changes)
  13. core/engine/src/value/mod.rs (2 changes)
  14. core/engine/src/vm/completion_record.rs (2 changes)
  15. core/engine/src/vm/mod.rs (2 changes)
  16. core/engine/src/vm/tests.rs (14 changes)
  17. core/gc/src/cell.rs (11 changes)
  18. core/gc/src/internals/ephemeron_box.rs (106 changes)
  19. core/gc/src/internals/gc_box.rs (145 changes)
  20. core/gc/src/internals/gc_header.rs (104 changes)
  21. core/gc/src/internals/mod.rs (4 changes)
  22. core/gc/src/internals/vtable.rs (92 changes)
  23. core/gc/src/internals/weak_map_box.rs (8 changes)
  24. core/gc/src/lib.rs (126 changes)
  25. core/gc/src/pointers/ephemeron.rs (6 changes)
  26. core/gc/src/pointers/gc.rs (48 changes)
  27. core/gc/src/pointers/mod.rs (1 change)
  28. core/gc/src/pointers/weak_map.rs (4 changes)
  29. core/gc/src/test/allocation.rs (35 changes)
  30. core/gc/src/test/weak.rs (4 changes)
  31. core/gc/src/trace.rs (124 changes)
  32. core/macros/src/lib.rs (62 changes)

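The core idea of the change, in isolation: recursive tracing (`Gc::trace` calling `Trace::trace` on its children directly) consumes one native stack frame per heap object, so a long `Gc` chain like the one in issue #1848 overflows the stack. The fix threads a `Tracer` work queue through `Trace::trace` and lets the collector drain it iteratively. Below is a minimal, self-contained sketch of that technique; the `Node` type is hypothetical, standing in for boa_gc's real internals.

    use std::cell::Cell;
    use std::collections::VecDeque;

    // Hypothetical stand-in for a GC heap node.
    struct Node {
        marked: Cell<bool>,
        children: Vec<Node>,
    }

    // Queue-based marking: native stack depth stays constant no matter how
    // deep the object graph is, unlike recursive tracing.
    fn mark_from_root(root: &Node) {
        let mut queue: VecDeque<&Node> = VecDeque::new();
        queue.push_back(root);
        while let Some(node) = queue.pop_front() {
            if !node.marked.get() {
                node.marked.set(true);
                queue.extend(node.children.iter());
            }
        }
    }

    fn main() {
        // Build a deep chain like the new regression test in `vm/tests.rs`.
        let mut root = Node { marked: Cell::new(false), children: Vec::new() };
        for _ in 0..100_000 {
            root = Node { marked: Cell::new(false), children: vec![root] };
        }
        mark_from_root(&root);
        assert!(root.marked.get());
        // Tear the chain down iteratively too: the default recursive `Drop`
        // glue would overflow the stack for the same reason tracing did.
        while let Some(child) = root.children.pop() {
            root = child;
        }
    }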
core/engine/src/builtins/function/mod.rs (2 changes)

@ -142,7 +142,7 @@ pub enum ClassFieldDefinition {
}
unsafe impl Trace for ClassFieldDefinition {
custom_trace! {this, {
custom_trace! {this, mark, {
match this {
Self::Public(_key, func) => {
mark(func);

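Every `custom_trace!` call site in this diff gains a second argument naming the marker. The reason is macro hygiene: the generated `trace` body now defines the marker as a closure capturing the `tracer` parameter (see `core/gc/src/trace.rs` below), and a `let` binding introduced inside a `macro_rules!` expansion is invisible to the caller-supplied body, so the caller has to provide the identifier itself. (A closure is needed because a `fn` item cannot capture `tracer`; it takes `&dyn Trace` since closures cannot be generic.) The new invocation shape, on a hypothetical type:

    use boa_gc::{custom_trace, Finalize, Gc, Trace};

    // Hypothetical type; mirrors the pattern used across this diff.
    struct Wrapper {
        inner: Gc<i32>,
    }

    impl Finalize for Wrapper {}

    // SAFETY: all GC-managed fields are passed to `mark`.
    unsafe impl Trace for Wrapper {
        custom_trace!(this, mark, {
            mark(&this.inner);
        });
    }

    fn main() {
        let w = Wrapper { inner: Gc::new(42) };
        assert_eq!(*w.inner, 42);
    }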
core/engine/src/builtins/generator/mod.rs (2 changes)

@ -45,7 +45,7 @@ pub(crate) enum GeneratorState {
// Need to manually implement, since `Trace` adds a `Drop` impl which disallows destructuring.
unsafe impl Trace for GeneratorState {
custom_trace!(this, {
custom_trace!(this, mark, {
match &this {
Self::SuspendedStart { context } | Self::SuspendedYield { context } => mark(context),
Self::Executing | Self::Completed => {}

core/engine/src/builtins/intl/collator/mod.rs (2 changes)

@ -57,7 +57,7 @@ pub(crate) struct Collator {
// SAFETY: only `bound_compare` is a traceable object.
unsafe impl Trace for Collator {
custom_trace!(this, mark(&this.bound_compare));
custom_trace!(this, mark, mark(&this.bound_compare));
}
impl Collator {

core/engine/src/builtins/map/ordered_map.rs (2 changes)

@ -42,7 +42,7 @@ pub struct OrderedMap<V> {
}
unsafe impl<V: Trace> Trace for OrderedMap<V> {
custom_trace!(this, {
custom_trace!(this, mark, {
for (k, v) in &this.map {
if let MapKey::Key(key) = k {
mark(key);

core/engine/src/builtins/promise/mod.rs (6 changes)

@ -42,7 +42,7 @@ pub enum PromiseState {
}
unsafe impl Trace for PromiseState {
custom_trace!(this, {
custom_trace!(this, mark, {
match this {
Self::Fulfilled(v) | Self::Rejected(v) => mark(v),
Self::Pending => {}
@ -121,7 +121,7 @@ pub struct ResolvingFunctions {
// Manually implementing `Trace` to allow destructuring.
unsafe impl Trace for ResolvingFunctions {
custom_trace!(this, {
custom_trace!(this, mark, {
mark(&this.resolve);
mark(&this.reject);
});
@ -176,7 +176,7 @@ pub(crate) struct PromiseCapability {
// SAFETY: manually implementing `Trace` to allow destructuring.
unsafe impl Trace for PromiseCapability {
custom_trace!(this, {
custom_trace!(this, mark, {
mark(&this.promise);
mark(&this.functions);
});

core/engine/src/builtins/set/ordered_set.rs (2 changes)

@ -14,7 +14,7 @@ pub struct OrderedSet {
}
unsafe impl Trace for OrderedSet {
custom_trace!(this, {
custom_trace!(this, mark, {
for v in &this.inner {
if let MapKey::Key(v) = v {
mark(v);

core/engine/src/environments/runtime/declarative/function.rs (2 changes)

@ -161,7 +161,7 @@ pub(crate) enum ThisBindingStatus {
}
unsafe impl Trace for ThisBindingStatus {
custom_trace!(this, {
custom_trace!(this, mark, {
match this {
Self::Initialized(obj) => mark(obj),
Self::Lexical | Self::Uninitialized => {}

core/engine/src/error.rs (6 changes)

@ -53,7 +53,7 @@ pub struct JsError {
// SAFETY: just mirroring the default derive to allow destructuring.
unsafe impl Trace for JsError {
custom_trace!(this, mark(&this.inner));
custom_trace!(this, mark, mark(&this.inner));
}
/// Internal representation of a [`JsError`].
@ -76,6 +76,7 @@ enum Repr {
unsafe impl Trace for Repr {
custom_trace!(
this,
mark,
match &this {
Self::Native(err) => mark(err),
Self::Opaque(val) => mark(val),
@ -549,7 +550,7 @@ impl fmt::Display for JsNativeError {
// SAFETY: just mirroring the default derive to allow destructuring.
unsafe impl Trace for JsNativeError {
custom_trace!(this, {
custom_trace!(this, mark, {
mark(&this.kind);
mark(&this.cause);
mark(&this.realm);
@ -1108,6 +1109,7 @@ pub enum JsNativeErrorKind {
unsafe impl Trace for JsNativeErrorKind {
custom_trace!(
this,
mark,
match &this {
Self::Aggregate(errors) => mark(errors),
Self::Error

core/engine/src/module/source.rs (4 changes)

@ -104,7 +104,7 @@ enum Status {
// useful to have for state machines like `Status`. This is solved by manually implementing
// `Trace`.
unsafe impl Trace for Status {
custom_trace!(this, {
custom_trace!(this, mark, {
match this {
Self::Unlinked | Self::Linking { info: _ } => {}
Self::PreLinked { context, info: _ } | Self::Linked { context, info: _ } => {
@ -239,7 +239,7 @@ impl std::fmt::Debug for SourceTextContext {
}
unsafe impl Trace for SourceTextContext {
custom_trace!(this, {
custom_trace!(this, mark, {
mark(&this.codeblock);
mark(&this.environments);
mark(&this.realm);

core/engine/src/native_function.rs (4 changes)

@ -71,7 +71,7 @@ pub struct NativeFunctionObject {
// SAFETY: this traces all fields that need to be traced by the GC.
unsafe impl Trace for NativeFunctionObject {
custom_trace!(this, {
custom_trace!(this, mark, {
mark(&this.f);
mark(&this.realm);
});
@ -125,7 +125,7 @@ enum Inner {
// Manual implementation because deriving `Trace` triggers the `single_use_lifetimes` lint.
// SAFETY: Only closures can contain `Trace` captures, so this implementation is safe.
unsafe impl Trace for NativeFunction {
custom_trace!(this, {
custom_trace!(this, mark, {
if let Inner::Closure(c) = &this.inner {
mark(c);
}

core/engine/src/object/mod.rs (14 changes)

@ -178,7 +178,9 @@ impl dyn NativeObject {
}
/// The internal representation of a JavaScript object.
#[derive(Debug, Finalize)]
#[derive(Debug, Finalize, Trace)]
// SAFETY: This does not implement drop, so this is safe.
#[boa_gc(unsafe_no_drop)]
pub struct Object<T: ?Sized> {
/// The collection of properties contained in the object
pub(crate) properties: PropertyMap,
@ -201,16 +203,6 @@ impl<T: Default> Default for Object<T> {
}
}
unsafe impl<T: Trace + ?Sized> Trace for Object<T> {
boa_gc::custom_trace!(this, {
mark(&this.data);
mark(&this.properties);
for (_, element) in &this.private_elements {
mark(element);
}
});
}
/// A Private Name.
#[derive(Clone, Debug, PartialEq, Eq, Trace, Finalize)]
pub struct PrivateName {

core/engine/src/object/property_map.rs (2 changes)

@ -25,7 +25,7 @@ impl<K: Trace> Default for OrderedHashMap<K> {
}
unsafe impl<K: Trace> Trace for OrderedHashMap<K> {
custom_trace!(this, {
custom_trace!(this, mark, {
for (k, v) in &this.0 {
mark(k);
mark(v);

core/engine/src/value/mod.rs (2 changes)

@ -81,7 +81,7 @@ pub enum JsValue {
}
unsafe impl Trace for JsValue {
custom_trace! {this, {
custom_trace! {this, mark, {
if let Self::Object(o) = this {
mark(o);
}

core/engine/src/vm/completion_record.rs (2 changes)

@ -18,7 +18,7 @@ pub(crate) enum CompletionRecord {
// SAFETY: this matches all possible variants and traces
// their inner contents, which makes this safe.
unsafe impl Trace for CompletionRecord {
custom_trace!(this, {
custom_trace!(this, mark, {
match this {
Self::Normal(v) => mark(v),
Self::Return(r) => mark(r),

core/engine/src/vm/mod.rs (2 changes)

@ -86,7 +86,7 @@ pub(crate) enum ActiveRunnable {
}
unsafe impl Trace for ActiveRunnable {
custom_trace!(this, {
custom_trace!(this, mark, {
match this {
Self::Script(script) => mark(script),
Self::Module(module) => mark(module),

core/engine/src/vm/tests.rs (14 changes)

@ -417,3 +417,17 @@ fn cross_context_funtion_call() {
assert_eq!(result, Ok(JsValue::new(100)));
}
// See: https://github.com/boa-dev/boa/issues/1848
#[test]
fn long_object_chain_gc_trace_stack_overflow() {
run_test_actions([
TestAction::run(indoc! {r#"
let old = {};
for (let i = 0; i < 100000; i++) {
old = { old };
}
"#}),
TestAction::inspect_context(|_| boa_gc::force_collect()),
]);
}

core/gc/src/cell.rs (11 changes)

@ -1,6 +1,9 @@
//! A garbage collected cell implementation
use crate::trace::{Finalize, Trace};
use crate::{
trace::{Finalize, Trace},
Tracer,
};
use std::{
cell::{Cell, UnsafeCell},
cmp::Ordering,
@ -218,15 +221,15 @@ impl<T: Trace + ?Sized> Finalize for GcRefCell<T> {}
// Implementing a Trace while the cell is being written to or incorrectly implementing Trace
// on GcCell's value may cause Undefined Behavior
unsafe impl<T: Trace + ?Sized> Trace for GcRefCell<T> {
unsafe fn trace(&self) {
unsafe fn trace(&self, tracer: &mut Tracer) {
match self.flags.get().borrowed() {
BorrowState::Writing => (),
// SAFETY: Please see GcCell's Trace impl Safety note.
_ => unsafe { (*self.cell.get()).trace() },
_ => unsafe { (*self.cell.get()).trace(tracer) },
}
}
fn trace_non_roots(&self) {
unsafe fn trace_non_roots(&self) {
match self.flags.get().borrowed() {
BorrowState::Writing => (),
// SAFETY: Please see GcCell's Trace impl Safety note.

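`GcRefCell` keeps its existing guard: while the cell is mutably borrowed (`BorrowState::Writing`) tracing is skipped, since the contents may be mid-mutation; the change only threads the `Tracer` through and marks `trace_non_roots` unsafe. A reduced sketch of the guard pattern, with hypothetical types:

    use std::cell::{Cell, UnsafeCell};

    // Hypothetical reduced cell; the real `GcRefCell` tracks readers too.
    #[derive(Clone, Copy, PartialEq, Eq)]
    enum BorrowState {
        Unused,
        Writing,
    }

    struct TraceableCell<T> {
        state: Cell<BorrowState>,
        value: UnsafeCell<T>,
    }

    impl<T> TraceableCell<T> {
        // Visit the value only when no `&mut` borrow is live.
        fn trace_with(&self, visit: impl FnOnce(&T)) {
            if self.state.get() != BorrowState::Writing {
                // SAFETY (sketch): no mutable borrow is active, so a shared
                // read of the value is sound.
                visit(unsafe { &*self.value.get() });
            }
        }
    }

    fn main() {
        let cell = TraceableCell {
            state: Cell::new(BorrowState::Unused),
            value: UnsafeCell::new(5),
        };
        let mut seen = None;
        cell.trace_with(|v| seen = Some(*v));
        assert_eq!(seen, Some(5));
    }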
core/gc/src/internals/ephemeron_box.rs (106 changes)

@ -1,94 +1,14 @@
use crate::{trace::Trace, Gc, GcBox};
use crate::{trace::Trace, Gc, GcBox, Tracer};
use std::{
cell::{Cell, UnsafeCell},
cell::UnsafeCell,
ptr::{self, NonNull},
};
const MARK_MASK: u32 = 1 << (u32::BITS - 1);
const NON_ROOTS_MASK: u32 = !MARK_MASK;
const NON_ROOTS_MAX: u32 = NON_ROOTS_MASK;
/// The `EphemeronBoxHeader` contains the `EphemeronBoxHeader`'s current state for the `Collector`'s
/// Mark/Sweep as well as a pointer to the next ephemeron in the heap.
///
/// `ref_count` is the number of Gc instances, and `non_root_count` is the number of
/// Gc instances in the heap. `non_root_count` also includes Mark Flag bit.
///
/// The next node is set by the `Allocator` during initialization and by the
/// `Collector` during the sweep phase.
pub(crate) struct EphemeronBoxHeader {
ref_count: Cell<u32>,
non_root_count: Cell<u32>,
}
impl EphemeronBoxHeader {
/// Creates a new `EphemeronBoxHeader` with a root of 1 and next set to None.
pub(crate) fn new() -> Self {
Self {
ref_count: Cell::new(1),
non_root_count: Cell::new(0),
}
}
/// Returns the `EphemeronBoxHeader`'s current ref count
pub(crate) fn get_ref_count(&self) -> u32 {
self.ref_count.get()
}
/// Returns a count for non-roots.
pub(crate) fn get_non_root_count(&self) -> u32 {
self.non_root_count.get() & NON_ROOTS_MASK
}
/// Increments `EphemeronBoxHeader`'s non-roots count.
pub(crate) fn inc_non_root_count(&self) {
let non_root_count = self.non_root_count.get();
if (non_root_count & NON_ROOTS_MASK) < NON_ROOTS_MAX {
self.non_root_count.set(non_root_count.wrapping_add(1));
} else {
// TODO: implement a better way to handle root overload.
panic!("non roots counter overflow");
}
}
/// Reset non-roots count to zero.
pub(crate) fn reset_non_root_count(&self) {
self.non_root_count
.set(self.non_root_count.get() & !NON_ROOTS_MASK);
}
/// Returns a bool for whether `GcBoxHeader`'s mark bit is 1.
pub(crate) fn is_marked(&self) -> bool {
self.non_root_count.get() & MARK_MASK != 0
}
/// Sets `GcBoxHeader`'s mark bit to 1.
pub(crate) fn mark(&self) {
self.non_root_count
.set(self.non_root_count.get() | MARK_MASK);
}
/// Sets `GcBoxHeader`'s mark bit to 0.
pub(crate) fn unmark(&self) {
self.non_root_count
.set(self.non_root_count.get() & !MARK_MASK);
}
}
impl core::fmt::Debug for EphemeronBoxHeader {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_struct("EphemeronBoxHeader")
.field("marked", &self.is_marked())
.field("ref_count", &self.get_ref_count())
.field("non_root_count", &self.get_non_root_count())
.finish()
}
}
use super::GcHeader;
/// The inner allocation of an [`Ephemeron`][crate::Ephemeron] pointer.
pub(crate) struct EphemeronBox<K: Trace + ?Sized + 'static, V: Trace + 'static> {
pub(crate) header: EphemeronBoxHeader,
pub(crate) header: GcHeader,
data: UnsafeCell<Option<Data<K, V>>>,
}
@ -101,7 +21,7 @@ impl<K: Trace + ?Sized, V: Trace> EphemeronBox<K, V> {
/// Creates a new `EphemeronBox` that tracks `key` and has `value` as its inner data.
pub(crate) fn new(key: &Gc<K>, value: V) -> Self {
Self {
header: EphemeronBoxHeader::new(),
header: GcHeader::new(),
data: UnsafeCell::new(Some(Data {
key: key.inner_ptr(),
value,
@ -112,7 +32,7 @@ impl<K: Trace + ?Sized, V: Trace> EphemeronBox<K, V> {
/// Creates a new `EphemeronBox` with its inner data in the invalidated state.
pub(crate) fn new_empty() -> Self {
Self {
header: EphemeronBoxHeader::new(),
header: GcHeader::new(),
data: UnsafeCell::new(None),
}
}
@ -190,12 +110,12 @@ impl<K: Trace + ?Sized, V: Trace> EphemeronBox<K, V> {
#[inline]
pub(crate) fn inc_ref_count(&self) {
self.header.ref_count.set(self.header.ref_count.get() + 1);
self.header.inc_ref_count();
}
#[inline]
pub(crate) fn dec_ref_count(&self) {
self.header.ref_count.set(self.header.ref_count.get() - 1);
self.header.dec_ref_count();
}
#[inline]
@ -206,13 +126,13 @@ impl<K: Trace + ?Sized, V: Trace> EphemeronBox<K, V> {
pub(crate) trait ErasedEphemeronBox {
/// Gets the header of the `EphemeronBox`.
fn header(&self) -> &EphemeronBoxHeader;
fn header(&self) -> &GcHeader;
/// Traces through the `EphemeronBox`'s held value, but only if it's marked and its key is also
/// marked. Returns `true` if the ephemeron successfully traced through its value. This also
/// considers ephemerons that are marked but don't have their value anymore as
/// "successfully traced".
unsafe fn trace(&self) -> bool;
unsafe fn trace(&self, tracer: &mut Tracer) -> bool;
fn trace_non_roots(&self);
@ -222,11 +142,11 @@ pub(crate) trait ErasedEphemeronBox {
}
impl<K: Trace + ?Sized, V: Trace> ErasedEphemeronBox for EphemeronBox<K, V> {
fn header(&self) -> &EphemeronBoxHeader {
fn header(&self) -> &GcHeader {
&self.header
}
unsafe fn trace(&self) -> bool {
unsafe fn trace(&self, tracer: &mut Tracer) -> bool {
if !self.header.is_marked() {
return false;
}
@ -247,7 +167,7 @@ impl<K: Trace + ?Sized, V: Trace> ErasedEphemeronBox for EphemeronBox<K, V> {
if is_key_marked {
// SAFETY: this is safe to call, since we want to trace all reachable objects
// from a marked ephemeron that holds a live `key`.
unsafe { data.value.trace() }
unsafe { data.value.trace(tracer) }
}
is_key_marked

core/gc/src/internals/gc_box.rs (145 changes)

@ -1,95 +1,24 @@
use crate::Trace;
use std::{cell::Cell, fmt, ptr};
const MARK_MASK: u32 = 1 << (u32::BITS - 1);
const NON_ROOTS_MASK: u32 = !MARK_MASK;
const NON_ROOTS_MAX: u32 = NON_ROOTS_MASK;
/// The `GcBoxheader` contains the `GcBox`'s current state for the `Collector`'s
/// Mark/Sweep as well as a pointer to the next node in the heap.
///
/// `ref_count` is the number of Gc instances, and `non_root_count` is the number of
/// Gc instances in the heap. `non_root_count` also includes Mark Flag bit.
///
/// The next node is set by the `Allocator` during initialization and by the
/// `Collector` during the sweep phase.
pub(crate) struct GcBoxHeader {
ref_count: Cell<u32>,
non_root_count: Cell<u32>,
}
impl GcBoxHeader {
/// Creates a new `GcBoxHeader` with a root of 1 and next set to None.
pub(crate) fn new() -> Self {
Self {
ref_count: Cell::new(1),
non_root_count: Cell::new(0),
}
}
/// Returns the `GcBoxHeader`'s current non-roots count
pub(crate) fn get_non_root_count(&self) -> u32 {
self.non_root_count.get() & NON_ROOTS_MASK
}
/// Increments `GcBoxHeader`'s non-roots count.
pub(crate) fn inc_non_root_count(&self) {
let non_root_count = self.non_root_count.get();
if (non_root_count & NON_ROOTS_MASK) < NON_ROOTS_MAX {
self.non_root_count.set(non_root_count.wrapping_add(1));
} else {
// TODO: implement a better way to handle root overload.
panic!("non-roots counter overflow");
}
}
/// Decreases `GcBoxHeader`'s current non-roots count.
pub(crate) fn reset_non_root_count(&self) {
self.non_root_count
.set(self.non_root_count.get() & !NON_ROOTS_MASK);
}
/// Returns a bool for whether `GcBoxHeader`'s mark bit is 1.
pub(crate) fn is_marked(&self) -> bool {
self.non_root_count.get() & MARK_MASK != 0
}
/// Sets `GcBoxHeader`'s mark bit to 1.
pub(crate) fn mark(&self) {
self.non_root_count
.set(self.non_root_count.get() | MARK_MASK);
}
/// Sets `GcBoxHeader`'s mark bit to 0.
pub(crate) fn unmark(&self) {
self.non_root_count
.set(self.non_root_count.get() & !MARK_MASK);
}
}
use std::ptr::{self};
impl fmt::Debug for GcBoxHeader {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("GcBoxHeader")
.field("marked", &self.is_marked())
.field("ref_count", &self.ref_count.get())
.field("non_root_count", &self.get_non_root_count())
.finish_non_exhaustive()
}
}
use super::{vtable_of, DropFn, GcHeader, RunFinalizerFn, TraceFn, TraceNonRootsFn, VTable};
/// A garbage collected allocation.
#[derive(Debug)]
#[repr(C)]
pub struct GcBox<T: Trace + ?Sized + 'static> {
pub(crate) header: GcBoxHeader,
pub(crate) header: GcHeader,
pub(crate) vtable: &'static VTable,
value: T,
}
impl<T: Trace> GcBox<T> {
/// Returns a new `GcBox` with a rooted `GcBoxHeader`.
pub(crate) fn new(value: T) -> Self {
let vtable = vtable_of::<T>();
Self {
header: GcBoxHeader::new(),
header: GcHeader::new(),
vtable,
value,
}
}
@ -103,20 +32,6 @@ impl<T: Trace + ?Sized> GcBox<T> {
ptr::eq(&this.header, &other.header)
}
/// Marks this `GcBox` and traces its value.
pub(crate) unsafe fn mark_and_trace(&self) {
if !self.header.is_marked() {
self.header.mark();
// SAFETY: if `GcBox::trace_inner()` has been called, then,
// this box must have been deemed as reachable via tracing
// from a root, which by extension means that value has not
// been dropped either.
unsafe {
self.value.trace();
}
}
}
/// Returns a reference to the `GcBox`'s value.
pub(crate) const fn value(&self) -> &T {
&self.value
@ -127,24 +42,14 @@ impl<T: Trace + ?Sized> GcBox<T> {
self.header.is_marked()
}
#[inline]
pub(crate) fn get_ref_count(&self) -> u32 {
self.header.ref_count.get()
}
#[inline]
pub(crate) fn inc_ref_count(&self) {
self.header.ref_count.set(self.header.ref_count.get() + 1);
self.header.inc_ref_count();
}
#[inline]
pub(crate) fn dec_ref_count(&self) {
self.header.ref_count.set(self.header.ref_count.get() - 1);
}
#[inline]
pub(crate) fn get_non_root_count(&self) -> u32 {
self.header.get_non_root_count()
self.header.dec_ref_count();
}
#[inline]
@ -155,4 +60,34 @@ impl<T: Trace + ?Sized> GcBox<T> {
pub(crate) fn reset_non_root_count(&self) {
self.header.reset_non_root_count();
}
/// Check if the gc object is rooted.
///
/// # Note
///
/// This only gives a valid result if we have run through the
/// tracing non-roots phase.
pub(crate) fn is_rooted(&self) -> bool {
self.header.is_rooted()
}
pub(crate) fn trace_fn(&self) -> TraceFn {
self.vtable.trace_fn()
}
pub(crate) fn trace_non_roots_fn(&self) -> TraceNonRootsFn {
self.vtable.trace_non_roots_fn()
}
pub(crate) fn run_finalizer_fn(&self) -> RunFinalizerFn {
self.vtable.run_finalizer_fn()
}
pub(crate) fn drop_fn(&self) -> DropFn {
self.vtable.drop_fn()
}
pub(crate) fn size(&self) -> usize {
self.vtable.size()
}
}

core/gc/src/internals/gc_header.rs (104 changes)

@ -0,0 +1,104 @@
use std::{cell::Cell, fmt};
const MARK_MASK: u32 = 1 << (u32::BITS - 1);
const NON_ROOTS_MASK: u32 = !MARK_MASK;
const NON_ROOTS_MAX: u32 = NON_ROOTS_MASK;
/// The `GcHeader` contains the `GcBox`'s and `EphemeronBox`'s current state for the `Collector`'s
/// Mark/Sweep as well as a pointer to the next node in the heap.
///
/// `ref_count` is the total number of `Gc` handles to the allocation, and `non_root_count` is
/// the number of those handles that live inside the GC heap. `non_root_count` also stores the
/// mark flag bit.
///
/// The next node is set by the `Allocator` during initialization and by the
/// `Collector` during the sweep phase.
pub(crate) struct GcHeader {
ref_count: Cell<u32>,
non_root_count: Cell<u32>,
}
impl GcHeader {
/// Creates a new [`GcHeader`] with a root of 1 and next set to None.
pub(crate) fn new() -> Self {
Self {
ref_count: Cell::new(1),
non_root_count: Cell::new(0),
}
}
/// Returns the [`GcHeader`]'s current ref count.
pub(crate) fn ref_count(&self) -> u32 {
self.ref_count.get()
}
/// Returns the [`GcHeader`]'s current non-roots count
pub(crate) fn non_root_count(&self) -> u32 {
self.non_root_count.get() & NON_ROOTS_MASK
}
/// Increments [`GcHeader`]'s non-roots count.
pub(crate) fn inc_non_root_count(&self) {
let non_root_count = self.non_root_count.get();
if (non_root_count & NON_ROOTS_MASK) < NON_ROOTS_MAX {
self.non_root_count.set(non_root_count.wrapping_add(1));
} else {
// TODO: implement a better way to handle root overload.
panic!("non-roots counter overflow");
}
}
/// Decreases [`GcHeader`]'s current non-roots count.
pub(crate) fn reset_non_root_count(&self) {
self.non_root_count
.set(self.non_root_count.get() & !NON_ROOTS_MASK);
}
/// Returns a bool for whether [`GcHeader`]'s mark bit is 1.
pub(crate) fn is_marked(&self) -> bool {
self.non_root_count.get() & MARK_MASK != 0
}
#[inline]
pub(crate) fn inc_ref_count(&self) {
self.ref_count.set(self.ref_count.get() + 1);
}
#[inline]
pub(crate) fn dec_ref_count(&self) {
self.ref_count.set(self.ref_count.get() - 1);
}
/// Check if the gc object is rooted.
///
/// # Note
///
/// This only gives a valid result if we have run through the
/// tracing non-roots phase.
#[inline]
pub(crate) fn is_rooted(&self) -> bool {
self.non_root_count() < self.ref_count()
}
/// Sets [`GcHeader`]'s mark bit to 1.
pub(crate) fn mark(&self) {
self.non_root_count
.set(self.non_root_count.get() | MARK_MASK);
}
/// Sets [`GcHeader`]'s mark bit to 0.
pub(crate) fn unmark(&self) {
self.non_root_count
.set(self.non_root_count.get() & !MARK_MASK);
}
}
impl fmt::Debug for GcHeader {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("GcHeader")
.field("marked", &self.is_marked())
.field("ref_count", &self.ref_count.get())
.field("non_root_count", &self.non_root_count())
.finish_non_exhaustive()
}
}

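`GcHeader` packs the mark flag into the top bit of `non_root_count`, leaving the low 31 bits for the count, and `is_rooted` simply compares the two counters after the trace-non-roots phase. A standalone illustration of the arithmetic (constants copied from above; the scenario is hypothetical):

    // Constants copied from `gc_header.rs`.
    const MARK_MASK: u32 = 1 << (u32::BITS - 1);
    const NON_ROOTS_MASK: u32 = !MARK_MASK;

    fn main() {
        let ref_count: u32 = 2; // two live `Gc` handles to the allocation
        let mut non_root_count: u32 = 0;

        // Trace-non-roots phase: one of the handles was found in the GC heap.
        non_root_count += 1;

        // mark() sets the top bit without disturbing the count.
        non_root_count |= MARK_MASK;
        assert!(non_root_count & MARK_MASK != 0); // is_marked()
        assert_eq!(non_root_count & NON_ROOTS_MASK, 1);

        // is_rooted(): fewer handles were found inside the heap than exist in
        // total, so something outside the heap (a root) still points here.
        assert!((non_root_count & NON_ROOTS_MASK) < ref_count);
    }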
core/gc/src/internals/mod.rs (4 changes)

@ -1,8 +1,12 @@
mod ephemeron_box;
mod gc_box;
mod gc_header;
mod vtable;
mod weak_map_box;
pub(crate) use self::ephemeron_box::{EphemeronBox, ErasedEphemeronBox};
pub(crate) use self::gc_header::GcHeader;
pub(crate) use self::weak_map_box::{ErasedWeakMapBox, WeakMapBox};
pub(crate) use vtable::{vtable_of, DropFn, RunFinalizerFn, TraceFn, TraceNonRootsFn, VTable};
pub use self::gc_box::GcBox;

core/gc/src/internals/vtable.rs (92 changes)

@ -0,0 +1,92 @@
use crate::{GcBox, GcErasedPointer, Trace, Tracer};
// Workaround: https://users.rust-lang.org/t/custom-vtables-with-integers/78508
pub(crate) const fn vtable_of<T: Trace + 'static>() -> &'static VTable {
trait HasVTable: Trace + Sized + 'static {
const VTABLE: &'static VTable;
unsafe fn trace_fn(this: GcErasedPointer, tracer: &mut Tracer) {
// SAFETY: The caller must ensure that the passed erased pointer is `GcBox<Self>`.
let value = unsafe { this.cast::<GcBox<Self>>().as_ref().value() };
// SAFETY: The implementor must ensure that `trace` is correctly implemented.
unsafe {
Trace::trace(value, tracer);
}
}
unsafe fn trace_non_roots_fn(this: GcErasedPointer) {
// SAFETY: The caller must ensure that the passed erased pointer is `GcBox<Self>`.
let value = unsafe { this.cast::<GcBox<Self>>().as_ref().value() };
// SAFETY: The implementor must ensure that `trace_non_roots` is correctly implemented.
unsafe {
Self::trace_non_roots(value);
}
}
unsafe fn run_finalizer_fn(this: GcErasedPointer) {
// SAFETY: The caller must ensure that the passed erased pointer is `GcBox<Self>`.
let value = unsafe { this.cast::<GcBox<Self>>().as_ref().value() };
Self::run_finalizer(value);
}
// SAFETY: The caller must ensure that the passed erased pointer is `GcBox<Self>`.
unsafe fn drop_fn(this: GcErasedPointer) {
// SAFETY: The caller must ensure that the passed erased pointer is `GcBox<Self>`.
let this = this.cast::<GcBox<Self>>();
// SAFETY: The caller must ensure the erased pointer has not already been dropped or deallocated.
let _value = unsafe { Box::from_raw(this.as_ptr()) };
}
}
impl<T: Trace + 'static> HasVTable for T {
const VTABLE: &'static VTable = &VTable {
trace_fn: T::trace_fn,
trace_non_roots_fn: T::trace_non_roots_fn,
run_finalizer_fn: T::run_finalizer_fn,
drop_fn: T::drop_fn,
size: std::mem::size_of::<GcBox<T>>(),
};
}
T::VTABLE
}
pub(crate) type TraceFn = unsafe fn(this: GcErasedPointer, tracer: &mut Tracer);
pub(crate) type TraceNonRootsFn = unsafe fn(this: GcErasedPointer);
pub(crate) type RunFinalizerFn = unsafe fn(this: GcErasedPointer);
pub(crate) type DropFn = unsafe fn(this: GcErasedPointer);
#[derive(Debug)]
pub(crate) struct VTable {
trace_fn: TraceFn,
trace_non_roots_fn: TraceNonRootsFn,
run_finalizer_fn: RunFinalizerFn,
drop_fn: DropFn,
size: usize,
}
impl VTable {
pub(crate) fn trace_fn(&self) -> TraceFn {
self.trace_fn
}
pub(crate) fn trace_non_roots_fn(&self) -> TraceNonRootsFn {
self.trace_non_roots_fn
}
pub(crate) fn run_finalizer_fn(&self) -> RunFinalizerFn {
self.run_finalizer_fn
}
pub(crate) fn drop_fn(&self) -> DropFn {
self.drop_fn
}
pub(crate) fn size(&self) -> usize {
self.size
}
}

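`vtable.rs` exists because Rust has no stable way to build a trait object's vtable by hand or add entries to it, hence the linked workaround: a private helper trait carries an associated `const VTABLE` whose entries are monomorphized shims that cast the erased pointer back to the concrete `GcBox<T>`. A self-contained miniature of the same pattern, with a hypothetical one-entry table:

    trait Describe {
        fn describe(&self) -> String;
    }

    struct VTable {
        describe_fn: fn(*const ()) -> String,
    }

    const fn vtable_of<T: Describe + 'static>() -> &'static VTable {
        trait HasVTable: Describe + Sized + 'static {
            const VTABLE: &'static VTable;
            fn describe_erased(this: *const ()) -> String {
                // SAFETY (sketch): the caller guarantees `this` points to `Self`.
                unsafe { (*this.cast::<Self>()).describe() }
            }
        }
        impl<T: Describe + 'static> HasVTable for T {
            const VTABLE: &'static VTable = &VTable {
                describe_fn: T::describe_erased,
            };
        }
        T::VTABLE
    }

    impl Describe for u32 {
        fn describe(&self) -> String {
            format!("u32: {self}")
        }
    }

    fn main() {
        let x = 7_u32;
        let vtable = vtable_of::<u32>();
        let erased = &x as *const u32 as *const ();
        assert_eq!((vtable.describe_fn)(erased), "u32: 7");
    }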
core/gc/src/internals/weak_map_box.rs (8 changes)

@ -1,4 +1,4 @@
use crate::{pointers::RawWeakMap, GcRefCell, Trace, WeakGc};
use crate::{pointers::RawWeakMap, GcRefCell, Trace, Tracer, WeakGc};
/// A box that is used to track [`WeakMap`][`crate::WeakMap`]s.
pub(crate) struct WeakMapBox<K: Trace + ?Sized + 'static, V: Trace + Sized + 'static> {
@ -14,7 +14,7 @@ pub(crate) trait ErasedWeakMapBox {
fn is_live(&self) -> bool;
/// Traces the weak reference inside of the [`WeakMapBox`] if the weak map is live.
unsafe fn trace(&self);
unsafe fn trace(&self, tracer: &mut Tracer);
}
impl<K: Trace + ?Sized, V: Trace + Clone> ErasedWeakMapBox for WeakMapBox<K, V> {
@ -30,10 +30,10 @@ impl<K: Trace + ?Sized, V: Trace + Clone> ErasedWeakMapBox for WeakMapBox<K, V>
self.map.upgrade().is_some()
}
unsafe fn trace(&self) {
unsafe fn trace(&self, tracer: &mut Tracer) {
if self.map.upgrade().is_some() {
// SAFETY: When the weak map is live, the weak reference should be traced.
unsafe { self.map.trace() }
unsafe { self.map.trace(tracer) }
}
}
}

core/gc/src/lib.rs (126 changes)

@ -25,20 +25,20 @@ pub(crate) mod internals;
use boa_profiler::Profiler;
use internals::{EphemeronBox, ErasedEphemeronBox, ErasedWeakMapBox, WeakMapBox};
use pointers::RawWeakMap;
use pointers::{NonTraceable, RawWeakMap};
use std::{
cell::{Cell, RefCell},
mem,
ptr::NonNull,
};
pub use crate::trace::{Finalize, Trace};
pub use crate::trace::{Finalize, Trace, Tracer};
pub use boa_macros::{Finalize, Trace};
pub use cell::{GcRef, GcRefCell, GcRefMut};
pub use internals::GcBox;
pub use pointers::{Ephemeron, Gc, WeakGc, WeakMap};
type GcPointer = NonNull<GcBox<dyn Trace>>;
type GcErasedPointer = NonNull<GcBox<NonTraceable>>;
type EphemeronPointer = NonNull<dyn ErasedEphemeronBox>;
type ErasedWeakMapBoxPointer = NonNull<dyn ErasedWeakMapBox>;
@ -79,7 +79,7 @@ struct GcRuntimeData {
struct BoaGc {
config: GcConfig,
runtime: GcRuntimeData,
strongs: Vec<GcPointer>,
strongs: Vec<GcErasedPointer>,
weaks: Vec<EphemeronPointer>,
weak_maps: Vec<ErasedWeakMapBoxPointer>,
}
@ -135,7 +135,7 @@ impl Allocator {
Self::manage_state(&mut gc);
// Safety: value cannot be a null pointer, since `Box` cannot return null pointers.
let ptr = unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(value))) };
let erased: NonNull<GcBox<dyn Trace>> = ptr;
let erased: NonNull<GcBox<NonTraceable>> = ptr.cast();
gc.strongs.push(erased);
gc.runtime.bytes_allocated += element_size;
@ -202,7 +202,7 @@ impl Allocator {
}
struct Unreachables {
strong: Vec<NonNull<GcBox<dyn Trace>>>,
strong: Vec<GcErasedPointer>,
weak: Vec<NonNull<dyn ErasedEphemeronBox>>,
}
@ -228,7 +228,11 @@ impl Collector {
Self::trace_non_roots(gc);
let unreachables = Self::mark_heap(&gc.strongs, &gc.weaks, &gc.weak_maps);
let mut tracer = Tracer::new();
let unreachables = Self::mark_heap(&mut tracer, &gc.strongs, &gc.weaks, &gc.weak_maps);
assert!(tracer.is_empty(), "The queue should be empty");
// Only finalize if there are any unreachable nodes.
if !unreachables.strong.is_empty() || !unreachables.weak.is_empty() {
@ -236,7 +240,9 @@ impl Collector {
// SAFETY: All passed pointers are valid, since we won't deallocate until `Self::sweep`.
unsafe { Self::finalize(unreachables) };
let _final_unreachables = Self::mark_heap(&gc.strongs, &gc.weaks, &gc.weak_maps);
// Reuse the tracer's already allocated capacity.
let _final_unreachables =
Self::mark_heap(&mut tracer, &gc.strongs, &gc.weaks, &gc.weak_maps);
}
// SAFETY: The head of our linked list is always valid per the invariants of our GC.
@ -277,8 +283,12 @@ impl Collector {
// Then, we can find whether there is a reference from other places, and they are the roots.
for node in &gc.strongs {
// SAFETY: node must be valid as this phase cannot drop any node.
let node_ref = unsafe { node.as_ref() };
node_ref.value().trace_non_roots();
let trace_non_roots_fn = unsafe { node.as_ref() }.trace_non_roots_fn();
// SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
unsafe {
trace_non_roots_fn(*node);
}
}
for eph in &gc.weaks {
@ -290,7 +300,8 @@ impl Collector {
/// Walk the heap and mark any nodes deemed reachable
fn mark_heap(
strongs: &[GcPointer],
tracer: &mut Tracer,
strongs: &[GcErasedPointer],
weaks: &[EphemeronPointer],
weak_maps: &[ErasedWeakMapBoxPointer],
) -> Unreachables {
@ -306,10 +317,26 @@ impl Collector {
for node in strongs {
// SAFETY: node must be valid as this phase cannot drop any node.
let node_ref = unsafe { node.as_ref() };
if node_ref.get_non_root_count() < node_ref.get_ref_count() {
// SAFETY: the gc heap object should be alive if there is a root.
unsafe {
node_ref.mark_and_trace();
if node_ref.is_rooted() {
tracer.enqueue(*node);
while let Some(node) = tracer.next() {
// SAFETY: the gc heap object should be alive if there is a root.
let node_ref = unsafe { node.as_ref() };
if !node_ref.header.is_marked() {
node_ref.header.mark();
// SAFETY: if `GcBox::trace_inner()` has been called, then,
// this box must have been deemed as reachable via tracing
// from a root, which by extension means that value has not
// been dropped either.
let trace_fn = node_ref.trace_fn();
// SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
unsafe { trace_fn(node, tracer) }
}
}
} else if !node_ref.is_marked() {
strong_dead.push(*node);
@ -338,13 +365,23 @@ impl Collector {
// SAFETY: node must be valid as this phase cannot drop any node.
let eph_ref = unsafe { eph.as_ref() };
let header = eph_ref.header();
if header.get_non_root_count() < header.get_ref_count() {
if header.is_rooted() {
header.mark();
}
// SAFETY: the garbage collector ensures `eph_ref` always points to valid data.
if unsafe { !eph_ref.trace() } {
if unsafe { !eph_ref.trace(tracer) } {
pending_ephemerons.push(*eph);
}
while let Some(node) = tracer.next() {
// SAFETY: node must be valid as this phase cannot drop any node.
let trace_fn = unsafe { node.as_ref() }.trace_fn();
// SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
unsafe {
trace_fn(node, tracer);
}
}
}
// 2. Trace all the weak pointers in the live weak maps to make sure they do not get swept.
@ -353,7 +390,17 @@ impl Collector {
let node_ref = unsafe { w.as_ref() };
// SAFETY: The garbage collector ensures that all nodes are valid.
unsafe { node_ref.trace() };
unsafe { node_ref.trace(tracer) };
while let Some(node) = tracer.next() {
// SAFETY: node must be valid as this phase cannot drop any node.
let trace_fn = unsafe { node.as_ref() }.trace_fn();
// SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
unsafe {
trace_fn(node, tracer);
}
}
}
// 3. Iterate through all pending ephemerons, removing the ones which have been successfully
@ -365,7 +412,19 @@ impl Collector {
// SAFETY: node must be valid as this phase cannot drop any node.
let eph_ref = unsafe { eph.as_ref() };
// SAFETY: the garbage collector ensures `eph_ref` always points to valid data.
unsafe { !eph_ref.trace() }
let is_key_marked = unsafe { !eph_ref.trace(tracer) };
while let Some(node) = tracer.next() {
// SAFETY: node must be valid as this phase cannot drop any node.
let trace_fn = unsafe { node.as_ref() }.trace_fn();
// SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
unsafe {
trace_fn(node, tracer);
}
}
is_key_marked
});
if previous_len == pending_ephemerons.len() {
@ -396,8 +455,13 @@ impl Collector {
let _timer = Profiler::global().start_event("Gc Finalization", "gc");
for node in unreachables.strong {
// SAFETY: The caller must ensure all pointers inside `unreachables.strong` are valid.
let node = unsafe { node.as_ref() };
Trace::run_finalizer(node.value());
let node_ref = unsafe { node.as_ref() };
let run_finalizer_fn = node_ref.run_finalizer_fn();
// SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
unsafe {
run_finalizer_fn(node);
}
}
for node in unreachables.weak {
// SAFETY: The caller must ensure all pointers inside `unreachables.weak` are valid.
@ -413,7 +477,7 @@ impl Collector {
/// - Providing a list of pointers that weren't allocated by `Box::into_raw(Box::new(..))`
/// will result in Undefined Behaviour.
unsafe fn sweep(
strong: &mut Vec<GcPointer>,
strong: &mut Vec<GcErasedPointer>,
weak: &mut Vec<EphemeronPointer>,
total_allocated: &mut usize,
) {
@ -431,9 +495,14 @@ impl Collector {
} else {
// SAFETY: The algorithm ensures only unmarked/unreachable pointers are dropped.
// The caller must ensure all pointers were allocated by `Box::into_raw(Box::new(..))`.
let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
let unallocated_bytes = mem::size_of_val(&*unmarked_node);
*total_allocated -= unallocated_bytes;
let drop_fn = node_ref.drop_fn();
let size = node_ref.size();
*total_allocated -= size;
// SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
unsafe {
drop_fn(*node);
}
false
}
@ -478,7 +547,12 @@ impl Collector {
// SAFETY:
// The `Allocator` must always ensure its start node is a valid, non-null pointer that
// was allocated by `Box::from_raw(Box::new(..))`.
let _unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
let drop_fn = unsafe { node.as_ref() }.drop_fn();
// SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
unsafe {
drop_fn(node);
}
}
for node in std::mem::take(&mut gc.weaks) {

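A consequence visible in `sweep` above: `GcErasedPointer` is a thin pointer to `GcBox<NonTraceable>`, so the collector can no longer rebuild a `Box<GcBox<dyn Trace>>` to free an allocation or ask `size_of_val` for its size; it calls the vtable's `drop_fn` and subtracts the pre-recorded `size` instead. A self-contained sketch of freeing through an erased drop function (hypothetical types):

    use std::ptr::NonNull;

    // Hypothetical erased type, playing the role of `GcBox<NonTraceable>`.
    struct Erased(());

    // Monomorphized shim, the kind of entry `VTable::drop_fn` stores.
    unsafe fn drop_u32(this: NonNull<Erased>) {
        // SAFETY: the caller guarantees `this` was allocated as a `Box<u32>`.
        drop(unsafe { Box::from_raw(this.as_ptr().cast::<u32>()) });
    }

    fn main() {
        let ptr: NonNull<Erased> = NonNull::new(Box::into_raw(Box::new(7_u32)))
            .expect("Box never returns null")
            .cast();
        // The collector only ever sees the erased pointer and the fn pointer.
        let drop_fn: unsafe fn(NonNull<Erased>) = drop_u32;
        // SAFETY: `ptr` was allocated as a `Box<u32>` above.
        unsafe { drop_fn(ptr) };
    }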
core/gc/src/pointers/ephemeron.rs (6 changes)

@ -2,7 +2,7 @@ use crate::{
finalizer_safe,
internals::EphemeronBox,
trace::{Finalize, Trace},
Allocator, Gc,
Allocator, Gc, Tracer,
};
use std::ptr::NonNull;
@ -108,7 +108,7 @@ impl<K: Trace + ?Sized, V: Trace> Finalize for Ephemeron<K, V> {
// SAFETY: `Ephemeron`s trace implementation only marks its inner box because we want to stop
// tracing through weakly held pointers.
unsafe impl<K: Trace + ?Sized, V: Trace> Trace for Ephemeron<K, V> {
unsafe fn trace(&self) {
unsafe fn trace(&self, _tracer: &mut Tracer) {
// SAFETY: We need to mark the inner box of the `Ephemeron` since it is reachable
// from a root and this means it cannot be dropped.
unsafe {
@ -116,7 +116,7 @@ unsafe impl<K: Trace + ?Sized, V: Trace> Trace for Ephemeron<K, V> {
}
}
fn trace_non_roots(&self) {
unsafe fn trace_non_roots(&self) {
self.inner().inc_non_root_count();
}

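`Ephemeron::trace` accepts the tracer but deliberately ignores it: enqueueing through a weak edge would keep the key alive and defeat the ephemeron semantics, so only the inner box is marked. A behavioral sketch using the public API, assuming `Ephemeron::new` and `value` behave as in the weak tests above:

    use boa_gc::{force_collect, Ephemeron, Gc};

    fn main() {
        let key = Gc::new(1_i32);
        let eph = Ephemeron::new(&key, 2_i32);
        assert!(eph.value().is_some());

        // Dropping the last strong handle leaves only the weak edge, which
        // `Ephemeron::trace` intentionally does not follow.
        drop(key);
        force_collect();
        assert!(eph.value().is_none());
    }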
core/gc/src/pointers/gc.rs (48 changes)

@ -2,7 +2,7 @@ use crate::{
finalizer_safe,
internals::{EphemeronBox, GcBox},
trace::{Finalize, Trace},
Allocator, Ephemeron, WeakGc,
Allocator, Ephemeron, GcErasedPointer, Tracer, WeakGc,
};
use std::{
cmp::Ordering,
@ -14,6 +14,39 @@ use std::{
rc::Rc,
};
/// Zero-sized struct used to ensure that we never call the erased value's trace methods,
/// its finalization method, or its drop.
///
/// Any such call can only happen if we access a [`GcErasedPointer`]'s value directly, which is
/// a bug, so every method panics.
///
/// Note: Accessing the [`crate::internals::GcHeader`] of a [`GcErasedPointer`] is fine.
pub(crate) struct NonTraceable(());
impl Finalize for NonTraceable {
fn finalize(&self) {
unreachable!()
}
}
unsafe impl Trace for NonTraceable {
unsafe fn trace(&self, _tracer: &mut Tracer) {
unreachable!()
}
unsafe fn trace_non_roots(&self) {
unreachable!()
}
fn run_finalizer(&self) {
unreachable!()
}
}
impl Drop for NonTraceable {
fn drop(&mut self) {
unreachable!()
}
}
/// A garbage-collected pointer type over an immutable value.
pub struct Gc<T: Trace + ?Sized + 'static> {
pub(crate) inner_ptr: NonNull<GcBox<T>>,
@ -98,6 +131,10 @@ impl<T: Trace + ?Sized> Gc<T> {
marker: PhantomData,
}
}
pub(crate) fn as_erased(&self) -> GcErasedPointer {
self.inner_ptr.cast()
}
}
impl<T: Trace + ?Sized> Gc<T> {
@ -125,14 +162,11 @@ impl<T: Trace + ?Sized> Finalize for Gc<T> {
// SAFETY: `Gc` maintains its own rootedness and implements all methods of
// Trace. It is not possible to root an already rooted `Gc` and vice versa.
unsafe impl<T: Trace + ?Sized> Trace for Gc<T> {
unsafe fn trace(&self) {
// SAFETY: Inner must be live and allocated GcBox.
unsafe {
self.inner().mark_and_trace();
}
unsafe fn trace(&self, tracer: &mut Tracer) {
tracer.enqueue(self.as_erased());
}
fn trace_non_roots(&self) {
unsafe fn trace_non_roots(&self) {
self.inner().inc_non_root_count();
}

core/gc/src/pointers/mod.rs (1 change)

@ -10,4 +10,5 @@ pub use gc::Gc;
pub use weak::WeakGc;
pub use weak_map::WeakMap;
pub(crate) use gc::NonTraceable;
pub(crate) use weak_map::RawWeakMap;

core/gc/src/pointers/weak_map.rs (4 changes)

@ -17,7 +17,7 @@ pub struct WeakMap<K: Trace + ?Sized + 'static, V: Trace + 'static> {
}
unsafe impl<K: Trace + ?Sized + 'static, V: Trace + 'static> Trace for WeakMap<K, V> {
custom_trace!(this, {
custom_trace!(this, mark, {
mark(&this.inner);
});
}
@ -84,7 +84,7 @@ where
K: Trace + ?Sized + 'static,
V: Trace + 'static,
{
custom_trace!(this, {
custom_trace!(this, mark, {
for eph in this.iter() {
mark(eph);
}

core/gc/src/test/allocation.rs (35 changes)

@ -1,5 +1,7 @@
use boa_macros::{Finalize, Trace};
use super::{run_test, Harness};
use crate::{force_collect, Gc, GcRefCell};
use crate::{force_collect, Gc, GcBox, GcRefCell};
#[test]
fn gc_basic_cell_allocation() {
@ -29,3 +31,34 @@ fn gc_basic_pointer_alloc() {
Harness::assert_empty_gc();
});
}
#[test]
// Takes too long to finish in miri
#[cfg_attr(miri, ignore)]
fn gc_recursion() {
run_test(|| {
#[derive(Debug, Finalize, Trace)]
struct S {
i: usize,
next: Option<Gc<S>>,
}
const SIZE: usize = std::mem::size_of::<GcBox<S>>();
const COUNT: usize = 1_000_000;
let mut root = Gc::new(S { i: 0, next: None });
for i in 1..COUNT {
root = Gc::new(S {
i,
next: Some(root),
});
}
Harness::assert_bytes_allocated();
Harness::assert_exact_bytes_allocated(SIZE * COUNT);
drop(root);
force_collect();
Harness::assert_empty_gc();
});
}

core/gc/src/test/weak.rs (4 changes)

@ -164,7 +164,7 @@ fn eph_self_referential() {
*root.inner.inner.borrow_mut() = Some(eph.clone());
assert!(eph.value().is_some());
Harness::assert_exact_bytes_allocated(48);
Harness::assert_exact_bytes_allocated(56);
}
*root.inner.inner.borrow_mut() = None;
@ -210,7 +210,7 @@ fn eph_self_referential_chain() {
assert!(eph_start.value().is_some());
assert!(eph_chain2.value().is_some());
Harness::assert_exact_bytes_allocated(132);
Harness::assert_exact_bytes_allocated(168);
}
*root.borrow_mut() = None;

core/gc/src/trace.rs (124 changes)

@ -14,6 +14,35 @@ use std::{
sync::atomic,
};
use crate::GcErasedPointer;
/// A queue used to trace [`crate::Gc<T>`] non-recursively.
#[doc(hidden)]
#[allow(missing_debug_implementations)]
pub struct Tracer {
queue: VecDeque<GcErasedPointer>,
}
impl Tracer {
pub(crate) fn new() -> Self {
Self {
queue: VecDeque::default(),
}
}
pub(crate) fn enqueue(&mut self, node: GcErasedPointer) {
self.queue.push_back(node);
}
pub(crate) fn next(&mut self) -> Option<GcErasedPointer> {
self.queue.pop_front()
}
pub(crate) fn is_empty(&mut self) -> bool {
self.queue.is_empty()
}
}
/// Substitute for the [`Drop`] trait for garbage collected types.
pub trait Finalize {
/// Cleanup logic for a type.
@ -35,10 +64,14 @@ pub unsafe trait Trace: Finalize {
/// # Safety
///
/// See [`Trace`].
unsafe fn trace(&self);
unsafe fn trace(&self, tracer: &mut Tracer);
/// Trace handles located in GC heap, and mark them as non root.
fn trace_non_roots(&self);
///
/// # Safety
///
/// See [`Trace`].
unsafe fn trace_non_roots(&self);
/// Runs [`Finalize::finalize`] on this object and all
/// contained subobjects.
@ -52,9 +85,9 @@ pub unsafe trait Trace: Finalize {
macro_rules! empty_trace {
() => {
#[inline]
unsafe fn trace(&self) {}
unsafe fn trace(&self, _tracer: &mut $crate::Tracer) {}
#[inline]
fn trace_non_roots(&self) {}
unsafe fn trace_non_roots(&self) {}
#[inline]
fn run_finalizer(&self) {
$crate::Finalize::finalize(self)
@ -73,29 +106,32 @@ macro_rules! empty_trace {
/// Misusing the `mark` function may result in Undefined Behaviour.
#[macro_export]
macro_rules! custom_trace {
($this:ident, $body:expr) => {
($this:ident, $marker:ident, $body:expr) => {
#[inline]
unsafe fn trace(&self) {
fn mark<T: $crate::Trace + ?Sized>(it: &T) {
unsafe fn trace(&self, tracer: &mut $crate::Tracer) {
let mut $marker = |it: &dyn $crate::Trace| {
// SAFETY: The implementor must ensure that `trace` is correctly implemented.
unsafe {
$crate::Trace::trace(it);
$crate::Trace::trace(it, tracer);
}
}
};
let $this = self;
$body
}
#[inline]
fn trace_non_roots(&self) {
fn mark<T: $crate::Trace + ?Sized>(it: &T) {
$crate::Trace::trace_non_roots(it);
unsafe fn trace_non_roots(&self) {
fn $marker<T: $crate::Trace + ?Sized>(it: &T) {
// SAFETY: The implementor must ensure that `trace` is correctly implemented.
unsafe {
$crate::Trace::trace_non_roots(it);
}
}
let $this = self;
$body
}
#[inline]
fn run_finalizer(&self) {
fn mark<T: $crate::Trace + ?Sized>(it: &T) {
fn $marker<T: $crate::Trace + ?Sized>(it: &T) {
$crate::Trace::run_finalizer(it);
}
$crate::Finalize::finalize(self);
@ -180,7 +216,7 @@ impl<T: Trace, const N: usize> Finalize for [T; N] {}
// SAFETY:
// All elements inside the array are correctly marked.
unsafe impl<T: Trace, const N: usize> Trace for [T; N] {
custom_trace!(this, {
custom_trace!(this, mark, {
for v in this {
mark(v);
}
@ -219,12 +255,12 @@ macro_rules! tuple_finalize_trace {
// SAFETY:
// All elements inside the tuple are correctly marked.
unsafe impl<$($args: $crate::Trace),*> Trace for ($($args,)*) {
custom_trace!(this, {
#[allow(non_snake_case, unused_unsafe)]
fn avoid_lints<$($args: $crate::Trace),*>(&($(ref $args,)*): &($($args,)*)) {
custom_trace!(this, mark, {
#[allow(non_snake_case, unused_unsafe, unused_mut)]
let mut avoid_lints = |&($(ref $args,)*): &($($args,)*)| {
// SAFETY: The implementor must ensure a correct implementation.
unsafe { $(mark($args);)* }
}
};
avoid_lints(this)
});
}
@ -259,15 +295,31 @@ type_arg_tuple_based_finalize_trace_impls![
impl<T: Trace + ?Sized> Finalize for Box<T> {}
// SAFETY: The inner value of the `Box` is correctly marked.
unsafe impl<T: Trace + ?Sized> Trace for Box<T> {
custom_trace!(this, {
mark(&**this);
});
#[inline]
unsafe fn trace(&self, tracer: &mut Tracer) {
// SAFETY: The implementor must ensure that `trace` is correctly implemented.
unsafe {
Trace::trace(&**self, tracer);
}
}
#[inline]
unsafe fn trace_non_roots(&self) {
// SAFETY: The implementor must ensure that `trace_non_roots` is correctly implemented.
unsafe {
Trace::trace_non_roots(&**self);
}
}
#[inline]
fn run_finalizer(&self) {
Finalize::finalize(self);
Trace::run_finalizer(&**self);
}
}
impl<T: Trace> Finalize for Box<[T]> {}
// SAFETY: All the inner elements of the `Box` array are correctly marked.
unsafe impl<T: Trace> Trace for Box<[T]> {
custom_trace!(this, {
custom_trace!(this, mark, {
for e in &**this {
mark(e);
}
@ -277,7 +329,7 @@ unsafe impl<T: Trace> Trace for Box<[T]> {
impl<T: Trace> Finalize for Vec<T> {}
// SAFETY: All the inner elements of the `Vec` are correctly marked.
unsafe impl<T: Trace> Trace for Vec<T> {
custom_trace!(this, {
custom_trace!(this, mark, {
for e in this {
mark(e);
}
@ -290,7 +342,7 @@ impl<T: Trace> Finalize for thin_vec::ThinVec<T> {}
#[cfg(feature = "thin-vec")]
// SAFETY: All the inner elements of the `Vec` are correctly marked.
unsafe impl<T: Trace> Trace for thin_vec::ThinVec<T> {
custom_trace!(this, {
custom_trace!(this, mark, {
for e in this {
mark(e);
}
@ -300,7 +352,7 @@ unsafe impl<T: Trace> Trace for thin_vec::ThinVec<T> {
impl<T: Trace> Finalize for Option<T> {}
// SAFETY: The inner value of the `Option` is correctly marked.
unsafe impl<T: Trace> Trace for Option<T> {
custom_trace!(this, {
custom_trace!(this, mark, {
if let Some(ref v) = *this {
mark(v);
}
@ -310,7 +362,7 @@ unsafe impl<T: Trace> Trace for Option<T> {
impl<T: Trace, E: Trace> Finalize for Result<T, E> {}
// SAFETY: Both inner values of the `Result` are correctly marked.
unsafe impl<T: Trace, E: Trace> Trace for Result<T, E> {
custom_trace!(this, {
custom_trace!(this, mark, {
match *this {
Ok(ref v) => mark(v),
Err(ref v) => mark(v),
@ -321,7 +373,7 @@ unsafe impl<T: Trace, E: Trace> Trace for Result<T, E> {
impl<T: Ord + Trace> Finalize for BinaryHeap<T> {}
// SAFETY: All the elements of the `BinaryHeap` are correctly marked.
unsafe impl<T: Ord + Trace> Trace for BinaryHeap<T> {
custom_trace!(this, {
custom_trace!(this, mark, {
for v in this {
mark(v);
}
@ -331,7 +383,7 @@ unsafe impl<T: Ord + Trace> Trace for BinaryHeap<T> {
impl<K: Trace, V: Trace> Finalize for BTreeMap<K, V> {}
// SAFETY: All the elements of the `BTreeMap` are correctly marked.
unsafe impl<K: Trace, V: Trace> Trace for BTreeMap<K, V> {
custom_trace!(this, {
custom_trace!(this, mark, {
for (k, v) in this {
mark(k);
mark(v);
@ -342,7 +394,7 @@ unsafe impl<K: Trace, V: Trace> Trace for BTreeMap<K, V> {
impl<T: Trace> Finalize for BTreeSet<T> {}
// SAFETY: All the elements of the `BTreeSet` are correctly marked.
unsafe impl<T: Trace> Trace for BTreeSet<T> {
custom_trace!(this, {
custom_trace!(this, mark, {
for v in this {
mark(v);
}
@ -357,7 +409,7 @@ impl<K: Eq + Hash + Trace, V: Trace, S: BuildHasher> Finalize
unsafe impl<K: Eq + Hash + Trace, V: Trace, S: BuildHasher> Trace
for hashbrown::hash_map::HashMap<K, V, S>
{
custom_trace!(this, {
custom_trace!(this, mark, {
for (k, v) in this {
mark(k);
mark(v);
@ -368,7 +420,7 @@ unsafe impl<K: Eq + Hash + Trace, V: Trace, S: BuildHasher> Trace
impl<K: Eq + Hash + Trace, V: Trace, S: BuildHasher> Finalize for HashMap<K, V, S> {}
// SAFETY: All the elements of the `HashMap` are correctly marked.
unsafe impl<K: Eq + Hash + Trace, V: Trace, S: BuildHasher> Trace for HashMap<K, V, S> {
custom_trace!(this, {
custom_trace!(this, mark, {
for (k, v) in this {
mark(k);
mark(v);
@ -379,7 +431,7 @@ unsafe impl<K: Eq + Hash + Trace, V: Trace, S: BuildHasher> Trace for HashMap<K,
impl<T: Eq + Hash + Trace, S: BuildHasher> Finalize for HashSet<T, S> {}
// SAFETY: All the elements of the `HashSet` are correctly marked.
unsafe impl<T: Eq + Hash + Trace, S: BuildHasher> Trace for HashSet<T, S> {
custom_trace!(this, {
custom_trace!(this, mark, {
for v in this {
mark(v);
}
@ -389,7 +441,7 @@ unsafe impl<T: Eq + Hash + Trace, S: BuildHasher> Trace for HashSet<T, S> {
impl<T: Eq + Hash + Trace> Finalize for LinkedList<T> {}
// SAFETY: All the elements of the `LinkedList` are correctly marked.
unsafe impl<T: Eq + Hash + Trace> Trace for LinkedList<T> {
custom_trace!(this, {
custom_trace!(this, mark, {
#[allow(clippy::explicit_iter_loop)]
for v in this.iter() {
mark(v);
@ -406,7 +458,7 @@ unsafe impl<T> Trace for PhantomData<T> {
impl<T: Trace> Finalize for VecDeque<T> {}
// SAFETY: All the elements of the `VecDeque` are correctly marked.
unsafe impl<T: Trace> Trace for VecDeque<T> {
custom_trace!(this, {
custom_trace!(this, mark, {
for v in this {
mark(v);
}
@ -420,7 +472,7 @@ unsafe impl<T: ToOwned + Trace + ?Sized> Trace for Cow<'static, T>
where
T::Owned: Trace,
{
custom_trace!(this, {
custom_trace!(this, mark, {
if let Cow::Owned(ref v) = this {
mark(v);
}
@ -431,7 +483,7 @@ impl<T: Trace> Finalize for Cell<Option<T>> {}
// SAFETY: Taking and setting is done in a single action, and recursive traces should find a `None`
// value instead of the original `T`, making this safe.
unsafe impl<T: Trace> Trace for Cell<Option<T>> {
custom_trace!(this, {
custom_trace!(this, mark, {
if let Some(v) = this.take() {
mark(&v);
this.set(Some(v));

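Under the new trait, `trace` receives the queue and both `trace` and `trace_non_roots` are `unsafe fn`. For a type that does not use `custom_trace!`, a hand-written impl looks roughly like this (illustrative type; the SAFETY obligations are those stated in the trait docs above):

    use boa_gc::{Finalize, Gc, Trace, Tracer};

    // Illustrative type with two GC-managed fields.
    struct Pair {
        left: Gc<i32>,
        right: Gc<i32>,
    }

    impl Finalize for Pair {}

    // SAFETY: every GC-managed field is visited in all three methods.
    unsafe impl Trace for Pair {
        unsafe fn trace(&self, tracer: &mut Tracer) {
            // SAFETY: `Gc::trace` only enqueues the pointee (see pointers/gc.rs).
            unsafe {
                self.left.trace(tracer);
                self.right.trace(tracer);
            }
        }

        unsafe fn trace_non_roots(&self) {
            // SAFETY: this only bumps the non-root counters of the pointees.
            unsafe {
                self.left.trace_non_roots();
                self.right.trace_non_roots();
            }
        }

        fn run_finalizer(&self) {
            Finalize::finalize(self);
            self.left.run_finalizer();
            self.right.run_finalizer();
        }
    }

    fn main() {
        let p = Pair { left: Gc::new(1), right: Gc::new(2) };
        assert_eq!(*p.left + *p.right, 3);
    }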
core/macros/src/lib.rs (62 changes)

@ -179,25 +179,29 @@ decl_derive! {
fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream {
struct EmptyTrace {
copy: bool,
drop: bool,
}
impl Parse for EmptyTrace {
fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
let i: Ident = input.parse()?;
if i != "empty_trace" && i != "unsafe_empty_trace" {
if i != "empty_trace" && i != "unsafe_empty_trace" && i != "unsafe_no_drop" {
let msg = format!(
"expected token \"empty_trace\" or \"unsafe_empty_trace\", found {i:?}"
"expected token \"empty_trace\", \"unsafe_empty_trace\" or \"unsafe_no_drop\", found {i:?}"
);
return Err(syn::Error::new_spanned(i.clone(), msg));
}
Ok(Self {
copy: i == "empty_trace",
drop: i == "empty_trace" || i != "unsafe_no_drop",
})
}
}
let mut drop = true;
for attr in &s.ast().attrs {
if attr.path().is_ident("boa_gc") {
let trace = match attr.parse_args::<EmptyTrace>() {
@ -209,13 +213,18 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream {
s.add_where_predicate(syn::parse_quote!(Self: Copy));
}
if !trace.drop {
drop = false;
continue;
}
return s.unsafe_bound_impl(
quote!(::boa_gc::Trace),
quote! {
#[inline(always)]
unsafe fn trace(&self) {}
unsafe fn trace(&self, _tracer: &mut ::boa_gc::Tracer) {}
#[inline(always)]
fn trace_non_roots(&self) {}
unsafe fn trace_non_roots(&self) {}
#[inline]
fn run_finalizer(&self) {
::boa_gc::Finalize::finalize(self)
@ -231,31 +240,34 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream {
.iter()
.any(|attr| attr.path().is_ident("unsafe_ignore_trace"))
});
let trace_body = s.each(|bi| quote!(mark(#bi)));
let trace_body = s.each(|bi| quote!(::boa_gc::Trace::trace(#bi, tracer)));
let trace_other_body = s.each(|bi| quote!(mark(#bi)));
s.add_bounds(AddBounds::Fields);
let trace_impl = s.unsafe_bound_impl(
quote!(::boa_gc::Trace),
quote! {
#[inline]
unsafe fn trace(&self) {
unsafe fn trace(&self, tracer: &mut ::boa_gc::Tracer) {
#[allow(dead_code)]
fn mark<T: ::boa_gc::Trace + ?Sized>(it: &T) {
let mut mark = |it: &dyn ::boa_gc::Trace| {
// SAFETY: The implementor must ensure that `trace` is correctly implemented.
unsafe {
::boa_gc::Trace::trace(it);
::boa_gc::Trace::trace(it, tracer);
}
}
};
match *self { #trace_body }
}
#[inline]
fn trace_non_roots(&self) {
unsafe fn trace_non_roots(&self) {
#[allow(dead_code)]
fn mark<T: ::boa_gc::Trace + ?Sized>(it: &T) {
// SAFETY: The implementor must ensure that `trace_non_roots` is correctly implemented.
unsafe {
::boa_gc::Trace::trace_non_roots(it);
}
}
match *self { #trace_body }
match *self { #trace_other_body }
}
#[inline]
fn run_finalizer(&self) {
@ -266,7 +278,7 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream {
::boa_gc::Trace::run_finalizer(it);
}
}
match *self { #trace_body }
match *self { #trace_other_body }
}
},
);
@ -274,18 +286,22 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream {
// We also implement drop to prevent unsafe drop implementations on this
// type and encourage people to use Finalize. This implementation will
// call `Finalize::finalize` if it is safe to do so.
let drop_impl = s.unbound_impl(
quote!(::core::ops::Drop),
quote! {
#[allow(clippy::inline_always)]
#[inline(always)]
fn drop(&mut self) {
if ::boa_gc::finalizer_safe() {
::boa_gc::Finalize::finalize(self);
let drop_impl = if drop {
s.unbound_impl(
quote!(::core::ops::Drop),
quote! {
#[allow(clippy::inline_always)]
#[inline(always)]
fn drop(&mut self) {
if ::boa_gc::finalizer_safe() {
::boa_gc::Finalize::finalize(self);
}
}
}
},
);
},
)
} else {
quote!()
};
quote! {
#trace_impl

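The derive macro now accepts a third attribute token, `unsafe_no_drop`. Deriving `Trace` normally also emits a `Drop` impl that routes through `Finalize`, and a type with `Drop` cannot be destructured; opting out restores destructuring, which is how `Object<T>` in `core/engine/src/object/mod.rs` above trades its manual `Trace` impl for a derive. A minimal sketch of the usage:

    use boa_gc::{Finalize, Gc, Trace};

    // Hypothetical type. Without `unsafe_no_drop` the derive would emit a
    // `Drop` impl, and the destructuring in `main` would not compile.
    #[derive(Finalize, Trace)]
    // SAFETY: `State` does not implement `Drop` itself, so skipping the
    // generated `Drop` is sound.
    #[boa_gc(unsafe_no_drop)]
    struct State {
        value: Gc<i32>,
    }

    fn main() {
        let state = State { value: Gc::new(1) };
        let State { value } = state;
        assert_eq!(*value, 1);
    }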