
Improve the design of ephemerons in our GC (#2530)

This PR changes the following:
- Modifies `EphemeronBox` to be more akin to `GcBox`, with its own header, root count and mark flag. This also makes it more similar to [Racket's](https://docs.racket-lang.org/reference/ephemerons.html) implementation (a usage sketch follows below).
- Removes `EPHEMERON_QUEUE`.
- Tracks ephemerons on a dedicated `weak_start` linked list instead of `strong_start`, which is where all other GC boxes live.
- Documents all unsafe blocks.
- Documents our current garbage collection algorithm. Hopefully this clarifies what exactly we do on every garbage collection.
- Renames/removes some functions.
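As a rough illustration of the semantics this redesign targets, here is a hypothetical usage sketch. The names (`Gc`, `Ephemeron`, `force_collect`, the clone-returning `value()`) come from the diff below; the exact collection timing and assertions are assumptions, not part of this PR:

```rust
use boa_gc::{force_collect, Ephemeron, Gc};

fn main() {
    let key = Gc::new(42_i32);
    // The ephemeron holds `key` weakly: it never keeps the key alive by itself.
    let eph = Ephemeron::new(&key, String::from("hello"));

    // While the key is rooted, the value is retrievable (returned by clone).
    assert_eq!(eph.value(), Some(String::from("hello")));

    drop(key);
    force_collect();

    // Once the key is collected, the value becomes inaccessible.
    assert_eq!(eph.value(), None);
}
```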
José Julián Espina · 2 years ago · commit 7c9eef8edc
Changed files (lines changed per file):

1. boa_engine/src/builtins/async_generator/mod.rs (6)
2. boa_engine/src/builtins/generator/mod.rs (4)
3. boa_engine/src/builtins/promise/mod.rs (18)
4. boa_engine/src/bytecompiler/mod.rs (4)
5. boa_engine/src/environments/compile.rs (8)
6. boa_engine/src/environments/runtime.rs (30)
7. boa_engine/src/object/jsobject.rs (20)
8. boa_engine/src/object/mod.rs (8)
9. boa_engine/src/realm.rs (7)
10. boa_engine/src/vm/code_block.rs (8)
11. boa_engine/src/vm/opcode/await_stm/mod.rs (6)
12. boa_examples/src/bin/closures.rs (4)
13. boa_gc/src/cell.rs (94)
14. boa_gc/src/internals/ephemeron_box.rs (257)
15. boa_gc/src/internals/gc_box.rs (59)
16. boa_gc/src/internals/mod.rs (2)
17. boa_gc/src/lib.rs (306)
18. boa_gc/src/pointers/ephemeron.rs (156)
19. boa_gc/src/pointers/gc.rs (122)
20. boa_gc/src/pointers/mod.rs (1)
21. boa_gc/src/pointers/rootable.rs (87)
22. boa_gc/src/pointers/weak.rs (23)
23. boa_gc/src/test/allocation.rs (4)
24. boa_gc/src/test/cell.rs (6)
25. boa_gc/src/test/mod.rs (9)
26. boa_gc/src/test/weak.rs (128)
27. boa_gc/src/trace.rs (28)
28. boa_macros/src/lib.rs (10)

boa_engine/src/builtins/async_generator/mod.rs (6 lines changed)

@@ -19,7 +19,7 @@ use crate::{
vm::GeneratorResumeKind,
Context, JsError, JsResult,
};
use boa_gc::{Finalize, Gc, GcCell, Trace};
use boa_gc::{Finalize, Gc, GcRefCell, Trace};
use boa_profiler::Profiler;
use std::collections::VecDeque;
@@ -57,7 +57,7 @@ pub struct AsyncGenerator {
pub(crate) state: AsyncGeneratorState,
/// The `[[AsyncGeneratorContext]]` internal slot.
pub(crate) context: Option<Gc<GcCell<GeneratorContext>>>,
pub(crate) context: Option<Gc<GcRefCell<GeneratorContext>>>,
/// The `[[AsyncGeneratorQueue]]` internal slot.
pub(crate) queue: VecDeque<AsyncGeneratorRequest>,
@@ -512,7 +512,7 @@ impl AsyncGenerator {
pub(crate) fn resume(
generator: &JsObject,
state: AsyncGeneratorState,
generator_context: &Gc<GcCell<GeneratorContext>>,
generator_context: &Gc<GcRefCell<GeneratorContext>>,
completion: (JsResult<JsValue>, bool),
context: &mut Context<'_>,
) {

boa_engine/src/builtins/generator/mod.rs (4 lines changed)

@@ -20,7 +20,7 @@ use crate::{
vm::{CallFrame, GeneratorResumeKind, ReturnType},
Context, JsError, JsResult,
};
use boa_gc::{Finalize, Gc, GcCell, Trace};
use boa_gc::{Finalize, Gc, GcRefCell, Trace};
use boa_profiler::Profiler;
/// Indicates the state of a generator.
@@ -52,7 +52,7 @@ pub struct Generator {
pub(crate) state: GeneratorState,
/// The `[[GeneratorContext]]` internal slot.
pub(crate) context: Option<Gc<GcCell<GeneratorContext>>>,
pub(crate) context: Option<Gc<GcRefCell<GeneratorContext>>>,
}
impl BuiltIn for Generator {

boa_engine/src/builtins/promise/mod.rs (18 lines changed)

@@ -19,7 +19,7 @@ use crate::{
value::JsValue,
Context, JsError, JsResult,
};
use boa_gc::{Finalize, Gc, GcCell, Trace};
use boa_gc::{Finalize, Gc, GcRefCell, Trace};
use boa_profiler::Profiler;
use std::{cell::Cell, rc::Rc};
use tap::{Conv, Pipe};
@@ -161,7 +161,7 @@ impl PromiseCapability {
// 2. NOTE: C is assumed to be a constructor function that supports the parameter conventions of the Promise constructor (see 27.2.3.1).
// 3. Let promiseCapability be the PromiseCapability Record { [[Promise]]: undefined, [[Resolve]]: undefined, [[Reject]]: undefined }.
let promise_capability = Gc::new(GcCell::new(RejectResolve {
let promise_capability = Gc::new(GcRefCell::new(RejectResolve {
reject: JsValue::undefined(),
resolve: JsValue::undefined(),
}));
@@ -208,7 +208,7 @@ impl PromiseCapability {
.into();
// 6. Let promise be ? Construct(C, « executor »).
let promise = c.construct(&[executor], Some(&c), context)?;
let promise = c.construct(&[executor], None, context)?;
let promise_capability = promise_capability.borrow();
@@ -470,14 +470,14 @@ impl Promise {
#[unsafe_ignore_trace]
already_called: Rc<Cell<bool>>,
index: usize,
values: Gc<GcCell<Vec<JsValue>>>,
values: Gc<GcRefCell<Vec<JsValue>>>,
capability_resolve: JsFunction,
#[unsafe_ignore_trace]
remaining_elements_count: Rc<Cell<i32>>,
}
// 1. Let values be a new empty List.
let values = Gc::new(GcCell::new(Vec::new()));
let values = Gc::new(GcRefCell::new(Vec::new()));
// 2. Let remainingElementsCount be the Record { [[Value]]: 1 }.
let remaining_elements_count = Rc::new(Cell::new(1));
@@ -714,14 +714,14 @@ impl Promise {
#[unsafe_ignore_trace]
already_called: Rc<Cell<bool>>,
index: usize,
values: Gc<GcCell<Vec<JsValue>>>,
values: Gc<GcRefCell<Vec<JsValue>>>,
capability: JsFunction,
#[unsafe_ignore_trace]
remaining_elements: Rc<Cell<i32>>,
}
// 1. Let values be a new empty List.
let values = Gc::new(GcCell::new(Vec::new()));
let values = Gc::new(GcRefCell::new(Vec::new()));
// 2. Let remainingElementsCount be the Record { [[Value]]: 1 }.
let remaining_elements_count = Rc::new(Cell::new(1));
@@ -1057,14 +1057,14 @@ impl Promise {
#[unsafe_ignore_trace]
already_called: Rc<Cell<bool>>,
index: usize,
errors: Gc<GcCell<Vec<JsValue>>>,
errors: Gc<GcRefCell<Vec<JsValue>>>,
capability_reject: JsFunction,
#[unsafe_ignore_trace]
remaining_elements_count: Rc<Cell<i32>>,
}
// 1. Let errors be a new empty List.
let errors = Gc::new(GcCell::new(Vec::new()));
let errors = Gc::new(GcRefCell::new(Vec::new()));
// 2. Let remainingElementsCount be the Record { [[Value]]: 1 }.
let remaining_elements_count = Rc::new(Cell::new(1));

boa_engine/src/bytecompiler/mod.rs (4 lines changed)

@@ -27,7 +27,7 @@ use boa_ast::{
pattern::Pattern,
Declaration, Expression, Statement, StatementList, StatementListItem,
};
use boa_gc::{Gc, GcCell};
use boa_gc::{Gc, GcRefCell};
use boa_interner::{Interner, Sym};
use rustc_hash::FxHashMap;
@@ -246,7 +246,7 @@ impl<'b, 'host> ByteCompiler<'b, 'host> {
/// Push a compile time environment to the current `CodeBlock` and return it's index.
fn push_compile_environment(
&mut self,
environment: Gc<GcCell<CompileTimeEnvironment>>,
environment: Gc<GcRefCell<CompileTimeEnvironment>>,
) -> usize {
let index = self.code_block.compile_environments.len();
self.code_block.compile_environments.push(environment);

boa_engine/src/environments/compile.rs (8 lines changed)

@@ -2,7 +2,7 @@ use crate::{
environments::runtime::BindingLocator, property::PropertyDescriptor, Context, JsString, JsValue,
};
use boa_ast::expression::Identifier;
use boa_gc::{Finalize, Gc, GcCell, Trace};
use boa_gc::{Finalize, Gc, GcRefCell, Trace};
use rustc_hash::FxHashMap;
@@ -22,7 +22,7 @@ struct CompileTimeBinding {
/// A compile time environment also indicates, if it is a function environment.
#[derive(Debug, Finalize, Trace)]
pub(crate) struct CompileTimeEnvironment {
outer: Option<Gc<GcCell<Self>>>,
outer: Option<Gc<GcRefCell<Self>>>,
environment_index: usize,
#[unsafe_ignore_trace]
bindings: FxHashMap<Identifier, CompileTimeBinding>,
@@ -208,7 +208,7 @@ impl Context<'_> {
let environment_index = self.realm.compile_env.borrow().environment_index + 1;
let outer = self.realm.compile_env.clone();
self.realm.compile_env = Gc::new(GcCell::new(CompileTimeEnvironment {
self.realm.compile_env = Gc::new(GcRefCell::new(CompileTimeEnvironment {
outer: Some(outer),
environment_index,
bindings: FxHashMap::default(),
@@ -225,7 +225,7 @@ impl Context<'_> {
/// Panics if there are no more environments that can be pop'ed.
pub(crate) fn pop_compile_time_environment(
&mut self,
) -> (usize, Gc<GcCell<CompileTimeEnvironment>>) {
) -> (usize, Gc<GcRefCell<CompileTimeEnvironment>>) {
let current_env_borrow = self.realm.compile_env.borrow();
if let Some(outer) = &current_env_borrow.outer {
let outer_clone = outer.clone();

boa_engine/src/environments/runtime.rs (30 lines changed)

@@ -2,7 +2,7 @@ use crate::{
environments::CompileTimeEnvironment, error::JsNativeError, object::JsObject, Context, JsValue,
};
use boa_ast::expression::Identifier;
use boa_gc::{Finalize, Gc, GcCell, Trace};
use boa_gc::{Finalize, Gc, GcRefCell, Trace};
use rustc_hash::FxHashSet;
use std::cell::Cell;
@@ -28,8 +28,8 @@ use std::cell::Cell;
/// All poisoned environments have to be checked for added bindings.
#[derive(Debug, Trace, Finalize)]
pub(crate) struct DeclarativeEnvironment {
bindings: GcCell<Vec<Option<JsValue>>>,
compile: Gc<GcCell<CompileTimeEnvironment>>,
bindings: GcRefCell<Vec<Option<JsValue>>>,
compile: Gc<GcRefCell<CompileTimeEnvironment>>,
#[unsafe_ignore_trace]
poisoned: Cell<bool>,
slots: Option<EnvironmentSlots>,
@@ -38,13 +38,13 @@ pub(crate) struct DeclarativeEnvironment {
/// Describes the different types of internal slot data that an environment can hold.
#[derive(Clone, Debug, Trace, Finalize)]
pub(crate) enum EnvironmentSlots {
Function(GcCell<FunctionSlots>),
Function(GcRefCell<FunctionSlots>),
Global,
}
impl EnvironmentSlots {
/// Return the slots if they are part of a function environment.
pub(crate) const fn as_function_slots(&self) -> Option<&GcCell<FunctionSlots>> {
pub(crate) const fn as_function_slots(&self) -> Option<&GcRefCell<FunctionSlots>> {
if let Self::Function(env) = &self {
Some(env)
} else {
@@ -225,10 +225,10 @@ pub struct DeclarativeEnvironmentStack {
impl DeclarativeEnvironmentStack {
/// Create a new environment stack with the most outer declarative environment.
pub(crate) fn new(global_compile_environment: Gc<GcCell<CompileTimeEnvironment>>) -> Self {
pub(crate) fn new(global_compile_environment: Gc<GcRefCell<CompileTimeEnvironment>>) -> Self {
Self {
stack: vec![Gc::new(DeclarativeEnvironment {
bindings: GcCell::new(Vec::new()),
bindings: GcRefCell::new(Vec::new()),
compile: global_compile_environment,
poisoned: Cell::new(false),
slots: Some(EnvironmentSlots::Global),
@@ -349,7 +349,7 @@ impl DeclarativeEnvironmentStack {
pub(crate) fn push_declarative(
&mut self,
num_bindings: usize,
compile_environment: Gc<GcCell<CompileTimeEnvironment>>,
compile_environment: Gc<GcRefCell<CompileTimeEnvironment>>,
) -> usize {
let poisoned = self
.stack
@@ -361,7 +361,7 @@
let index = self.stack.len();
self.stack.push(Gc::new(DeclarativeEnvironment {
bindings: GcCell::new(vec![None; num_bindings]),
bindings: GcRefCell::new(vec![None; num_bindings]),
compile: compile_environment,
poisoned: Cell::new(poisoned),
slots: None,
@@ -378,7 +378,7 @@
pub(crate) fn push_function(
&mut self,
num_bindings: usize,
compile_environment: Gc<GcCell<CompileTimeEnvironment>>,
compile_environment: Gc<GcRefCell<CompileTimeEnvironment>>,
this: Option<JsValue>,
function_object: JsObject,
new_target: Option<JsObject>,
@@ -402,10 +402,10 @@
let this = this.unwrap_or(JsValue::Null);
self.stack.push(Gc::new(DeclarativeEnvironment {
bindings: GcCell::new(vec![None; num_bindings]),
bindings: GcRefCell::new(vec![None; num_bindings]),
compile: compile_environment,
poisoned: Cell::new(poisoned),
slots: Some(EnvironmentSlots::Function(GcCell::new(FunctionSlots {
slots: Some(EnvironmentSlots::Function(GcRefCell::new(FunctionSlots {
this,
this_binding_status,
function_object,
@@ -422,7 +422,7 @@
pub(crate) fn push_function_inherit(
&mut self,
num_bindings: usize,
compile_environment: Gc<GcCell<CompileTimeEnvironment>>,
compile_environment: Gc<GcRefCell<CompileTimeEnvironment>>,
) {
let outer = self
.stack
@@ -433,7 +433,7 @@
let slots = outer.slots.clone();
self.stack.push(Gc::new(DeclarativeEnvironment {
bindings: GcCell::new(vec![None; num_bindings]),
bindings: GcRefCell::new(vec![None; num_bindings]),
compile: compile_environment,
poisoned: Cell::new(poisoned),
slots,
@@ -480,7 +480,7 @@
/// # Panics
///
/// Panics if no environment exists on the stack.
pub(crate) fn current_compile_environment(&self) -> Gc<GcCell<CompileTimeEnvironment>> {
pub(crate) fn current_compile_environment(&self) -> Gc<GcRefCell<CompileTimeEnvironment>> {
self.stack
.last()
.expect("global environment must always exist")

boa_engine/src/object/jsobject.rs (20 lines changed)

@@ -10,7 +10,7 @@ use crate::{
value::PreferredType,
Context, JsResult, JsValue,
};
use boa_gc::{self, Finalize, Gc, GcCell, Trace};
use boa_gc::{self, Finalize, Gc, GcRefCell, Trace};
use std::{
cell::RefCell,
collections::HashMap,
@@ -20,15 +20,15 @@ use std::{
};
/// A wrapper type for an immutably borrowed type T.
pub type Ref<'a, T> = boa_gc::GcCellRef<'a, T>;
pub type Ref<'a, T> = boa_gc::GcRef<'a, T>;
/// A wrapper type for a mutably borrowed type T.
pub type RefMut<'a, T, U> = boa_gc::GcCellRefMut<'a, T, U>;
pub type RefMut<'a, T, U> = boa_gc::GcRefMut<'a, T, U>;
/// Garbage collected `Object`.
#[derive(Trace, Finalize, Clone, Default)]
pub struct JsObject {
inner: Gc<GcCell<Object>>,
inner: Gc<GcRefCell<Object>>,
}
impl JsObject {
@@ -68,7 +68,7 @@ impl JsObject {
/// [`OrdinaryObjectCreate`]: https://tc39.es/ecma262/#sec-ordinaryobjectcreate
pub fn from_proto_and_data<O: Into<Option<Self>>>(prototype: O, data: ObjectData) -> Self {
Self {
inner: Gc::new(GcCell::new(Object {
inner: Gc::new(GcRefCell::new(Object {
data,
prototype: prototype.into(),
extensible: true,
@@ -754,21 +754,21 @@ Cannot both specify accessors and a value or writable attribute",
)
}
pub(crate) const fn inner(&self) -> &Gc<GcCell<Object>> {
pub(crate) const fn inner(&self) -> &Gc<GcRefCell<Object>> {
&self.inner
}
}
impl AsRef<GcCell<Object>> for JsObject {
impl AsRef<GcRefCell<Object>> for JsObject {
#[inline]
fn as_ref(&self) -> &GcCell<Object> {
fn as_ref(&self) -> &GcRefCell<Object> {
&self.inner
}
}
impl From<Gc<GcCell<Object>>> for JsObject {
impl From<Gc<GcRefCell<Object>>> for JsObject {
#[inline]
fn from(inner: Gc<GcCell<Object>>) -> Self {
fn from(inner: Gc<GcRefCell<Object>>) -> Self {
Self { inner }
}
}

boa_engine/src/object/mod.rs (8 lines changed)

@@ -55,7 +55,7 @@ use crate::{
Context, JsBigInt, JsString, JsSymbol, JsValue,
};
use boa_gc::{custom_trace, Finalize, GcCell, Trace, WeakGc};
use boa_gc::{custom_trace, Finalize, GcRefCell, Trace, WeakGc};
use std::{
any::Any,
fmt::{self, Debug},
@@ -266,7 +266,7 @@ pub enum ObjectKind {
Promise(Promise),
/// The `WeakRef` object kind.
WeakRef(WeakGc<GcCell<Object>>),
WeakRef(WeakGc<GcRefCell<Object>>),
/// The `Intl.Collator` object kind.
#[cfg(feature = "intl")]
@@ -618,7 +618,7 @@ impl ObjectData {
}
/// Creates the `WeakRef` object data
pub fn weak_ref(weak_ref: WeakGc<GcCell<Object>>) -> Self {
pub fn weak_ref(weak_ref: WeakGc<GcRefCell<Object>>) -> Self {
Self {
kind: ObjectKind::WeakRef(weak_ref),
internal_methods: &ORDINARY_INTERNAL_METHODS,
@@ -1623,7 +1623,7 @@ impl Object {
/// Gets the `WeakRef` data if the object is a `WeakRef`.
#[inline]
pub const fn as_weak_ref(&self) -> Option<&WeakGc<GcCell<Self>>> {
pub const fn as_weak_ref(&self) -> Option<&WeakGc<GcRefCell<Self>>> {
match self.data {
ObjectData {
kind: ObjectKind::WeakRef(ref weak_ref),

boa_engine/src/realm.rs (7 lines changed)

@@ -10,7 +10,7 @@ use crate::{
environments::{CompileTimeEnvironment, DeclarativeEnvironmentStack},
object::{GlobalPropertyMap, JsObject, JsPrototype, ObjectData, PropertyMap},
};
use boa_gc::{Gc, GcCell};
use boa_gc::{Gc, GcRefCell};
use boa_profiler::Profiler;
/// Representation of a Realm.
@@ -23,7 +23,7 @@ pub struct Realm {
pub(crate) global_property_map: PropertyMap,
pub(crate) global_prototype: JsPrototype,
pub(crate) environments: DeclarativeEnvironmentStack,
pub(crate) compile_env: Gc<GcCell<CompileTimeEnvironment>>,
pub(crate) compile_env: Gc<GcRefCell<CompileTimeEnvironment>>,
}
impl Realm {
@@ -36,7 +36,8 @@ impl Realm {
// Allow identification of the global object easily
let global_object = JsObject::from_proto_and_data(None, ObjectData::global());
let global_compile_environment = Gc::new(GcCell::new(CompileTimeEnvironment::new_global()));
let global_compile_environment =
Gc::new(GcRefCell::new(CompileTimeEnvironment::new_global()));
Self {
global_object,

boa_engine/src/vm/code_block.rs (8 lines changed)

@@ -22,7 +22,7 @@ use boa_ast::{
expression::Identifier,
function::{FormalParameterList, PrivateName},
};
use boa_gc::{Finalize, Gc, GcCell, Trace};
use boa_gc::{Finalize, Gc, GcRefCell, Trace};
use boa_interner::{Interner, Sym, ToInternedString};
use boa_profiler::Profiler;
use std::{collections::VecDeque, convert::TryInto, mem::size_of};
@@ -105,7 +105,7 @@ pub struct CodeBlock {
pub(crate) arguments_binding: Option<BindingLocator>,
/// Compile time environments in this function.
pub(crate) compile_environments: Vec<Gc<GcCell<CompileTimeEnvironment>>>,
pub(crate) compile_environments: Vec<Gc<GcRefCell<CompileTimeEnvironment>>>,
/// The `[[IsClassConstructor]]` internal slot.
pub(crate) is_class_constructor: bool,
@@ -1125,7 +1125,7 @@ impl JsObject {
prototype,
ObjectData::generator(Generator {
state: GeneratorState::SuspendedStart,
context: Some(Gc::new(GcCell::new(GeneratorContext {
context: Some(Gc::new(GcRefCell::new(GeneratorContext {
environments,
call_frame,
stack,
@@ -1272,7 +1272,7 @@ impl JsObject {
prototype,
ObjectData::async_generator(AsyncGenerator {
state: AsyncGeneratorState::SuspendedStart,
context: Some(Gc::new(GcCell::new(GeneratorContext {
context: Some(Gc::new(GcRefCell::new(GeneratorContext {
environments,
call_frame,
stack,

boa_engine/src/vm/opcode/await_stm/mod.rs (6 lines changed)

@@ -1,4 +1,4 @@
use boa_gc::{Gc, GcCell};
use boa_gc::{Gc, GcRefCell};
use crate::{
builtins::{JsArgs, Promise},
@@ -61,7 +61,7 @@ impl Operation for Await {
Ok(JsValue::undefined())
},
Gc::new(GcCell::new((
Gc::new(GcRefCell::new((
context.realm.environments.clone(),
context.vm.stack.clone(),
context.vm.frame().clone(),
@@ -104,7 +104,7 @@ impl Operation for Await {
Ok(JsValue::undefined())
},
Gc::new(GcCell::new((
Gc::new(GcRefCell::new((
context.realm.environments.clone(),
context.vm.stack.clone(),
context.vm.frame().clone(),

boa_examples/src/bin/closures.rs (4 lines changed)

@@ -11,7 +11,7 @@ use boa_engine::{
string::utf16,
Context, JsError, JsNativeError, JsString, JsValue,
};
use boa_gc::{Finalize, GcCell, Trace};
use boa_gc::{Finalize, GcRefCell, Trace};
fn main() -> Result<(), JsError> {
// We create a new `Context` to create a new Javascript executor.
@@ -96,7 +96,7 @@ fn main() -> Result<(), JsError> {
Ok(message.into())
},
// Here is where we move `clone_variable` into the closure.
GcCell::new(clone_variable),
GcRefCell::new(clone_variable),
),
)
// And here we assign `createMessage` to the `name` property of the closure.

boa_gc/src/cell.rs (94 lines changed)

@@ -117,12 +117,12 @@ impl Debug for BorrowFlag {
/// that can be used inside of a garbage-collected pointer.
///
/// This object is a `RefCell` that can be used inside of a `Gc<T>`.
pub struct GcCell<T: ?Sized + 'static> {
pub struct GcRefCell<T: ?Sized + 'static> {
pub(crate) flags: Cell<BorrowFlag>,
pub(crate) cell: UnsafeCell<T>,
}
impl<T: Trace> GcCell<T> {
impl<T: Trace> GcRefCell<T> {
/// Creates a new `GcCell` containing `value`.
pub const fn new(value: T) -> Self {
Self {
@@ -137,7 +137,7 @@ impl<T: Trace> GcCell<T> {
}
}
impl<T: Trace + ?Sized> GcCell<T> {
impl<T: Trace + ?Sized> GcRefCell<T> {
/// Immutably borrows the wrapped value.
///
/// The borrow lasts until the returned `GcCellRef` exits scope.
@@ -146,7 +146,7 @@ impl<T: Trace + ?Sized> GcCell<T> {
/// # Panics
///
/// Panics if the value is currently mutably borrowed.
pub fn borrow(&self) -> GcCellRef<'_, T> {
pub fn borrow(&self) -> GcRef<'_, T> {
match self.try_borrow() {
Ok(value) => value,
Err(e) => panic!("{}", e),
@@ -161,7 +161,7 @@ impl<T: Trace + ?Sized> GcCell<T> {
/// # Panics
///
/// Panics if the value is currently borrowed.
pub fn borrow_mut(&self) -> GcCellRefMut<'_, T> {
pub fn borrow_mut(&self) -> GcRefMut<'_, T> {
match self.try_borrow_mut() {
Ok(value) => value,
Err(e) => panic!("{}", e),
@@ -179,7 +179,7 @@ impl<T: Trace + ?Sized> GcCell<T> {
/// # Errors
///
/// Returns an `Err` if the value is currently mutably borrowed.
pub fn try_borrow(&self) -> Result<GcCellRef<'_, T>, BorrowError> {
pub fn try_borrow(&self) -> Result<GcRef<'_, T>, BorrowError> {
if self.flags.get().borrowed() == BorrowState::Writing {
return Err(BorrowError);
}
@@ -187,7 +187,7 @@ impl<T: Trace + ?Sized> GcCell<T> {
// SAFETY: calling value on a rooted value may cause Undefined Behavior
unsafe {
Ok(GcCellRef {
Ok(GcRef {
flags: &self.flags,
value: &*self.cell.get(),
})
@@ -204,7 +204,7 @@ impl<T: Trace + ?Sized> GcCell<T> {
/// # Errors
///
/// Returns an `Err` if the value is currently borrowed.
pub fn try_borrow_mut(&self) -> Result<GcCellRefMut<'_, T>, BorrowMutError> {
pub fn try_borrow_mut(&self) -> Result<GcRefMut<'_, T>, BorrowMutError> {
if self.flags.get().borrowed() != BorrowState::Unused {
return Err(BorrowMutError);
}
@@ -219,7 +219,7 @@ impl<T: Trace + ?Sized> GcCell<T> {
(*self.cell.get()).root();
}
Ok(GcCellRefMut {
Ok(GcRefMut {
gc_cell: self,
value: &mut *self.cell.get(),
})
@@ -247,13 +247,13 @@ impl Display for BorrowMutError {
}
}
impl<T: Trace + ?Sized> Finalize for GcCell<T> {}
impl<T: Trace + ?Sized> Finalize for GcRefCell<T> {}
// SAFETY: GcCell maintains it's own BorrowState and rootedness. GcCell's implementation
// focuses on only continuing Trace based methods while the cell state is not written.
// Implementing a Trace while the cell is being written to or incorrectly implementing Trace
// on GcCell's value may cause Undefined Behavior
unsafe impl<T: Trace + ?Sized> Trace for GcCell<T> {
unsafe impl<T: Trace + ?Sized> Trace for GcRefCell<T> {
unsafe fn trace(&self) {
match self.flags.get().borrowed() {
BorrowState::Writing => (),
@@ -262,14 +262,6 @@ unsafe impl<T: Trace + ?Sized> Trace for GcCell<T> {
}
}
unsafe fn weak_trace(&self) {
match self.flags.get().borrowed() {
BorrowState::Writing => (),
// SAFETY: Please see GcCell's Trace impl Safety note.
_ => unsafe { (*self.cell.get()).weak_trace() },
}
}
unsafe fn root(&self) {
assert!(!self.flags.get().rooted(), "Can't root a GcCell twice!");
self.flags.set(self.flags.get().set_rooted(true));
@@ -303,12 +295,12 @@ unsafe impl<T: Trace + ?Sized> Trace for GcCell<T> {
}
/// A wrapper type for an immutably borrowed value from a `GcCell<T>`.
pub struct GcCellRef<'a, T: ?Sized + 'static> {
pub struct GcRef<'a, T: ?Sized + 'static> {
pub(crate) flags: &'a Cell<BorrowFlag>,
pub(crate) value: &'a T,
}
impl<'a, T: ?Sized> GcCellRef<'a, T> {
impl<'a, T: ?Sized> GcRef<'a, T> {
/// Copies a `GcCellRef`.
///
/// The `GcCell` is already immutably borrowed, so this cannot fail.
@@ -319,9 +311,9 @@ impl<'a, T: ?Sized> GcCellRef<'a, T> {
/// the contents of a `GcCell`.
#[allow(clippy::should_implement_trait)]
#[must_use]
pub fn clone(orig: &GcCellRef<'a, T>) -> GcCellRef<'a, T> {
pub fn clone(orig: &GcRef<'a, T>) -> GcRef<'a, T> {
orig.flags.set(orig.flags.get().add_reading());
GcCellRef {
GcRef {
flags: orig.flags,
value: orig.value,
}
@@ -334,12 +326,12 @@ impl<'a, T: ?Sized> GcCellRef<'a, T> {
/// This is an associated function that needs to be used as `GcCellRef::map(...)`.
/// A method would interfere with methods of the same name on the contents
/// of a `GcCellRef` used through `Deref`.
pub fn map<U, F>(orig: Self, f: F) -> GcCellRef<'a, U>
pub fn map<U, F>(orig: Self, f: F) -> GcRef<'a, U>
where
U: ?Sized,
F: FnOnce(&T) -> &U,
{
let ret = GcCellRef {
let ret = GcRef {
flags: orig.flags,
value: f(orig.value),
};
@@ -357,7 +349,7 @@ impl<'a, T: ?Sized> GcCellRef<'a, T> {
///
/// This is an associated function that needs to be used as `GcCellRef::map_split(...)`.
/// A method would interfere with methods of the same name on the contents of a `GcCellRef` used through `Deref`.
pub fn map_split<U, V, F>(orig: Self, f: F) -> (GcCellRef<'a, U>, GcCellRef<'a, V>)
pub fn map_split<U, V, F>(orig: Self, f: F) -> (GcRef<'a, U>, GcRef<'a, V>)
where
U: ?Sized,
V: ?Sized,
@@ -368,11 +360,11 @@ impl<'a, T: ?Sized> GcCellRef<'a, T> {
orig.flags.set(orig.flags.get().add_reading());
let ret = (
GcCellRef {
GcRef {
flags: orig.flags,
value: a,
},
GcCellRef {
GcRef {
flags: orig.flags,
value: b,
},
@@ -386,7 +378,7 @@ impl<'a, T: ?Sized> GcCellRef<'a, T> {
}
}
impl<T: ?Sized> Deref for GcCellRef<'_, T> {
impl<T: ?Sized> Deref for GcRef<'_, T> {
type Target = T;
fn deref(&self) -> &T {
@@ -394,32 +386,32 @@ impl<T: ?Sized> Deref for GcCellRef<'_, T> {
}
}
impl<T: ?Sized> Drop for GcCellRef<'_, T> {
impl<T: ?Sized> Drop for GcRef<'_, T> {
fn drop(&mut self) {
debug_assert!(self.flags.get().borrowed() == BorrowState::Reading);
self.flags.set(self.flags.get().sub_reading());
}
}
impl<T: ?Sized + Debug> Debug for GcCellRef<'_, T> {
impl<T: ?Sized + Debug> Debug for GcRef<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Debug::fmt(&**self, f)
}
}
impl<T: ?Sized + Display> Display for GcCellRef<'_, T> {
impl<T: ?Sized + Display> Display for GcRef<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&**self, f)
}
}
/// A wrapper type for a mutably borrowed value from a `GcCell<T>`.
pub struct GcCellRefMut<'a, T: Trace + ?Sized + 'static, U: ?Sized = T> {
pub(crate) gc_cell: &'a GcCell<T>,
pub struct GcRefMut<'a, T: Trace + ?Sized + 'static, U: ?Sized = T> {
pub(crate) gc_cell: &'a GcRefCell<T>,
pub(crate) value: &'a mut U,
}
impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> {
impl<'a, T: Trace + ?Sized, U: ?Sized> GcRefMut<'a, T, U> {
/// Makes a new `GcCellRefMut` for a component of the borrowed data, e.g., an enum
/// variant.
///
@@ -428,16 +420,16 @@ impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> {
/// This is an associated function that needs to be used as
/// `GcCellRefMut::map(...)`. A method would interfere with methods of the same
/// name on the contents of a `GcCell` used through `Deref`.
pub fn map<V, F>(orig: Self, f: F) -> GcCellRefMut<'a, T, V>
pub fn map<V, F>(orig: Self, f: F) -> GcRefMut<'a, T, V>
where
V: ?Sized,
F: FnOnce(&mut U) -> &mut V,
{
// SAFETY: This is safe as `GcCellRefMut` is already borrowed, so the value is rooted.
#[allow(trivial_casts)]
// SAFETY: This is safe as `GcCellRefMut` is already borrowed, so the value is rooted.
let value = unsafe { &mut *(orig.value as *mut U) };
let ret = GcCellRefMut {
let ret = GcRefMut {
gc_cell: orig.gc_cell,
value: f(value),
};
@@ -450,7 +442,7 @@ impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> {
}
}
impl<T: Trace + ?Sized, U: ?Sized> Deref for GcCellRefMut<'_, T, U> {
impl<T: Trace + ?Sized, U: ?Sized> Deref for GcRefMut<'_, T, U> {
type Target = U;
fn deref(&self) -> &U {
@@ -458,13 +450,13 @@ impl<T: Trace + ?Sized, U: ?Sized> Deref for GcCellRefMut<'_, T, U> {
}
}
impl<T: Trace + ?Sized, U: ?Sized> DerefMut for GcCellRefMut<'_, T, U> {
impl<T: Trace + ?Sized, U: ?Sized> DerefMut for GcRefMut<'_, T, U> {
fn deref_mut(&mut self) -> &mut U {
self.value
}
}
impl<T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'_, T, U> {
impl<T: Trace + ?Sized, U: ?Sized> Drop for GcRefMut<'_, T, U> {
fn drop(&mut self) {
debug_assert!(self.gc_cell.flags.get().borrowed() == BorrowState::Writing);
// Restore the rooted state of the GcCell's contents to the state of the GcCell.
@@ -482,45 +474,45 @@ impl<T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'_, T, U> {
}
}
impl<T: Trace + ?Sized, U: Debug + ?Sized> Debug for GcCellRefMut<'_, T, U> {
impl<T: Trace + ?Sized, U: Debug + ?Sized> Debug for GcRefMut<'_, T, U> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Debug::fmt(&**self, f)
}
}
impl<T: Trace + ?Sized, U: Display + ?Sized> Display for GcCellRefMut<'_, T, U> {
impl<T: Trace + ?Sized, U: Display + ?Sized> Display for GcRefMut<'_, T, U> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&**self, f)
}
}
// SAFETY: GcCell<T> tracks it's `BorrowState` is `Writing`
unsafe impl<T: ?Sized + Send> Send for GcCell<T> {}
unsafe impl<T: ?Sized + Send> Send for GcRefCell<T> {}
impl<T: Trace + Clone> Clone for GcCell<T> {
impl<T: Trace + Clone> Clone for GcRefCell<T> {
fn clone(&self) -> Self {
Self::new(self.borrow().clone())
}
}
impl<T: Trace + Default> Default for GcCell<T> {
impl<T: Trace + Default> Default for GcRefCell<T> {
fn default() -> Self {
Self::new(Default::default())
}
}
#[allow(clippy::inline_always)]
impl<T: Trace + ?Sized + PartialEq> PartialEq for GcCell<T> {
impl<T: Trace + ?Sized + PartialEq> PartialEq for GcRefCell<T> {
#[inline(always)]
fn eq(&self, other: &Self) -> bool {
*self.borrow() == *other.borrow()
}
}
impl<T: Trace + ?Sized + Eq> Eq for GcCell<T> {}
impl<T: Trace + ?Sized + Eq> Eq for GcRefCell<T> {}
#[allow(clippy::inline_always)]
impl<T: Trace + ?Sized + PartialOrd> PartialOrd for GcCell<T> {
impl<T: Trace + ?Sized + PartialOrd> PartialOrd for GcRefCell<T> {
#[inline(always)]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
(*self.borrow()).partial_cmp(&*other.borrow())
@@ -547,13 +539,13 @@ impl<T: Trace + ?Sized + PartialOrd> PartialOrd for GcCell<T> {
}
}
impl<T: Trace + ?Sized + Ord> Ord for GcCell<T> {
impl<T: Trace + ?Sized + Ord> Ord for GcRefCell<T> {
fn cmp(&self, other: &Self) -> Ordering {
(*self.borrow()).cmp(&*other.borrow())
}
}
impl<T: Trace + ?Sized + Debug> Debug for GcCell<T> {
impl<T: Trace + ?Sized + Debug> Debug for GcRefCell<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.flags.get().borrowed() {
BorrowState::Unused | BorrowState::Reading => f
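The renamed cell keeps `RefCell`-style dynamically checked borrow rules. A minimal usage sketch of the new names (hypothetical, not part of the diff; assumes `Gc<T>` derefs to `T`):

```rust
use boa_gc::{Gc, GcRefCell};

fn main() {
    let shared = Gc::new(GcRefCell::new(vec![1, 2, 3]));
    {
        // `borrow` now returns a `GcRef` (previously `GcCellRef`).
        let read = shared.borrow();
        assert_eq!(read.len(), 3);
        // A write borrow is rejected while a read borrow is live.
        assert!(shared.try_borrow_mut().is_err());
    }
    // With the read borrow dropped, `borrow_mut` yields a `GcRefMut`.
    shared.borrow_mut().push(4);
    assert_eq!(shared.borrow().len(), 4);
}
```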

boa_gc/src/internals/ephemeron_box.rs (257 lines changed)

@@ -1,115 +1,210 @@
use crate::{finalizer_safe, trace::Trace, Finalize, Gc, GcBox};
use std::{cell::Cell, ptr::NonNull};
/// The inner allocation of an [`Ephemeron`][crate::Ephemeron] pointer.
pub(crate) struct EphemeronBox<K: Trace + ?Sized + 'static, V: Trace + ?Sized + 'static> {
key: Cell<Option<NonNull<GcBox<K>>>>,
value: V,
use crate::{trace::Trace, Gc, GcBox};
use std::{
cell::Cell,
ptr::{self, NonNull},
};
// Age and Weak Flags
const MARK_MASK: usize = 1 << (usize::BITS - 1);
const ROOTS_MASK: usize = !MARK_MASK;
const ROOTS_MAX: usize = ROOTS_MASK;
/// The `EphemeronBoxHeader` contains the `EphemeronBox`'s current state for the `Collector`'s
/// Mark/Sweep as well as a pointer to the next ephemeron in the heap.
///
/// These flags include:
/// - Root Count
/// - Mark Flag Bit
///
/// The next node is set by the `Allocator` during initialization and by the
/// `Collector` during the sweep phase.
pub(crate) struct EphemeronBoxHeader {
roots: Cell<usize>,
pub(crate) next: Cell<Option<NonNull<dyn ErasedEphemeronBox>>>,
}
impl<K: Trace + ?Sized, V: Trace> EphemeronBox<K, V> {
pub(crate) fn new(key: &Gc<K>, value: V) -> Self {
impl EphemeronBoxHeader {
/// Creates a new `EphemeronBoxHeader` with a root of 1 and next set to None.
pub(crate) fn new() -> Self {
Self {
key: Cell::new(Some(key.inner_ptr())),
value,
roots: Cell::new(1),
next: Cell::new(None),
}
}
}
impl<K: Trace + ?Sized, V: Trace + ?Sized> EphemeronBox<K, V> {
/// Checks if the key pointer is marked by Trace
pub(crate) fn is_marked(&self) -> bool {
self.inner_key().map_or(false, GcBox::is_marked)
/// Returns the `EphemeronBoxHeader`'s current root count
pub(crate) fn roots(&self) -> usize {
self.roots.get() & ROOTS_MASK
}
/// Returns some pointer to the `key`'s `GcBox` or None
/// # Panics
/// This method will panic if called while the garbage collector is dropping.
pub(crate) fn inner_key_ptr(&self) -> Option<*mut GcBox<K>> {
assert!(finalizer_safe());
self.key.get().map(NonNull::as_ptr)
/// Increments `EphemeronBoxHeader`'s root count.
pub(crate) fn inc_roots(&self) {
let roots = self.roots.get();
if (roots & ROOTS_MASK) < ROOTS_MAX {
self.roots.set(roots + 1);
} else {
// TODO: implement a better way to handle root overload.
panic!("roots counter overflow");
}
}
/// Returns some reference to `key`'s `GcBox` or None
pub(crate) fn inner_key(&self) -> Option<&GcBox<K>> {
// SAFETY: This is safe as `EphemeronBox::inner_key_ptr()` will
// fetch either a live `GcBox` or None. The value of `key` is set
// to None in the case where `EphemeronBox` and `key`'s `GcBox`
// entered into `Collector::sweep()` as unmarked.
unsafe { self.inner_key_ptr().map(|inner_key| &*inner_key) }
/// Decreases `EphemeronBoxHeader`'s current root count.
pub(crate) fn dec_roots(&self) {
// Underflow check as a stop gap for current issue when dropping.
if self.roots.get() > 0 {
self.roots.set(self.roots.get() - 1);
}
}
/// Returns a reference to the value of `key`'s `GcBox`
pub(crate) fn key(&self) -> Option<&K> {
self.inner_key().map(GcBox::value)
/// Returns a bool for whether `EphemeronBoxHeader`'s mark bit is 1.
pub(crate) fn is_marked(&self) -> bool {
self.roots.get() & MARK_MASK != 0
}
/// Returns a reference to `value`
pub(crate) const fn value(&self) -> &V {
&self.value
/// Sets `EphemeronBoxHeader`'s mark bit to 1.
pub(crate) fn mark(&self) {
self.roots.set(self.roots.get() | MARK_MASK);
}
/// Calls [`Trace::weak_trace()`][crate::Trace] on key
fn weak_trace_key(&self) {
if let Some(key) = self.inner_key() {
key.weak_trace_inner();
}
/// Sets `EphemeronBoxHeader`'s mark bit to 0.
pub(crate) fn unmark(&self) {
self.roots.set(self.roots.get() & !MARK_MASK);
}
}
/// Calls [`Trace::weak_trace()`][crate::Trace] on value
fn weak_trace_value(&self) {
// SAFETY: Value is a sized element that must implement trace. The
// operation is safe as EphemeronBox owns value and `Trace::weak_trace`
// must be implemented on it
unsafe {
self.value().weak_trace();
}
impl core::fmt::Debug for EphemeronBoxHeader {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.debug_struct("EphemeronBoxHeader")
.field("roots", &self.roots())
.field("marked", &self.is_marked())
.finish()
}
}
// `EphemeronBox`'s Finalize is special in that if it is determined to be unreachable
// and therefore so has the `GcBox` that `key`stores the pointer to, then we set `key`
// to None to guarantee that we do not access freed memory.
impl<K: Trace + ?Sized, V: Trace + ?Sized> Finalize for EphemeronBox<K, V> {
fn finalize(&self) {
self.key.set(None);
/// The inner allocation of an [`Ephemeron`][crate::Ephemeron] pointer.
pub(crate) struct EphemeronBox<K: Trace + ?Sized + 'static, V: Trace + 'static> {
pub(crate) header: EphemeronBoxHeader,
data: Cell<Option<NonNull<Data<K, V>>>>,
}
impl<K: Trace + ?Sized + 'static, V: Trace + 'static> Drop for EphemeronBox<K, V> {
fn drop(&mut self) {
if let Some(data) = self.data.take() {
// SAFETY: `data` comes from an `into_raw` call, so this pointer is safe to pass to
// `from_raw`.
drop(unsafe { Box::from_raw(data.as_ptr()) });
}
}
}
// SAFETY: EphemeronBox implements primarly two methods of trace `Trace::is_marked_ephemeron`
// to determine whether the key field is stored and `Trace::weak_trace` which continues the `Trace::weak_trace()`
// into `key` and `value`.
unsafe impl<K: Trace + ?Sized, V: Trace + ?Sized> Trace for EphemeronBox<K, V> {
unsafe fn trace(&self) {
/* An ephemeron is never traced with Phase One Trace */
struct Data<K: Trace + ?Sized + 'static, V: Trace + 'static> {
key: NonNull<GcBox<K>>,
value: V,
}
impl<K: Trace + ?Sized, V: Trace> EphemeronBox<K, V> {
pub(crate) fn new(key: &Gc<K>, value: V) -> Self {
let data = Box::into_raw(Box::new(Data {
key: key.inner_ptr(),
value,
}));
// SAFETY: `Box::into_raw` must always return a non-null pointer.
let data = unsafe { NonNull::new_unchecked(data) };
Self {
header: EphemeronBoxHeader::new(),
data: Cell::new(Some(data)),
}
}
/// Checks if the `key`'s `GcBox` has been marked by `Trace::trace()` or `Trace::weak_trace`.
fn is_marked_ephemeron(&self) -> bool {
self.is_marked()
/// Returns `true` if the two references refer to the same `EphemeronBox`.
pub(crate) fn ptr_eq(this: &Self, other: &Self) -> bool {
// Use .header to ignore fat pointer vtables, to work around
// https://github.com/rust-lang/rust/issues/46139
ptr::eq(&this.header, &other.header)
}
/// Checks if this `EphemeronBox` has already been determined reachable. If so, continue to trace
/// value in `key` and `value`.
unsafe fn weak_trace(&self) {
if self.is_marked() {
self.weak_trace_key();
self.weak_trace_value();
}
/// Returns a reference to the ephemeron's value or None.
pub(crate) fn value(&self) -> Option<&V> {
// SAFETY: the garbage collector ensures `ptr` is valid as long as `data` is `Some`.
unsafe { self.data.get().map(|ptr| &ptr.as_ref().value) }
}
/// Marks this `EphemeronBox` as live.
///
/// This doesn't mark the inner value of the ephemeron. [`ErasedEphemeronBox::trace`]
/// does this, and it's called by the garbage collector on demand.
pub(crate) unsafe fn mark(&self) {
self.header.mark();
}
/// Increases the root count on this `EphemeronBox`.
///
/// Roots prevent the `EphemeronBox` from being destroyed by the garbage collector.
pub(crate) fn root(&self) {
self.header.inc_roots();
}
/// Decreases the root count on this `EphemeronBox`.
///
/// Roots prevent the `EphemeronBox` from being destroyed by the garbage collector.
pub(crate) fn unroot(&self) {
self.header.dec_roots();
}
}
pub(crate) trait ErasedEphemeronBox {
/// Gets the header of the `EphemeronBox`.
fn header(&self) -> &EphemeronBoxHeader;
/// Traces through the `EphemeronBox`'s held value, but only if it's marked and its key is also
/// marked. Returns `true` if the ephemeron successfully traced through its value. This also
/// considers ephemerons that are marked but don't have their value anymore as
/// "successfully traced".
unsafe fn trace(&self) -> bool;
/// Runs the finalization logic of the `EphemeronBox`'s held value, if the key is still live,
/// and clears its contents.
fn finalize_and_clear(&self);
}
impl<K: Trace + ?Sized, V: Trace> ErasedEphemeronBox for EphemeronBox<K, V> {
fn header(&self) -> &EphemeronBoxHeader {
&self.header
}
// EphemeronBox does not implement root.
unsafe fn root(&self) {}
unsafe fn trace(&self) -> bool {
if !self.header.is_marked() {
return false;
}
let Some(data) = self.data.get() else {
return true;
};
// EphemeronBox does not implement unroot
unsafe fn unroot(&self) {}
// SAFETY: `data` comes from a `Box`, so it is safe to dereference.
let data = unsafe { data.as_ref() };
// SAFETY: `key` comes from a `Gc`, and the garbage collector only invalidates
// `key` when it is unreachable, making `key` always valid.
let key = unsafe { data.key.as_ref() };
// An `EphemeronBox`'s key is set to None once it has been finalized.
//
// NOTE: while it is possible for the `key`'s pointer value to be
// resurrected, we should still consider the finalize the ephemeron
// box and set the `key` to None.
fn run_finalizer(&self) {
Finalize::finalize(self);
let is_key_marked = key.is_marked();
if is_key_marked {
// SAFETY: this is safe to call, since we want to trace all reachable objects
// from a marked ephemeron that holds a live `key`.
unsafe { data.value.trace() }
}
is_key_marked
}
fn finalize_and_clear(&self) {
if let Some(data) = self.data.take() {
// SAFETY: `data` comes from an `into_raw` call, so this pointer is safe to pass to
// `from_raw`.
let contents = unsafe { Box::from_raw(data.as_ptr()) };
contents.value.finalize();
}
}
}
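The new header packs the root count and the mark bit into a single `usize`: the top bit is `MARK_MASK` and the remaining bits count roots. A standalone sketch of that flag arithmetic (it mirrors the masks in this file; illustration only, not part of the diff):

```rust
const MARK_MASK: usize = 1 << (usize::BITS - 1);
const ROOTS_MASK: usize = !MARK_MASK;

fn main() {
    let mut bits: usize = 1; // freshly allocated: one root, unmarked
    bits |= MARK_MASK; // mark()
    assert_eq!(bits & ROOTS_MASK, 1); // roots() is unaffected by marking
    assert!(bits & MARK_MASK != 0); // is_marked()
    bits &= !MARK_MASK; // unmark()
    bits += 1; // inc_roots() (the real code also checks ROOTS_MAX first)
    assert_eq!(bits & ROOTS_MASK, 2);
}
```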

boa_gc/src/internals/gc_box.rs (59 lines changed)

@@ -6,9 +6,8 @@ use std::{
};
// Age and Weak Flags
const MARK_MASK: usize = 1 << (usize::BITS - 2);
const WEAK_MASK: usize = 1 << (usize::BITS - 1);
const ROOTS_MASK: usize = !(MARK_MASK | WEAK_MASK);
const MARK_MASK: usize = 1 << (usize::BITS - 1);
const ROOTS_MASK: usize = !MARK_MASK;
const ROOTS_MAX: usize = ROOTS_MASK;
/// The `GcBoxheader` contains the `GcBox`'s current state for the `Collector`'s
@@ -17,7 +16,6 @@ const ROOTS_MAX: usize = ROOTS_MASK;
/// These flags include:
/// - Root Count
/// - Mark Flag Bit
/// - Weak Flag Bit
///
/// The next node is set by the `Allocator` during initialization and by the
/// `Collector` during the sweep phase.
@@ -35,15 +33,6 @@ impl GcBoxHeader {
}
}
/// Creates a new `GcBoxHeader` with the Weak bit at 1 and roots of 1.
pub(crate) fn new_weak() -> Self {
// Set weak_flag
Self {
roots: Cell::new(WEAK_MASK | 1),
next: Cell::new(None),
}
}
/// Returns the `GcBoxHeader`'s current root count
pub(crate) fn roots(&self) -> usize {
self.roots.get() & ROOTS_MASK
@@ -83,19 +72,13 @@ impl GcBoxHeader {
pub(crate) fn unmark(&self) {
self.roots.set(self.roots.get() & !MARK_MASK);
}
/// Returns a bool for whether the `GcBoxHeader`'s weak bit is 1.
pub(crate) fn is_ephemeron(&self) -> bool {
self.roots.get() & WEAK_MASK != 0
}
}
impl fmt::Debug for GcBoxHeader {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("GcBoxHeader")
.field("Roots", &self.roots())
.field("Weak", &self.is_ephemeron())
.field("Marked", &self.is_marked())
.field("roots", &self.roots())
.field("marked", &self.is_marked())
.finish()
}
}
@@ -104,7 +87,7 @@ impl fmt::Debug for GcBoxHeader {
#[derive(Debug)]
pub struct GcBox<T: Trace + ?Sized + 'static> {
pub(crate) header: GcBoxHeader,
pub(crate) value: T,
value: T,
}
impl<T: Trace> GcBox<T> {
@@ -115,14 +98,6 @@ impl<T: Trace> GcBox<T> {
value,
}
}
/// Returns a new `GcBox` with a rooted and weak `GcBoxHeader`.
pub(crate) fn new_weak(value: T) -> Self {
Self {
header: GcBoxHeader::new_weak(),
value,
}
}
}
impl<T: Trace + ?Sized> GcBox<T> {
@@ -133,9 +108,9 @@ impl<T: Trace + ?Sized> GcBox<T> {
ptr::eq(&this.header, &other.header)
}
/// Marks this `GcBox` and marks through its data.
pub(crate) unsafe fn trace_inner(&self) {
if !self.header.is_marked() && !self.header.is_ephemeron() {
/// Marks this `GcBox` and traces its value.
pub(crate) unsafe fn mark_and_trace(&self) {
if !self.header.is_marked() {
self.header.mark();
// SAFETY: if `GcBox::trace_inner()` has been called, then,
// this box must have been deemed as reachable via tracing
@@ -147,29 +122,17 @@ impl<T: Trace + ?Sized> GcBox<T> {
}
}
/// Trace inner data and search for ephemerons to add to the ephemeron queue.
pub(crate) fn weak_trace_inner(&self) {
if !self.header.is_marked() && !self.header.is_ephemeron() {
self.header.mark();
// SAFETY: if a `GcBox` has `weak_trace_inner` called, then the inner.
// value must have been deemed as reachable.
unsafe {
self.value.weak_trace();
}
}
}
/// Increases the root count on this `GcBox`.
///
/// Roots prevent the `GcBox` from being destroyed by the garbage collector.
pub(crate) fn root_inner(&self) {
pub(crate) fn root(&self) {
self.header.inc_roots();
}
/// Decreases the root count on this `GcBox`.
///
/// Roots prevent the `GcBox` from being destroyed by the garbage collector.
pub(crate) fn unroot_inner(&self) {
pub(crate) fn unroot(&self) {
self.header.dec_roots();
}
@@ -178,7 +141,7 @@ impl<T: Trace + ?Sized> GcBox<T> {
&self.value
}
/// Returns a bool for whether the header is marked.
/// Returns `true` if the header is marked.
pub(crate) fn is_marked(&self) -> bool {
self.header.is_marked()
}
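`mark_and_trace` only recurses into a value when the mark bit flips from unset to set, which is what lets tracing terminate on cyclic object graphs. A self-contained toy model of that guard (hypothetical types; indices stand in for GC pointers):

```rust
use std::cell::Cell;

struct Node {
    marked: Cell<bool>,
    edges: Vec<usize>, // indices into `heap`, standing in for traced pointers
}

fn mark_and_trace(heap: &[Node], i: usize) {
    // Recurse only when the mark bit flips, so cycles terminate.
    if !heap[i].marked.get() {
        heap[i].marked.set(true);
        for &edge in &heap[i].edges {
            mark_and_trace(heap, edge);
        }
    }
}

fn main() {
    // Two nodes referencing each other: marking must not loop forever.
    let heap = [
        Node { marked: Cell::new(false), edges: vec![1] },
        Node { marked: Cell::new(false), edges: vec![0] },
    ];
    mark_and_trace(&heap, 0);
    assert!(heap.iter().all(|n| n.marked.get()));
}
```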

boa_gc/src/internals/mod.rs (2 lines changed)

@@ -1,5 +1,5 @@
mod ephemeron_box;
mod gc_box;
pub(crate) use self::ephemeron_box::EphemeronBox;
pub(crate) use self::ephemeron_box::{EphemeronBox, ErasedEphemeronBox};
pub use self::gc_box::GcBox;

boa_gc/src/lib.rs (306 lines changed)

@@ -79,6 +79,7 @@
clippy::perf,
clippy::pedantic,
clippy::nursery,
clippy::undocumented_unsafe_blocks
)]
#![allow(
clippy::module_name_repetitions,
@@ -95,6 +96,7 @@ mod trace;
pub(crate) mod internals;
use boa_profiler::Profiler;
use internals::{EphemeronBox, ErasedEphemeronBox};
use std::{
cell::{Cell, RefCell},
mem,
@@ -103,18 +105,19 @@ use std::{
pub use crate::trace::{Finalize, Trace};
pub use boa_macros::{Finalize, Trace};
pub use cell::{GcCell, GcCellRef, GcCellRefMut};
pub use cell::{GcRef, GcRefCell, GcRefMut};
pub use internals::GcBox;
pub use pointers::{Ephemeron, Gc, WeakGc};
type GcPointer = NonNull<GcBox<dyn Trace>>;
type EphemeronPointer = NonNull<dyn ErasedEphemeronBox>;
thread_local!(static EPHEMERON_QUEUE: Cell<Option<Vec<GcPointer>>> = Cell::new(None));
thread_local!(static GC_DROPPING: Cell<bool> = Cell::new(false));
thread_local!(static BOA_GC: RefCell<BoaGc> = RefCell::new( BoaGc {
config: GcConfig::default(),
runtime: GcRuntimeData::default(),
adult_start: Cell::new(None),
strong_start: Cell::new(None),
weak_start: Cell::new(None)
}));
#[derive(Debug, Clone, Copy)]
@@ -145,7 +148,8 @@ struct GcRuntimeData {
struct BoaGc {
config: GcConfig,
runtime: GcRuntimeData,
adult_start: Cell<Option<GcPointer>>,
strong_start: Cell<Option<GcPointer>>,
weak_start: Cell<Option<EphemeronPointer>>,
}
impl Drop for BoaGc {
@@ -190,18 +194,40 @@ struct Allocator;
impl Allocator {
/// Allocate a new garbage collected value to the Garbage Collector's heap.
fn allocate<T: Trace>(value: GcBox<T>) -> NonNull<GcBox<T>> {
let _timer = Profiler::global().start_event("New Pointer", "BoaAlloc");
fn alloc_gc<T: Trace>(value: GcBox<T>) -> NonNull<GcBox<T>> {
let _timer = Profiler::global().start_event("New GcBox", "BoaAlloc");
let element_size = mem::size_of_val::<GcBox<T>>(&value);
BOA_GC.with(|st| {
let mut gc = st.borrow_mut();
Self::manage_state(&mut gc);
value.header.next.set(gc.adult_start.take());
// Safety: Value Cannot be a null as it must be a GcBox<T>
let ptr = unsafe { NonNull::new_unchecked(Box::into_raw(Box::from(value))) };
value.header.next.set(gc.strong_start.take());
// Safety: value cannot be a null pointer, since `Box` cannot return null pointers.
let ptr = unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(value))) };
let erased: NonNull<GcBox<dyn Trace>> = ptr;
gc.adult_start.set(Some(ptr));
gc.strong_start.set(Some(erased));
gc.runtime.bytes_allocated += element_size;
ptr
})
}
fn alloc_ephemeron<K: Trace + ?Sized, V: Trace>(
value: EphemeronBox<K, V>,
) -> NonNull<EphemeronBox<K, V>> {
let _timer = Profiler::global().start_event("New EphemeronBox", "BoaAlloc");
let element_size = mem::size_of_val::<EphemeronBox<K, V>>(&value);
BOA_GC.with(|st| {
let mut gc = st.borrow_mut();
Self::manage_state(&mut gc);
value.header.next.set(gc.weak_start.take());
// Safety: value cannot be a null pointer, since `Box` cannot return null pointers.
let ptr = unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(value))) };
let erased: NonNull<dyn ErasedEphemeronBox> = ptr;
gc.weak_start.set(Some(erased));
gc.runtime.bytes_allocated += element_size;
ptr
@@ -210,7 +236,7 @@ impl Allocator {
fn manage_state(gc: &mut BoaGc) {
if gc.runtime.bytes_allocated > gc.config.threshold {
Collector::run_full_collection(gc);
Collector::collect(gc);
if gc.runtime.bytes_allocated
> gc.config.threshold / 100 * gc.config.used_space_percentage
@@ -222,141 +248,153 @@ impl Allocator {
}
}
// This collector currently functions in four main phases
//
// Mark -> Finalize -> Mark -> Sweep
//
// Mark nodes as reachable then finalize the unreachable nodes. A remark phase
// then needs to be retriggered as finalization can potentially resurrect dead
// nodes.
//
// A better approach in a more concurrent structure may be to reorder.
//
// Mark -> Sweep -> Finalize
struct Unreachables {
strong: Vec<NonNull<GcBox<dyn Trace>>>,
weak: Vec<NonNull<dyn ErasedEphemeronBox>>,
}
/// This collector currently functions in four main phases
///
/// Mark -> Finalize -> Mark -> Sweep
///
/// 1. Mark nodes as reachable.
/// 2. Finalize the unreachable nodes.
/// 3. Mark again because `Finalize::finalize` can potentially resurrect dead nodes.
/// 4. Sweep and drop all dead nodes.
///
/// A better approach in a more concurrent structure may be to reorder.
///
/// Mark -> Sweep -> Finalize
struct Collector;
impl Collector {
/// Run a collection on the full heap.
fn run_full_collection(gc: &mut BoaGc) {
fn collect(gc: &mut BoaGc) {
let _timer = Profiler::global().start_event("Gc Full Collection", "gc");
gc.runtime.collections += 1;
let unreachable_adults = Self::mark_heap(&gc.adult_start);
let unreachables = Self::mark_heap(&gc.strong_start, &gc.weak_start);
// Check if any unreachable nodes were found and finalize
if !unreachable_adults.is_empty() {
// SAFETY: Please see `Collector::finalize()`
unsafe { Self::finalize(unreachable_adults) };
}
// Only finalize if there are any unreachable nodes.
if !unreachables.strong.is_empty() || !unreachables.weak.is_empty() {
// Finalize all the unreachable nodes.
// SAFETY: All passed pointers are valid, since we won't deallocate until `Self::sweep`.
unsafe { Self::finalize(unreachables) };
let _final_unreachable_adults = Self::mark_heap(&gc.adult_start);
let _final_unreachables = Self::mark_heap(&gc.strong_start, &gc.weak_start);
}
// SAFETY: Please see `Collector::sweep()`
// SAFETY: The head of our linked list is always valid per the invariants of our GC.
unsafe {
Self::sweep(&gc.adult_start, &mut gc.runtime.bytes_allocated);
Self::sweep(
&gc.strong_start,
&gc.weak_start,
&mut gc.runtime.bytes_allocated,
);
}
}
/// Walk the heap and mark any nodes deemed reachable
fn mark_heap(head: &Cell<Option<NonNull<GcBox<dyn Trace>>>>) -> Vec<NonNull<GcBox<dyn Trace>>> {
fn mark_heap(
mut strong: &Cell<Option<NonNull<GcBox<dyn Trace>>>>,
mut weak: &Cell<Option<NonNull<dyn ErasedEphemeronBox>>>,
) -> Unreachables {
let _timer = Profiler::global().start_event("Gc Marking", "gc");
// Walk the list, tracing and marking the nodes
let mut finalize = Vec::new();
let mut ephemeron_queue = Vec::new();
let mut mark_head = head;
while let Some(node) = mark_head.get() {
// SAFETY: node must be valid as it is coming directly from the heap.
let mut strong_dead = Vec::new();
let mut pending_ephemerons = Vec::new();
// === Preliminary mark phase ===
//
// 0. Get the naive list of possibly dead nodes.
while let Some(node) = strong.get() {
// SAFETY: node must be valid as this phase cannot drop any node.
let node_ref = unsafe { node.as_ref() };
if node_ref.header.is_ephemeron() {
ephemeron_queue.push(node);
} else if node_ref.header.roots() > 0 {
if node_ref.header.roots() > 0 {
// SAFETY: the reference to node must be valid as it is rooted. Passing
// invalid references can result in Undefined Behavior
unsafe {
node_ref.trace_inner();
node_ref.mark_and_trace();
}
} else {
finalize.push(node);
} else if !node_ref.is_marked() {
strong_dead.push(node);
}
mark_head = &node_ref.header.next;
strong = &node_ref.header.next;
}
// Ephemeron Evaluation
if !ephemeron_queue.is_empty() {
ephemeron_queue = Self::mark_ephemerons(ephemeron_queue);
// 0.1. Early return if there are no ephemerons in the GC
if weak.get().is_none() {
strong_dead.retain_mut(|node| {
// SAFETY: node must be valid as this phase cannot drop any node.
unsafe { !node.as_ref().is_marked() }
});
return Unreachables {
strong: strong_dead,
weak: Vec::new(),
};
}
// Any left over nodes in the ephemeron queue at this point are
// unreachable and need to be notified/finalized.
finalize.extend(ephemeron_queue);
finalize
}
// === Weak mark phase ===
//
// 1. Get the naive list of ephemerons that are supposedly dead or their key is dead and
// trace all the ephemerons that have roots and their keys are live. Also remove from
// this list the ephemerons that are marked but their value is dead.
while let Some(eph) = weak.get() {
// SAFETY: node must be valid as this phase cannot drop any node.
let eph_ref = unsafe { eph.as_ref() };
// SAFETY: the garbage collector ensures `eph_ref` always points to valid data.
if unsafe { !eph_ref.trace() } {
pending_ephemerons.push(eph);
}
weak = &eph_ref.header().next;
}
// Tracing Ephemerons/Weak is always requires tracing the inner nodes in case it ends up marking unmarked node
//
// Time complexity should be something like O(nd) where d is the longest chain of epehemerons
/// Mark any ephemerons that are deemed live and trace their fields.
fn mark_ephemerons(
initial_queue: Vec<NonNull<GcBox<dyn Trace>>>,
) -> Vec<NonNull<GcBox<dyn Trace>>> {
let mut ephemeron_queue = initial_queue;
// 2. Iterate through all pending ephemerons, removing the ones which have been successfully
// traced. If there are no changes in the pending ephemerons list, it means that there are no
// more reachable ephemerons from the remaining ephemeron values.
let mut previous_len = pending_ephemerons.len();
loop {
// iterate through ephemeron queue, sorting nodes by whether they
// are reachable or unreachable<?>
let (reachable, other): (Vec<_>, Vec<_>) =
ephemeron_queue.into_iter().partition(|node| {
// SAFETY: Any node on the eph_queue or the heap must be non null
let node = unsafe { node.as_ref() };
if node.value.is_marked_ephemeron() {
node.header.mark();
true
} else {
node.header.roots() > 0
}
});
// Replace the old queue with the unreachable<?>
ephemeron_queue = other;
// If reachable nodes is not empty, trace values. If it is empty,
// break from the loop
if reachable.is_empty() {
pending_ephemerons.retain_mut(|eph| {
// SAFETY: node must be valid as this phase cannot drop any node.
let eph_ref = unsafe { eph.as_ref() };
// SAFETY: the garbage collector ensures `eph_ref` always points to valid data.
unsafe { !eph_ref.trace() }
});
if previous_len == pending_ephemerons.len() {
break;
}
EPHEMERON_QUEUE.with(|state| state.set(Some(Vec::new())));
// iterate through reachable nodes and trace their values,
// enqueuing any ephemeron that is found during the trace
for node in reachable {
// TODO: deal with fetch ephemeron_queue
// SAFETY: Node must be a valid pointer or else it would not be deemed reachable.
unsafe {
node.as_ref().weak_trace_inner();
}
}
EPHEMERON_QUEUE.with(|st| {
if let Some(found_nodes) = st.take() {
ephemeron_queue.extend(found_nodes);
}
});
previous_len = pending_ephemerons.len();
}
// 3. The remaining list should contain the ephemerons that are either unreachable or its key
// is dead. Cleanup the strong pointers since this procedure could have marked some more strong
// pointers.
strong_dead.retain_mut(|node| {
// SAFETY: node must be valid as this phase cannot drop any node.
unsafe { !node.as_ref().is_marked() }
});
Unreachables {
strong: strong_dead,
weak: pending_ephemerons,
}
ephemeron_queue
}
/// # Safety
///
/// Passing a vec with invalid pointers will result in Undefined Behaviour.
unsafe fn finalize(finalize_vec: Vec<NonNull<GcBox<dyn Trace>>>) {
/// Passing a `strong` or a `weak` vec with invalid pointers will result in Undefined Behaviour.
unsafe fn finalize(unreachables: Unreachables) {
let _timer = Profiler::global().start_event("Gc Finalization", "gc");
for node in finalize_vec {
// We double check that the unreachable nodes are actually unreachable
// prior to finalization as they could have been marked by a different
// trace after initially being added to the queue
//
// SAFETY: The caller must ensure all pointers inside `finalize_vec` are valid.
for node in unreachables.strong {
// SAFETY: The caller must ensure all pointers inside `unreachables.strong` are valid.
let node = unsafe { node.as_ref() };
if !node.header.is_marked() {
Trace::run_finalizer(&node.value);
}
Trace::run_finalizer(&node.value());
}
for node in unreachables.weak {
// SAFETY: The caller must ensure all pointers inside `unreachables.weak` are valid.
let node = unsafe { node.as_ref() };
node.finalize_and_clear();
}
}
@@ -367,30 +405,43 @@ impl Collector {
/// - Providing a list of pointers that weren't allocated by `Box::into_raw(Box::new(..))`
/// will result in Undefined Behaviour.
unsafe fn sweep(
heap_start: &Cell<Option<NonNull<GcBox<dyn Trace>>>>,
mut strong: &Cell<Option<NonNull<GcBox<dyn Trace>>>>,
mut weak: &Cell<Option<NonNull<dyn ErasedEphemeronBox>>>,
total_allocated: &mut usize,
) {
let _timer = Profiler::global().start_event("Gc Sweeping", "gc");
let _guard = DropGuard::new();
let mut sweep_head = heap_start;
while let Some(node) = sweep_head.get() {
while let Some(node) = strong.get() {
// SAFETY: The caller must ensure the validity of every node of `heap_start`.
let node_ref = unsafe { node.as_ref() };
if node_ref.is_marked() {
if node_ref.header.roots() > 0 || node_ref.is_marked() {
node_ref.header.unmark();
sweep_head = &node_ref.header.next;
} else if node_ref.header.is_ephemeron() && node_ref.header.roots() > 0 {
// Keep the ephemeron box's alive if rooted, but note that it's pointer is no longer safe
Trace::run_finalizer(&node_ref.value);
sweep_head = &node_ref.header.next;
strong = &node_ref.header.next;
} else {
// SAFETY: The algorithm ensures only unmarked/unreachable pointers are dropped.
// The caller must ensure all pointers were allocated by `Box::into_raw(Box::new(..))`.
let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
let unallocated_bytes = mem::size_of_val::<GcBox<_>>(&*unmarked_node);
let unallocated_bytes = mem::size_of_val(&*unmarked_node);
*total_allocated -= unallocated_bytes;
sweep_head.set(unmarked_node.header.next.take());
strong.set(unmarked_node.header.next.take());
}
}
while let Some(eph) = weak.get() {
// SAFETY: The caller must ensure the validity of every node of `weak`.
let eph_ref = unsafe { eph.as_ref() };
let header = eph_ref.header();
if header.roots() > 0 || header.is_marked() {
header.unmark();
weak = &header.next;
} else {
// SAFETY: The algorithm ensures only unmarked/unreachable pointers are dropped.
// The caller must ensure all pointers were allocated by `Box::into_raw(Box::new(..))`.
let unmarked_eph = unsafe { Box::from_raw(eph.as_ptr()) };
let unallocated_bytes = mem::size_of_val(&*unmarked_eph);
*total_allocated -= unallocated_bytes;
weak.set(unmarked_eph.header().next.take());
}
}
}
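
Both sweep loops walk an intrusive singly linked list through a `Cell` head, unmarking survivors and re-boxing dead nodes so they get dropped. A self-contained sketch of that unlink-and-drop pattern, simplified to a bare `marked` flag instead of the real header and with no byte accounting:

use std::cell::Cell;
use std::ptr::NonNull;

struct Node {
    marked: Cell<bool>,
    next: Cell<Option<NonNull<Node>>>,
}

fn sweep(head: &Cell<Option<NonNull<Node>>>) {
    let mut cursor = head;
    while let Some(node) = cursor.get() {
        // SAFETY: every node in this list was created via `Box::into_raw`.
        let node_ref = unsafe { node.as_ref() };
        if node_ref.marked.get() {
            // Survivor: clear the mark for the next cycle and advance.
            node_ref.marked.set(false);
            cursor = &node_ref.next;
        } else {
            // Dead: re-box the allocation so it drops, then splice it out.
            // SAFETY: dead nodes are unreachable, so nothing else aliases them.
            let dead = unsafe { Box::from_raw(node.as_ptr()) };
            cursor.set(dead.next.take());
        }
    }
}

fn main() {
    let head: Cell<Option<NonNull<Node>>> = Cell::new(None);
    for marked in [false, true] {
        let node = Box::new(Node {
            marked: Cell::new(marked),
            next: Cell::new(head.get()),
        });
        head.set(NonNull::new(Box::into_raw(node)));
    }
    sweep(&head); // drops the unmarked node, keeps (and unmarks) the other
    assert!(head.get().is_some());
    // Clean up the survivor (the real collector does this in `dump`).
    if let Some(node) = head.get() {
        // SAFETY: the remaining node is still owned by the list.
        drop(unsafe { Box::from_raw(node.as_ptr()) });
    }
}
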
@@ -400,13 +451,22 @@ impl Collector {
// Not initializing a dropguard since this should only be invoked when BOA_GC is being dropped.
let _guard = DropGuard::new();
let sweep_head = &gc.adult_start;
while let Some(node) = sweep_head.get() {
let strong_head = &gc.strong_start;
while let Some(node) = strong_head.get() {
// SAFETY:
// The `Allocator` must always ensure its start node is a valid, non-null pointer that
// was allocated by `Box::into_raw(Box::new(..))`.
let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
strong_head.set(unmarked_node.header.next.take());
}
let eph_head = &gc.weak_start;
while let Some(node) = eph_head.get() {
// SAFETY:
// The `Allocator` must always ensure its start node is a valid, non-null pointer that
// was allocated by `Box::into_raw(Box::new(..))`.
let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) };
sweep_head.set(unmarked_node.header.next.take());
eph_head.set(unmarked_node.header().next.take());
}
}
}
@@ -417,7 +477,7 @@ pub fn force_collect() {
let mut gc = current.borrow_mut();
if gc.runtime.bytes_allocated > 0 {
Collector::run_full_collection(&mut gc);
Collector::collect(&mut gc);
}
});
}

156
boa_gc/src/pointers/ephemeron.rs

@@ -2,11 +2,12 @@ use crate::{
finalizer_safe,
internals::EphemeronBox,
trace::{Finalize, Trace},
Allocator, Gc, GcBox, EPHEMERON_QUEUE,
Allocator, Gc,
};
use std::{cell::Cell, ptr::NonNull};
#[derive(Debug)]
use super::rootable::Rootable;
/// A key-value pair where the value becomes inaccessible when the key is garbage collected.
///
/// See Racket's explanation on [**ephemerons**][eph] for a brief overview or read Barry Hayes'
@@ -15,70 +16,119 @@ use std::{cell::Cell, ptr::NonNull};
///
/// [eph]: https://docs.racket-lang.org/reference/ephemerons.html
/// [acm]: https://dl.acm.org/doi/10.1145/263700.263733
#[derive(Debug)]
pub struct Ephemeron<K: Trace + ?Sized + 'static, V: Trace + 'static> {
inner_ptr: Cell<NonNull<GcBox<EphemeronBox<K, V>>>>,
inner_ptr: Cell<Rootable<EphemeronBox<K, V>>>,
}
impl<K: Trace + ?Sized, V: Trace + Clone> Ephemeron<K, V> {
/// Gets the stored value of this `Ephemeron`, or `None` if the key was already garbage collected.
///
/// This needs to return a clone of the value because holding a reference to it between
/// garbage collection passes could drop the underlying allocation, causing a use-after-free.
pub fn value(&self) -> Option<V> {
// SAFETY: this is safe because `Ephemeron` is tracked to always point to a valid pointer
// `inner_ptr`.
unsafe { self.inner_ptr.get().as_ref().value().cloned() }
}
}
impl<K: Trace + ?Sized, V: Trace> Ephemeron<K, V> {
/// Creates a new `Ephemeron`.
pub fn new(key: &Gc<K>, value: V) -> Self {
Self {
inner_ptr: Cell::new(Allocator::allocate(GcBox::new_weak(EphemeronBox::new(
key, value,
)))),
// SAFETY: `value` comes from the stack and should be rooted, meaning unrooting
// it to pass it to the underlying `EphemeronBox` is safe.
unsafe {
value.unroot();
}
// SAFETY: EphemeronBox is at least 2 bytes in size, and so its alignment is always a
// multiple of 2.
unsafe {
Self {
inner_ptr: Cell::new(
Rootable::new_unchecked(Allocator::alloc_ephemeron(EphemeronBox::new(
key, value,
)))
.rooted(),
),
}
}
}
}
impl<K: Trace + ?Sized, V: Trace> Ephemeron<K, V> {
fn inner_ptr(&self) -> NonNull<GcBox<EphemeronBox<K, V>>> {
self.inner_ptr.get()
/// Returns `true` if the two `Ephemeron`s point to the same allocation.
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
EphemeronBox::ptr_eq(this.inner(), other.inner())
}
fn inner(&self) -> &GcBox<EphemeronBox<K, V>> {
// SAFETY: GcBox<EphemeronBox<K,V>> must live until it is unrooted by Drop
unsafe { &*self.inner_ptr().as_ptr() }
fn is_rooted(&self) -> bool {
self.inner_ptr.get().is_rooted()
}
/// Gets the weak key of this `Ephemeron`, or `None` if the key was already garbage
/// collected.
pub fn key(&self) -> Option<&K> {
self.inner().value().key()
fn root_ptr(&self) {
self.inner_ptr.set(self.inner_ptr.get().rooted());
}
/// Gets the stored value of this `Ephemeron`.
pub fn value(&self) -> &V {
self.inner().value().value()
fn unroot_ptr(&self) {
self.inner_ptr.set(self.inner_ptr.get().unrooted());
}
/// Gets a `Gc` for the stored key of this `Ephemeron`.
pub fn upgrade_key(&self) -> Option<Gc<K>> {
// SAFETY: ptr must be a valid pointer or None would have been returned.
self.inner().value().inner_key_ptr().map(|ptr| unsafe {
let inner_ptr = NonNull::new_unchecked(ptr);
Gc::from_ptr(inner_ptr)
})
pub(crate) fn inner_ptr(&self) -> NonNull<EphemeronBox<K, V>> {
assert!(finalizer_safe());
self.inner_ptr.get().as_ptr()
}
}
impl<K: Trace, V: Trace> Finalize for Ephemeron<K, V> {}
fn inner(&self) -> &EphemeronBox<K, V> {
// SAFETY: Please see Gc::inner_ptr()
unsafe { self.inner_ptr().as_ref() }
}
// SAFETY: The `Ephemeron` trace implementation is standard for everything except `Trace::weak_trace()`,
// which pushes the GcBox<EphemeronBox<_>> onto the EphemeronQueue
unsafe impl<K: Trace, V: Trace> Trace for Ephemeron<K, V> {
unsafe fn trace(&self) {}
/// Constructs an `Ephemeron<K, V>` from a raw pointer.
///
/// # Safety
///
/// This function is unsafe because improper use may lead to memory corruption, double-free,
/// or misbehaviour of the garbage collector.
#[must_use]
unsafe fn from_raw(ptr: NonNull<EphemeronBox<K, V>>) -> Self {
// SAFETY: it is the caller's job to ensure the safety of this operation.
unsafe {
Self {
inner_ptr: Cell::new(Rootable::new_unchecked(ptr).rooted()),
}
}
}
}
// Push this Ephemeron's pointer onto the EphemeronQueue
unsafe fn weak_trace(&self) {
EPHEMERON_QUEUE.with(|q| {
let mut queue = q.take().expect("queue is initialized by weak_trace");
queue.push(self.inner_ptr());
});
impl<K: Trace + ?Sized, V: Trace> Finalize for Ephemeron<K, V> {}
// SAFETY: `Ephemeron`'s trace implementation only marks its inner box because we want to stop
// tracing through weakly held pointers.
unsafe impl<K: Trace + ?Sized, V: Trace> Trace for Ephemeron<K, V> {
unsafe fn trace(&self) {
// SAFETY: We need to mark the inner box of the `Ephemeron` since it is reachable
// from a root and this means it cannot be dropped.
unsafe {
self.inner().mark();
}
}
unsafe fn root(&self) {}
unsafe fn root(&self) {
assert!(!self.is_rooted(), "Can't double-root an Ephemeron<K, V>");
// Try to get inner before modifying our state. Inner may be
// inaccessible due to this method being invoked during the sweeping
// phase, and we don't want to modify our state before panicking.
self.inner().root();
self.root_ptr();
}
unsafe fn unroot(&self) {}
unsafe fn unroot(&self) {
assert!(self.is_rooted(), "Can't double-unroot an Ephemeron<K, V>");
// Try to get inner before modifying our state. Inner may be
// inaccessible due to this method being invoked during the sweeping
// phase, and we don't want to modify our state before panicking.
self.inner().unroot();
self.unroot_ptr();
}
fn run_finalizer(&self) {
Finalize::finalize(self);
@@ -87,26 +137,22 @@ unsafe impl<K: Trace, V: Trace> Trace for Ephemeron<K, V> {
impl<K: Trace + ?Sized, V: Trace> Clone for Ephemeron<K, V> {
fn clone(&self) -> Self {
// SAFETY: This is safe because the inner_ptr must live as long as its roots.
// Mismanagement of roots can cause inner_ptr to use after free or Undefined
// Behavior.
let ptr = self.inner_ptr();
// SAFETY: since an `Ephemeron` is always valid, its `inner_ptr` must also be always a valid
// pointer.
unsafe {
let eph = Self {
inner_ptr: Cell::new(NonNull::new_unchecked(self.inner_ptr().as_ptr())),
};
// Increment the Ephemeron's GcBox roots by 1
self.inner().root_inner();
eph
ptr.as_ref().root();
}
// SAFETY: `&self` is a valid Ephemeron pointer.
unsafe { Self::from_raw(ptr) }
}
}
impl<K: Trace + ?Sized, V: Trace> Drop for Ephemeron<K, V> {
fn drop(&mut self) {
// NOTE: We assert that this drop call is not a
// drop from `Collector::dump` or `Collector::sweep`
if finalizer_safe() {
self.inner().unroot_inner();
// If this pointer was a root, we should unroot it.
if self.is_rooted() {
self.inner().unroot();
}
}
}
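
Put together, the reworked `Ephemeron` behaves as in this usage sketch, which mirrors `eph_ephemeron_test` further down (assuming `boa_gc` as the crate root):

use boa_gc::{force_collect, Ephemeron, Gc};

fn main() {
    let key = Gc::new(3_i32);
    let eph = Ephemeron::new(&key, String::from("value"));
    assert_eq!(eph.value().as_deref(), Some("value")); // key is alive
    drop(key);
    force_collect();
    assert!(eph.value().is_none()); // key collected, value inaccessible
}
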

122
boa_gc/src/pointers/gc.rs

@@ -11,23 +11,15 @@ use std::{
hash::{Hash, Hasher},
marker::PhantomData,
ops::Deref,
ptr::{self, addr_of_mut, NonNull},
ptr::NonNull,
rc::Rc,
};
// Technically, this function is safe, since we're just modifying the address of a pointer without
// dereferencing it.
pub(crate) fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
// SAFETY: this should be safe as ptr must be a valid nonnull
unsafe {
ptr::write(addr_of_mut!(ptr).cast::<*mut u8>(), data.cast::<u8>());
}
ptr
}
use super::rootable::Rootable;
/// A garbage-collected pointer type over an immutable value.
pub struct Gc<T: Trace + ?Sized + 'static> {
pub(crate) inner_ptr: Cell<NonNull<GcBox<T>>>,
pub(crate) inner_ptr: Cell<Rootable<GcBox<T>>>,
pub(crate) marker: PhantomData<Rc<T>>,
}
@@ -37,15 +29,17 @@ impl<T: Trace> Gc<T> {
// Create GcBox and allocate it to heap.
//
// Note: Allocator can cause Collector to run
let inner_ptr = Allocator::allocate(GcBox::new(value));
let inner_ptr = Allocator::alloc_gc(GcBox::new(value));
// SAFETY: inner_ptr was just allocated, so it must be a valid value that implements [`Trace`]
unsafe { (*inner_ptr.as_ptr()).value().unroot() }
let gc = Self {
inner_ptr: Cell::new(inner_ptr),
// SAFETY: inner_ptr is 2-byte aligned.
let inner_ptr = unsafe { Rootable::new_unchecked(inner_ptr) };
Self {
inner_ptr: Cell::new(inner_ptr.rooted()),
marker: PhantomData,
};
gc.set_root();
gc
}
}
/// Consumes the `Gc`, returning a wrapped raw pointer.
@@ -53,7 +47,7 @@ impl<T: Trace> Gc<T> {
/// To avoid a memory leak, the pointer must be converted back to a `Gc` using [`Gc::from_raw`].
#[allow(clippy::use_self)]
pub fn into_raw(this: Gc<T>) -> NonNull<GcBox<T>> {
let ptr = this.inner_ptr.get();
let ptr = this.inner_ptr();
std::mem::forget(this);
ptr
}
@@ -75,68 +69,33 @@ impl<T: Trace + ?Sized> Gc<T> {
/// This function is unsafe because improper use may lead to memory corruption, double-free,
/// or misbehaviour of the garbage collector.
#[must_use]
pub const unsafe fn from_raw(ptr: NonNull<GcBox<T>>) -> Self {
Self {
inner_ptr: Cell::new(ptr),
marker: PhantomData,
}
}
/// Return a rooted `Gc` from a `GcBox` pointer
pub(crate) unsafe fn from_ptr(ptr: NonNull<GcBox<T>>) -> Self {
// SAFETY: the caller must ensure that the pointer is valid.
pub unsafe fn from_raw(ptr: NonNull<GcBox<T>>) -> Self {
// SAFETY: it is the caller's job to ensure the safety of this operation.
unsafe {
ptr.as_ref().root_inner();
let gc = Self {
inner_ptr: Cell::new(ptr),
Self {
inner_ptr: Cell::new(Rootable::new_unchecked(ptr).rooted()),
marker: PhantomData,
};
gc.set_root();
gc
}
}
}
}
/// Returns the given pointer with its root bit cleared.
pub(crate) unsafe fn clear_root_bit<T: ?Sized + Trace>(
ptr: NonNull<GcBox<T>>,
) -> NonNull<GcBox<T>> {
let ptr = ptr.as_ptr();
let data = ptr.cast::<u8>();
let addr = data as isize;
let ptr = set_data_ptr(ptr, data.wrapping_offset((addr & !1) - addr));
// SAFETY: ptr must be a non null value
unsafe { NonNull::new_unchecked(ptr) }
}
impl<T: Trace + ?Sized> Gc<T> {
fn rooted(&self) -> bool {
self.inner_ptr.get().as_ptr().cast::<u8>() as usize & 1 != 0
fn is_rooted(&self) -> bool {
self.inner_ptr.get().is_rooted()
}
pub(crate) fn set_root(&self) {
let ptr = self.inner_ptr.get().as_ptr();
let data = ptr.cast::<u8>();
let addr = data as isize;
let ptr = set_data_ptr(ptr, data.wrapping_offset((addr | 1) - addr));
// SAFETY: ptr must be a non null value.
unsafe {
self.inner_ptr.set(NonNull::new_unchecked(ptr));
}
fn root_ptr(&self) {
self.inner_ptr.set(self.inner_ptr.get().rooted());
}
fn clear_root(&self) {
// SAFETY: inner_ptr must be a valid non-null pointer to a live GcBox.
unsafe {
self.inner_ptr.set(clear_root_bit(self.inner_ptr.get()));
}
fn unroot_ptr(&self) {
self.inner_ptr.set(self.inner_ptr.get().unrooted());
}
pub(crate) fn inner_ptr(&self) -> NonNull<GcBox<T>> {
assert!(finalizer_safe());
// SAFETY: inner_ptr must be a live GcBox. Calling this on a dropped GcBox
// can result in Undefined Behavior.
unsafe { clear_root_bit(self.inner_ptr.get()) }
self.inner_ptr.get().as_ptr()
}
fn inner(&self) -> &GcBox<T> {
@@ -153,30 +112,26 @@ unsafe impl<T: Trace + ?Sized> Trace for Gc<T> {
unsafe fn trace(&self) {
// SAFETY: Inner must be live and allocated GcBox.
unsafe {
self.inner().trace_inner();
self.inner().mark_and_trace();
}
}
unsafe fn weak_trace(&self) {
self.inner().weak_trace_inner();
}
unsafe fn root(&self) {
assert!(!self.rooted(), "Can't double-root a Gc<T>");
assert!(!self.is_rooted(), "Can't double-root a Gc<T>");
// Try to get inner before modifying our state. Inner may be
// inaccessible due to this method being invoked during the sweeping
// phase, and we don't want to modify our state before panicking.
self.inner().root_inner();
self.set_root();
self.inner().root();
self.root_ptr();
}
unsafe fn unroot(&self) {
assert!(self.rooted(), "Can't double-unroot a Gc<T>");
assert!(self.is_rooted(), "Can't double-unroot a Gc<T>");
// Try to get inner before modifying our state. Inner may be
// inaccessible due to this method being invoked during the sweeping
// phase, and we don't want to modify our state before panicking.
self.inner().unroot_inner();
self.clear_root();
self.inner().unroot();
self.unroot_ptr();
}
fn run_finalizer(&self) {
@@ -186,8 +141,15 @@ unsafe impl<T: Trace + ?Sized> Trace for Gc<T> {
impl<T: Trace + ?Sized> Clone for Gc<T> {
fn clone(&self) -> Self {
// SAFETY: `&self` is a valid Gc pointer.
unsafe { Self::from_ptr(self.inner_ptr()) }
let ptr = self.inner_ptr();
// SAFETY: since a `Gc` is always valid, its `inner_ptr` must also be always a valid pointer.
unsafe {
ptr.as_ref().root();
}
// SAFETY: though `ptr` doesn't come from a `into_raw` call, it essentially does the same,
// but it skips the call to `std::mem::forget` since we have a reference instead of an owned
// value.
unsafe { Self::from_raw(ptr) }
}
}
@@ -202,8 +164,8 @@ impl<T: Trace + ?Sized> Deref for Gc<T> {
impl<T: Trace + ?Sized> Drop for Gc<T> {
fn drop(&mut self) {
// If this pointer was a root, we should unroot it.
if self.rooted() {
self.inner().unroot_inner();
if self.is_rooted() {
self.inner().unroot();
}
}
}
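
A hedged round-trip sketch of the raw-pointer escape hatch documented above; the root count held by the original `Gc` survives `into_raw` because `Drop` is skipped via `mem::forget`:

use boa_gc::Gc;

fn main() {
    let gc = Gc::new(7_i32);
    let raw = Gc::into_raw(gc); // leaks the rooted pointer
    // SAFETY: `raw` came from `Gc::into_raw` and was never freed.
    let gc = unsafe { Gc::from_raw(raw) };
    assert_eq!(*gc, 7);
}
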

1
boa_gc/src/pointers/mod.rs

@@ -2,6 +2,7 @@
mod ephemeron;
mod gc;
mod rootable;
mod weak;
pub use ephemeron::Ephemeron;

87
boa_gc/src/pointers/rootable.rs

@@ -0,0 +1,87 @@
use std::ptr::{self, addr_of_mut, NonNull};
/// A [`NonNull`] pointer with a `rooted` tag.
///
/// This pointer can be created only from pointers that are 2-byte aligned. In other words,
/// the pointer must point to an address that is a multiple of 2.
pub(crate) struct Rootable<T: ?Sized> {
ptr: NonNull<T>,
}
impl<T: ?Sized> Copy for Rootable<T> {}
impl<T: ?Sized> Clone for Rootable<T> {
fn clone(&self) -> Self {
Self { ptr: self.ptr }
}
}
impl<T: ?Sized> std::fmt::Debug for Rootable<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Rootable")
.field("ptr", &self.as_ptr())
.field("is_rooted", &self.is_rooted())
.finish()
}
}
impl<T: ?Sized> Rootable<T> {
/// Creates a new `Rootable` without checking if the [`NonNull`] is properly aligned.
///
/// # Safety
///
/// `ptr` must be 2-byte aligned.
pub(crate) const unsafe fn new_unchecked(ptr: NonNull<T>) -> Self {
Self { ptr }
}
/// Returns `true` if the pointer is rooted.
pub(crate) fn is_rooted(self) -> bool {
self.ptr.as_ptr().cast::<u8>() as usize & 1 != 0
}
/// Returns a pointer with the same address as `self` but rooted.
pub(crate) fn rooted(self) -> Self {
let ptr = self.ptr.as_ptr();
let data = ptr.cast::<u8>();
let addr = data as isize;
let ptr = set_data_ptr(ptr, data.wrapping_offset((addr | 1) - addr));
// SAFETY: ptr must be a non null value.
unsafe { Self::new_unchecked(NonNull::new_unchecked(ptr)) }
}
/// Returns a pointer with the same address as `self` but unrooted.
pub(crate) fn unrooted(self) -> Self {
let ptr = self.ptr.as_ptr();
let data = ptr.cast::<u8>();
let addr = data as isize;
let ptr = set_data_ptr(ptr, data.wrapping_offset((addr & !1) - addr));
// SAFETY: ptr must be a non null value
unsafe { Self::new_unchecked(NonNull::new_unchecked(ptr)) }
}
/// Acquires the underlying `NonNull` pointer.
pub(crate) fn as_ptr(self) -> NonNull<T> {
self.unrooted().ptr
}
/// Returns a shared reference to the pointee.
///
/// # Safety
///
/// See [`NonNull::as_ref`].
pub(crate) unsafe fn as_ref(&self) -> &T {
// SAFETY: it is the caller's job to ensure the safety of this operation.
unsafe { self.as_ptr().as_ref() }
}
}
// Technically, this function is safe, since we're just modifying the address of a pointer without
// dereferencing it.
fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
// SAFETY: this should be safe, as `ptr` must be a valid non-null pointer
unsafe {
ptr::write(addr_of_mut!(ptr).cast::<*mut u8>(), data.cast::<u8>());
}
ptr
}
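
The whole scheme relies on alignment: a 2-byte-aligned address always has a zero low bit, so that bit can hold the `rooted` flag without widening the pointer. A standalone demonstration of the bit arithmetic (illustration only, not the crate's code):

fn main() {
    let boxed = Box::new(0_u16); // `u16` guarantees 2-byte alignment
    let addr = Box::into_raw(boxed) as usize;
    assert_eq!(addr & 1, 0); // alignment keeps the low bit clear

    let rooted = addr | 1; // set the flag without changing the address
    let unrooted = rooted & !1; // mask it off to recover the real pointer
    assert_eq!(unrooted, addr);

    // SAFETY: `unrooted` is the original, untagged allocation.
    drop(unsafe { Box::from_raw(unrooted as *mut u16) });
}
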

23
boa_gc/src/pointers/weak.rs

@@ -3,35 +3,28 @@ use crate::{Ephemeron, Finalize, Gc, Trace};
/// A weak reference to a [`Gc`].
///
/// This type allows keeping references to [`Gc`] managed values without keeping them alive for
/// garbage collections. However, this also means [`WeakGc::value`] can return `None` at any moment.
/// garbage collections. However, this also means [`WeakGc::upgrade`] could return `None` at any moment.
#[derive(Debug, Trace, Finalize)]
#[repr(transparent)]
pub struct WeakGc<T: Trace + ?Sized + 'static> {
inner: Ephemeron<T, ()>,
inner: Ephemeron<T, Gc<T>>,
}
impl<T: Trace + ?Sized> WeakGc<T> {
impl<T: Trace> WeakGc<T> {
/// Creates a new weak pointer for a garbage collected value.
pub fn new(value: &Gc<T>) -> Self {
Self {
inner: Ephemeron::new(value, ()),
inner: Ephemeron::new(value, value.clone()),
}
}
}
impl<T: Trace + ?Sized> WeakGc<T> {
/// Gets the value of this weak pointer, or `None` if the value was already garbage collected.
pub fn value(&self) -> Option<&T> {
self.inner.key()
}
/// Upgrades this weak pointer to a `Gc`, or returns `None` if the value was already garbage collected.
pub fn upgrade(&self) -> Option<Gc<T>> {
self.inner.upgrade_key()
self.inner.value()
}
}
impl<T: Trace + ?Sized> Clone for WeakGc<T> {
impl<T: Trace> Clone for WeakGc<T> {
fn clone(&self) -> Self {
Self {
inner: self.inner.clone(),
@@ -39,8 +32,8 @@ impl<T: Trace + ?Sized> Clone for WeakGc<T> {
}
}
impl<T: Trace + ?Sized> From<Ephemeron<T, ()>> for WeakGc<T> {
fn from(inner: Ephemeron<T, ()>) -> Self {
impl<T: Trace> From<Ephemeron<T, Gc<T>>> for WeakGc<T> {
fn from(inner: Ephemeron<T, Gc<T>>) -> Self {
Self { inner }
}
}
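
In practice the new `WeakGc` reads like the sketch below, mirroring `eph_weak_gc_test` in the test changes: `upgrade` hands back a fresh `Gc<T>` while the referent is alive, and `None` afterwards. Storing `value.clone()` as the ephemeron's value works because ephemeron values are only traced when their key is reachable, so the stored `Gc<T>` cannot keep its own referent alive.

use boa_gc::{force_collect, Gc, WeakGc};

fn main() {
    let strong = Gc::new(3_i32);
    let weak = WeakGc::new(&strong);
    assert_eq!(weak.upgrade().as_deref(), Some(&3));
    drop(strong);
    force_collect();
    assert!(weak.upgrade().is_none());
}
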

4
boa_gc/src/test/allocation.rs

@@ -1,10 +1,10 @@
use super::{run_test, Harness};
use crate::{force_collect, Gc, GcCell};
use crate::{force_collect, Gc, GcRefCell};
#[test]
fn gc_basic_cell_allocation() {
run_test(|| {
let gc_cell = Gc::new(GcCell::new(16_u16));
let gc_cell = Gc::new(GcRefCell::new(16_u16));
force_collect();
Harness::assert_collections(1);

6
boa_gc/src/test/cell.rs

@@ -1,13 +1,13 @@
use super::run_test;
use crate::{Gc, GcCell};
use crate::{Gc, GcRefCell};
#[test]
fn boa_borrow_mut_test() {
run_test(|| {
let v = Gc::new(GcCell::new(Vec::new()));
let v = Gc::new(GcRefCell::new(Vec::new()));
for _ in 1..=259 {
let cell = Gc::new(GcCell::new([0u8; 10]));
let cell = Gc::new(GcRefCell::new([0u8; 10]));
v.borrow_mut().push(cell);
}
});

9
boa_gc/src/test/mod.rs

@@ -18,7 +18,7 @@ impl Harness {
BOA_GC.with(|current| {
let gc = current.borrow();
assert!(gc.adult_start.get().is_none());
assert!(gc.strong_start.get().is_none());
assert!(gc.runtime.bytes_allocated == 0);
});
}
@@ -29,6 +29,13 @@ impl Harness {
assert!(gc.runtime.bytes_allocated > 0);
});
}
fn assert_exact_bytes_allocated(bytes: usize) {
BOA_GC.with(|current| {
let gc = current.borrow();
assert_eq!(gc.runtime.bytes_allocated, bytes);
});
}
}
fn run_test(test: impl FnOnce() + Send + 'static) {

128
boa_gc/src/test/weak.rs

@@ -1,5 +1,7 @@
use super::run_test;
use crate::{force_collect, Ephemeron, Gc, WeakGc};
use crate::{
force_collect, test::Harness, Ephemeron, Finalize, Gc, GcBox, GcRefCell, Trace, WeakGc,
};
#[test]
fn eph_weak_gc_test() {
@@ -11,15 +13,15 @@ fn eph_weak_gc_test() {
let weak = WeakGc::new(&cloned_gc);
assert_eq!(*weak.value().expect("Is live currently"), 3);
assert_eq!(*weak.upgrade().expect("Is live currently"), 3);
drop(cloned_gc);
force_collect();
assert_eq!(*weak.value().expect("WeakGc is still live here"), 3);
assert_eq!(*weak.upgrade().expect("WeakGc is still live here"), 3);
drop(gc_value);
force_collect();
assert!(weak.value().is_none());
assert!(weak.upgrade().is_none());
}
});
}
@@ -34,16 +36,21 @@ fn eph_ephemeron_test() {
let ephemeron = Ephemeron::new(&cloned_gc, String::from("Hello World!"));
assert_eq!(*ephemeron.key().expect("Ephemeron is live"), 3);
assert_eq!(*ephemeron.value(), String::from("Hello World!"));
assert_eq!(
*ephemeron.value().expect("Ephemeron is live"),
String::from("Hello World!")
);
drop(cloned_gc);
force_collect();
assert_eq!(*ephemeron.key().expect("Ephemeron is still live here"), 3);
assert_eq!(
*ephemeron.value().expect("Ephemeron is still live here"),
String::from("Hello World!")
);
drop(gc_value);
force_collect();
assert!(ephemeron.key().is_none());
assert!(ephemeron.value().is_none());
}
});
}
@@ -58,25 +65,19 @@ fn eph_allocation_chains() {
let weak = WeakGc::new(&cloned_gc);
let wrap = Gc::new(weak);
assert_eq!(wrap.value().expect("weak is live"), &String::from("foo"));
assert_eq!(*wrap.upgrade().expect("weak is live"), "foo");
let eph = Ephemeron::new(&wrap, 3);
drop(cloned_gc);
force_collect();
assert_eq!(
eph.key()
.expect("eph is still live")
.value()
.expect("weak is still live"),
&String::from("foo")
);
assert_eq!(eph.value().expect("weak is still live"), 3);
drop(gc_value);
force_collect();
assert!(eph.key().expect("eph is still live").value().is_none());
assert!(eph.value().is_none());
}
});
}
@@ -90,7 +91,7 @@ fn eph_basic_alloc_dump_test() {
let eph = Ephemeron::new(&gc_value, 4);
let _fourth = Gc::new("tail");
assert_eq!(*eph.key().expect("must be live"), String::from("gc here"));
assert_eq!(eph.value().expect("must be live"), 4);
});
}
@@ -123,10 +124,97 @@ fn eph_basic_clone_test() {
drop(weak);
force_collect();
assert_eq!(*new_gc, *new_weak.value().expect("weak should be live"));
assert_eq!(*new_gc, *new_weak.upgrade().expect("weak should be live"));
assert_eq!(
*init_gc,
*new_weak.value().expect("weak should be live still")
*new_weak.upgrade().expect("weak should be live still")
);
});
}
#[test]
fn eph_self_referential() {
#[derive(Trace, Finalize, Clone)]
struct InnerCell {
inner: GcRefCell<Option<Ephemeron<InnerCell, TestCell>>>,
}
#[derive(Trace, Finalize, Clone)]
struct TestCell {
inner: Gc<InnerCell>,
}
run_test(|| {
let root = TestCell {
inner: Gc::new(InnerCell {
inner: GcRefCell::new(None),
}),
};
let root_size = std::mem::size_of::<GcBox<InnerCell>>();
Harness::assert_exact_bytes_allocated(root_size);
{
// Generate a self-referential ephemeron
let eph = Ephemeron::new(&root.inner, root.clone());
*root.inner.inner.borrow_mut() = Some(eph.clone());
assert!(eph.value().is_some());
Harness::assert_exact_bytes_allocated(80);
}
*root.inner.inner.borrow_mut() = None;
force_collect();
Harness::assert_exact_bytes_allocated(root_size);
});
}
#[test]
fn eph_self_referential_chain() {
#[derive(Trace, Finalize, Clone)]
struct TestCell {
inner: Gc<GcRefCell<Option<Ephemeron<u8, TestCell>>>>,
}
run_test(|| {
let root = Gc::new(GcRefCell::new(None));
let root_size = std::mem::size_of::<GcBox<GcRefCell<Option<Ephemeron<u8, TestCell>>>>>();
Harness::assert_exact_bytes_allocated(root_size);
let watched = Gc::new(0);
{
// Generate a self-referential loop of weak and non-weak pointers
let chain1 = TestCell {
inner: Gc::new(GcRefCell::new(None)),
};
let chain2 = TestCell {
inner: Gc::new(GcRefCell::new(None)),
};
let eph_start = Ephemeron::new(&watched, chain1.clone());
let eph_chain2 = Ephemeron::new(&watched, chain2.clone());
*chain1.inner.borrow_mut() = Some(eph_chain2.clone());
*chain2.inner.borrow_mut() = Some(eph_start.clone());
*root.borrow_mut() = Some(eph_start.clone());
force_collect();
assert!(eph_start.value().is_some());
assert!(eph_chain2.value().is_some());
Harness::assert_exact_bytes_allocated(240);
}
*root.borrow_mut() = None;
force_collect();
drop(watched);
force_collect();
Harness::assert_exact_bytes_allocated(root_size);
});
}

28
boa_gc/src/trace.rs

@@ -39,13 +39,6 @@ pub unsafe trait Trace: Finalize {
/// See [`Trace`].
unsafe fn trace(&self);
/// Marks all contained weak references of a `Gc`.
///
/// # Safety
///
/// See [`Trace`].
unsafe fn weak_trace(&self);
/// Increments the root-count of all contained `Gc`s.
///
/// # Safety
@@ -60,12 +53,6 @@ pub unsafe trait Trace: Finalize {
/// See [`Trace`].
unsafe fn unroot(&self);
/// Checks if an ephemeron's key is marked.
#[doc(hidden)]
fn is_marked_ephemeron(&self) -> bool {
false
}
/// Runs [`Finalize::finalize`] on this object and all
/// contained subobjects.
fn run_finalizer(&self);
@@ -80,8 +67,6 @@ macro_rules! empty_trace {
#[inline]
unsafe fn trace(&self) {}
#[inline]
unsafe fn weak_trace(&self) {}
#[inline]
unsafe fn root(&self) {}
#[inline]
unsafe fn unroot(&self) {}
@@ -116,17 +101,6 @@ macro_rules! custom_trace {
$body
}
#[inline]
unsafe fn weak_trace(&self) {
fn mark<T: $crate::Trace + ?Sized>(it: &T) {
// SAFETY: The implementor must ensure that `weak_trace` is correctly implemented.
unsafe {
$crate::Trace::weak_trace(it);
}
}
let $this = self;
$body
}
#[inline]
unsafe fn root(&self) {
fn mark<T: $crate::Trace + ?Sized>(it: &T) {
// SAFETY: The implementor must ensure that `root` is correctly implemented.
@@ -456,6 +430,8 @@ impl<T: Trace> Finalize for Cell<Option<T>> {
}
}
// SAFETY: This implementation is safe, because `take` leaves `None` in the
// place of our value, making it possible to trace through it safely.
unsafe impl<T: Trace> Trace for Cell<Option<T>> {
custom_trace!(this, {
if let Some(t) = this.take() {

10
boa_macros/src/lib.rs

@@ -104,16 +104,6 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream {
match *self { #trace_body }
}
#[inline]
unsafe fn weak_trace(&self) {
#[allow(dead_code, unreachable_code)]
fn mark<T: ::boa_gc::Trace + ?Sized>(it: &T) {
unsafe {
::boa_gc::Trace::weak_trace(it)
}
}
match *self { #trace_body }
}
#[inline]
unsafe fn root(&self) {
#[allow(dead_code)]
fn mark<T: ::boa_gc::Trace + ?Sized>(it: &T) {

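With `weak_trace` removed from the derive as well, user types keep deriving `Trace` and `Finalize` exactly as before (the tests above do the same); a minimal sketch with illustrative field names:

use boa_gc::{Finalize, Gc, Trace};

#[derive(Trace, Finalize)]
struct Point {
    x: Gc<i32>,
    y: Gc<i32>,
}

fn main() {
    let p = Point {
        x: Gc::new(1),
        y: Gc::new(2),
    };
    assert_eq!(*p.x + *p.y, 3);
}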