
Implement resizable buffers (#3634)

Branch: pull/3646/head
Authored by José Julián Espina 10 months ago, committed by GitHub
Commit: a9aeaa5ba3
17 changed files (lines changed per file):

 1. core/engine/src/builtins/array/array_iterator.rs (14)
 2. core/engine/src/builtins/array/mod.rs (31)
 3. core/engine/src/builtins/array_buffer/mod.rs (500)
 4. core/engine/src/builtins/array_buffer/shared.rs (351)
 5. core/engine/src/builtins/array_buffer/tests.rs (4)
 6. core/engine/src/builtins/array_buffer/utils.rs (148)
 7. core/engine/src/builtins/atomics/futex.rs (5)
 8. core/engine/src/builtins/atomics/mod.rs (225)
 9. core/engine/src/builtins/dataview/mod.rs (322)
10. core/engine/src/builtins/typed_array/builtin.rs (1962)
11. core/engine/src/builtins/typed_array/mod.rs (4)
12. core/engine/src/builtins/typed_array/object.rs (303)
13. core/engine/src/context/hooks.rs (2)
14. core/engine/src/object/builtins/jsarraybuffer.rs (5)
15. core/engine/src/object/builtins/jsdataview.rs (94)
16. core/engine/src/object/builtins/jssharedarraybuffer.rs (5)
17. test262_config.toml (1)

core/engine/src/builtins/array/array_iterator.rs (14 changed lines)

@@ -117,15 +117,17 @@ impl ArrayIterator {
        }
        let len = if let Some(f) = array_iterator.array.downcast_ref::<TypedArray>() {
-           if f.is_detached() {
-               return Err(JsNativeError::typ()
-                   .with_message(
-                       "Cannot get value from typed array that has a detached array buffer",
-                   )
-                   .into());
-           }
-           f.array_length()
            let buf = f.viewed_array_buffer().as_buffer();
            let Some(buf) = buf
                .bytes(std::sync::atomic::Ordering::SeqCst)
                .filter(|buf| !f.is_out_of_bounds(buf.len()))
            else {
                return Err(JsNativeError::typ()
                    .with_message("Cannot get value from out of bounds typed array")
                    .into());
            };
            f.array_length(buf.len())
        } else {
            array_iterator.array.length_of_array_like(context)?
        };

core/engine/src/builtins/array/mod.rs (31 changed lines)

@@ -36,7 +36,7 @@ use crate::{
    value::{IntegerOrInfinity, JsValue},
    Context, JsArgs, JsResult, JsString,
};
-use std::cmp::{max, min, Ordering};
use std::cmp::{min, Ordering};

use super::{BuiltInBuilder, BuiltInConstructor, IntrinsicObject};

@@ -3216,18 +3216,19 @@ impl Array {
    ) -> JsResult<u64> {
        // 1. Let relativeStart be ? ToIntegerOrInfinity(start).
        let relative_start = arg.to_integer_or_infinity(context)?;
-       match relative_start {
        let start = match relative_start {
            // 2. If relativeStart is -∞, let k be 0.
-           IntegerOrInfinity::NegativeInfinity => Ok(0),
            IntegerOrInfinity::NegativeInfinity => 0,
            // 3. Else if relativeStart < 0, let k be max(len + relativeStart, 0).
-           IntegerOrInfinity::Integer(i) if i < 0 => Ok(max(len as i64 + i, 0) as u64),
            IntegerOrInfinity::Integer(i) if i < 0 => len.checked_add_signed(i).unwrap_or(0),
-           // Both `as` casts are safe as both variables are non-negative
            // 4. Else, let k be min(relativeStart, len).
-           IntegerOrInfinity::Integer(i) => Ok(min(i, len as i64) as u64),
            IntegerOrInfinity::Integer(i) => min(i as u64, len),
            // Special case - positive infinity. `len` is always smaller than +inf, thus from (4)
-           IntegerOrInfinity::PositiveInfinity => Ok(len),
-       }
            IntegerOrInfinity::PositiveInfinity => len,
        };
        Ok(start)
    }

    /// Represents the algorithm to calculate `relativeEnd` (or `final`) in array functions.

@@ -3242,18 +3243,20 @@ impl Array {
        } else {
            // 1. cont, else let relativeEnd be ? ToIntegerOrInfinity(end).
            let relative_end = value.to_integer_or_infinity(context)?;
-           match relative_end {
            let end = match relative_end {
                // 2. If relativeEnd is -∞, let final be 0.
-               IntegerOrInfinity::NegativeInfinity => Ok(0),
                IntegerOrInfinity::NegativeInfinity => 0,
                // 3. Else if relativeEnd < 0, let final be max(len + relativeEnd, 0).
-               IntegerOrInfinity::Integer(i) if i < 0 => Ok(max(len as i64 + i, 0) as u64),
                IntegerOrInfinity::Integer(i) if i < 0 => len.checked_add_signed(i).unwrap_or(0),
                // 4. Else, let final be min(relativeEnd, len).
                // Both `as` casts are safe as both variables are non-negative
-               IntegerOrInfinity::Integer(i) => Ok(min(i, len as i64) as u64),
                IntegerOrInfinity::Integer(i) => min(i as u64, len),
                // Special case - positive infinity. `len` is always smaller than +inf, thus from (4)
-               IntegerOrInfinity::PositiveInfinity => Ok(len),
-           }
                IntegerOrInfinity::PositiveInfinity => len,
            };
            Ok(end)
        }
    }
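The rewritten `get_relative_start`/`get_relative_end` above replace the old signed `max` arithmetic with `checked_add_signed`. As a rough standalone illustration (not part of this commit; the helper name is made up), the two formulations agree for negative offsets:

    // Illustrative helper (name made up) mirroring the negative-offset arm above.
    fn clamp_relative(len: u64, i: i64) -> u64 {
        // For negative `i`, `checked_add_signed` returns `None` exactly when `len + i`
        // would drop below zero, which is the case the spec clamps to 0.
        len.checked_add_signed(i).unwrap_or(0)
    }

    fn main() {
        assert_eq!(clamp_relative(10, -3), 7); // max(len + relativeStart, 0) = 7
        assert_eq!(clamp_relative(10, -15), 0); // clamped, with no i64 conversion needed
    }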

core/engine/src/builtins/array_buffer/mod.rs (500 changed lines)

@@ -19,6 +19,7 @@ mod tests;
use std::ops::{Deref, DerefMut};

pub use shared::SharedArrayBuffer;
use std::sync::atomic::Ordering;

use crate::{
    builtins::BuiltInObject,

@@ -30,7 +31,6 @@ use crate::{
    realm::Realm,
    string::common::StaticJsStrings,
    symbol::JsSymbol,
-   value::IntegerOrInfinity,
    Context, JsArgs, JsData, JsResult, JsString, JsValue,
};
use boa_gc::{Finalize, GcRef, GcRefMut, Trace};

@@ -39,7 +39,7 @@ use boa_profiler::Profiler;
use self::utils::{SliceRef, SliceRefMut};

use super::{
-   typed_array::TypedArray, BuiltInBuilder, BuiltInConstructor, DataView, IntrinsicObject,
    typed_array::TypedArray, Array, BuiltInBuilder, BuiltInConstructor, DataView, IntrinsicObject,
};

#[derive(Debug, Clone, Copy)]

@@ -53,15 +53,31 @@
    B: Deref<Target = ArrayBuffer>,
    S: Deref<Target = SharedArrayBuffer>,
{
-   pub(crate) fn data(&self) -> Option<SliceRef<'_>> {
    /// Gets the inner data of the buffer.
    pub(crate) fn bytes(&self, ordering: Ordering) -> Option<SliceRef<'_>> {
        match self {
-           Self::Buffer(buf) => buf.deref().data().map(SliceRef::Slice),
-           Self::SharedBuffer(buf) => Some(SliceRef::AtomicSlice(buf.deref().data())),
            Self::Buffer(buf) => buf.deref().bytes().map(SliceRef::Slice),
            Self::SharedBuffer(buf) => Some(SliceRef::AtomicSlice(buf.deref().bytes(ordering))),
        }
    }

-   pub(crate) fn is_detached(&self) -> bool {
-       self.data().is_none()
    /// Gets the inner data of the buffer without accessing the current atomic length.
    ///
/// Returns `None` if the buffer is detached or if the provided `len` is bigger than
/// the allocated buffer.
#[track_caller]
pub(crate) fn bytes_with_len(&self, len: usize) -> Option<SliceRef<'_>> {
match self {
Self::Buffer(buf) => buf.deref().bytes_with_len(len).map(SliceRef::Slice),
Self::SharedBuffer(buf) => Some(SliceRef::AtomicSlice(buf.deref().bytes_with_len(len))),
}
}
pub(crate) fn is_fixed_len(&self) -> bool {
match self {
Self::Buffer(buf) => buf.is_fixed_len(),
Self::SharedBuffer(buf) => buf.is_fixed_len(),
}
    }
}
@@ -76,10 +92,28 @@
    B: DerefMut<Target = ArrayBuffer>,
    S: DerefMut<Target = SharedArrayBuffer>,
{
-   pub(crate) fn data_mut(&mut self) -> Option<SliceRefMut<'_>> {
    pub(crate) fn bytes(&mut self, ordering: Ordering) -> Option<SliceRefMut<'_>> {
match self {
Self::Buffer(buf) => buf.deref_mut().bytes_mut().map(SliceRefMut::Slice),
Self::SharedBuffer(buf) => {
Some(SliceRefMut::AtomicSlice(buf.deref_mut().bytes(ordering)))
}
}
}
/// Gets the mutable inner data of the buffer without accessing the current atomic length.
///
/// Returns `None` if the buffer is detached or if the provided `len` is bigger than
/// the allocated buffer.
pub(crate) fn bytes_with_len(&mut self, len: usize) -> Option<SliceRefMut<'_>> {
        match self {
-           Self::Buffer(buf) => buf.deref_mut().data_mut().map(SliceRefMut::Slice),
-           Self::SharedBuffer(buf) => Some(SliceRefMut::AtomicSlice(buf.deref_mut().data())),
            Self::Buffer(buf) => buf
                .deref_mut()
                .bytes_with_len_mut(len)
                .map(SliceRefMut::Slice),
            Self::SharedBuffer(buf) => Some(SliceRefMut::AtomicSlice(
                buf.deref_mut().bytes_with_len(len),
            )),
        }
    }
}
@@ -153,7 +187,7 @@ impl BufferObject {
            let lhs = lhs.borrow();
            let rhs = rhs.borrow();

-           std::ptr::eq(lhs.data.data().as_ptr(), rhs.data.data().as_ptr())
            std::ptr::eq(lhs.data.as_ptr(), rhs.data.as_ptr())
        }
        _ => false,
    }

@@ -166,6 +200,9 @@ pub struct ArrayBuffer {
    /// The `[[ArrayBufferData]]` internal slot.
    data: Option<Vec<u8>>,

    /// The `[[ArrayBufferMaxByteLength]]` internal slot.
    max_byte_len: Option<u64>,

    /// The `[[ArrayBufferDetachKey]]` internal slot.
    detach_key: JsValue,
}

@@ -174,6 +211,7 @@ impl ArrayBuffer {
    pub(crate) fn from_data(data: Vec<u8>, detach_key: JsValue) -> Self {
        Self {
            data: Some(data),
            max_byte_len: None,
            detach_key,
        }
    }

@@ -182,14 +220,38 @@ impl ArrayBuffer {
        self.data.as_ref().map_or(0, Vec::len)
    }

-   pub(crate) fn data(&self) -> Option<&[u8]> {
    pub(crate) fn bytes(&self) -> Option<&[u8]> {
        self.data.as_deref()
    }

-   pub(crate) fn data_mut(&mut self) -> Option<&mut [u8]> {
    pub(crate) fn bytes_mut(&mut self) -> Option<&mut [u8]> {
        self.data.as_deref_mut()
    }
pub(crate) fn vec_mut(&mut self) -> Option<&mut Vec<u8>> {
self.data.as_mut()
}
/// Gets the inner bytes of the buffer without accessing the current atomic length.
#[track_caller]
pub(crate) fn bytes_with_len(&self, len: usize) -> Option<&[u8]> {
if let Some(s) = self.data.as_deref() {
Some(&s[..len])
} else {
None
}
}
/// Gets the mutable inner bytes of the buffer without accessing the current atomic length.
#[track_caller]
pub(crate) fn bytes_with_len_mut(&mut self, len: usize) -> Option<&mut [u8]> {
if let Some(s) = self.data.as_deref_mut() {
Some(&mut s[..len])
} else {
None
}
}
    /// Detaches the inner data of this `ArrayBuffer`, returning the original buffer if still
    /// present.
    ///

@@ -206,7 +268,7 @@ impl ArrayBuffer {
        Ok(self.data.take())
    }

-   /// `25.1.2.2 IsDetachedBuffer ( arrayBuffer )`
    /// `IsDetachedBuffer ( arrayBuffer )`
    ///
    /// More information:
    /// - [ECMAScript reference][spec]

@@ -217,6 +279,10 @@ impl ArrayBuffer {
        // 2. Return false.
        self.data.is_none()
    }
pub(crate) fn is_fixed_len(&self) -> bool {
self.max_byte_len.is_none()
}
}

impl IntrinsicObject for ArrayBuffer {
@@ -233,20 +299,41 @@ impl IntrinsicObject for ArrayBuffer {
            .name(js_string!("get byteLength"))
            .build();
let get_resizable = BuiltInBuilder::callable(realm, Self::get_resizable)
.name(js_string!("get resizable"))
.build();
let get_max_byte_length = BuiltInBuilder::callable(realm, Self::get_max_byte_length)
.name(js_string!("get maxByteLength"))
.build();
        BuiltInBuilder::from_standard_constructor::<Self>(realm)
.static_accessor(
JsSymbol::species(),
Some(get_species),
None,
Attribute::CONFIGURABLE,
)
.static_method(Self::is_view, js_string!("isView"), 1)
            .accessor(
                js_string!("byteLength"),
                Some(get_byte_length),
                None,
                flag_attributes,
            )
-           .static_accessor(
-               JsSymbol::species(),
-               Some(get_species),
-               None,
-               Attribute::CONFIGURABLE,
-           )
-           .static_method(Self::is_view, js_string!("isView"), 1)
            .accessor(
                js_string!("resizable"),
                Some(get_resizable),
                None,
                flag_attributes,
            )
            .accessor(
js_string!("maxByteLength"),
Some(get_max_byte_length),
None,
flag_attributes,
)
.method(Self::resize, js_string!("resize"), 1)
            .method(Self::slice, js_string!("slice"), 2)
            .property(
                JsSymbol::to_string_tag(),
@@ -271,7 +358,7 @@ impl BuiltInConstructor for ArrayBuffer {
    const STANDARD_CONSTRUCTOR: fn(&StandardConstructors) -> &StandardConstructor =
        StandardConstructors::array_buffer;

-   /// `25.1.3.1 ArrayBuffer ( length )`
    /// `ArrayBuffer ( length )`
    ///
    /// More information:
    /// - [ECMAScript reference][spec]

@@ -290,29 +377,20 @@
        }

        // 2. Let byteLength be ? ToIndex(length).
-       let byte_length = args.get_or_undefined(0).to_index(context)?;
        let byte_len = args.get_or_undefined(0).to_index(context)?;
// 3. Let requestedMaxByteLength be ? GetArrayBufferMaxByteLengthOption(options).
let max_byte_len = get_max_byte_len(args.get_or_undefined(1), context)?;
-       // 3. Return ? AllocateArrayBuffer(NewTarget, byteLength).
-       Ok(Self::allocate(new_target, byte_length, context)?
        // 4. Return ? AllocateArrayBuffer(NewTarget, byteLength, requestedMaxByteLength).
        Ok(Self::allocate(new_target, byte_len, max_byte_len, context)?
            .upcast()
            .into())
    }
}

impl ArrayBuffer {
-   /// `25.1.4.3 get ArrayBuffer [ @@species ]`
-   ///
-   /// More information:
-   /// - [ECMAScript reference][spec]
-   ///
-   /// [spec]: https://tc39.es/ecma262/#sec-get-arraybuffer-@@species
-   #[allow(clippy::unnecessary_wraps)]
-   fn get_species(this: &JsValue, _: &[JsValue], _: &mut Context) -> JsResult<JsValue> {
-       // 1. Return the this value.
-       Ok(this.clone())
-   }
-
-   /// `25.1.4.1 ArrayBuffer.isView ( arg )`
    /// `ArrayBuffer.isView ( arg )`
    ///
    /// More information:
    /// - [ECMAScript reference][spec]
    ///

@@ -331,7 +409,19 @@ impl ArrayBuffer {
            .into())
    }

-   /// `25.1.5.1 get ArrayBuffer.prototype.byteLength`
    /// `get ArrayBuffer [ @@species ]`
///
/// More information:
/// - [ECMAScript reference][spec]
///
/// [spec]: https://tc39.es/ecma262/#sec-get-arraybuffer-@@species
#[allow(clippy::unnecessary_wraps)]
fn get_species(this: &JsValue, _: &[JsValue], _: &mut Context) -> JsResult<JsValue> {
// 1. Return the this value.
Ok(this.clone())
}
    /// `get ArrayBuffer.prototype.byteLength`
    ///
    /// More information:
    /// - [ECMAScript reference][spec]

@@ -350,7 +440,7 @@ impl ArrayBuffer {
            .and_then(JsObject::downcast_ref::<Self>)
            .ok_or_else(|| {
                JsNativeError::typ()
-                   .with_message("ArrayBuffer.byteLength called with non `ArrayBuffer` object")
                    .with_message("get ArrayBuffer.prototype.byteLength called with invalid `this`")
            })?;

        // 4. If IsDetachedBuffer(O) is true, return +0𝔽.

@@ -359,7 +449,127 @@ impl ArrayBuffer {
        Ok(buf.len().into())
    }

-   /// `25.1.5.3 ArrayBuffer.prototype.slice ( start, end )`
    /// [`get ArrayBuffer.prototype.maxByteLength`][spec].
///
/// [spec]: https://tc39.es/ecma262/#sec-get-arraybuffer.prototype.maxbytelength
pub(crate) fn get_max_byte_length(
this: &JsValue,
_args: &[JsValue],
_context: &mut Context,
) -> JsResult<JsValue> {
// 1. Let O be the this value.
// 2. Perform ? RequireInternalSlot(O, [[ArrayBufferData]]).
// 3. If IsSharedArrayBuffer(O) is true, throw a TypeError exception.
let buf = this
.as_object()
.and_then(JsObject::downcast_ref::<Self>)
.ok_or_else(|| {
JsNativeError::typ().with_message(
"get ArrayBuffer.prototype.maxByteLength called with invalid `this`",
)
})?;
// 4. If IsDetachedBuffer(O) is true, return +0𝔽.
let Some(data) = buf.bytes() else {
return Ok(JsValue::from(0));
};
// 5. If IsFixedLengthArrayBuffer(O) is true, then
// a. Let length be O.[[ArrayBufferByteLength]].
// 6. Else,
// a. Let length be O.[[ArrayBufferMaxByteLength]].
// 7. Return 𝔽(length).
Ok(buf.max_byte_len.unwrap_or(data.len() as u64).into())
}
/// [`get ArrayBuffer.prototype.resizable`][spec].
///
/// [spec]: https://tc39.es/ecma262/#sec-get-arraybuffer.prototype.resizable
pub(crate) fn get_resizable(
this: &JsValue,
_args: &[JsValue],
_context: &mut Context,
) -> JsResult<JsValue> {
// 1. Let O be the this value.
// 2. Perform ? RequireInternalSlot(O, [[ArrayBufferData]]).
// 3. If IsSharedArrayBuffer(O) is true, throw a TypeError exception.
let buf = this
.as_object()
.and_then(JsObject::downcast_ref::<Self>)
.ok_or_else(|| {
JsNativeError::typ()
.with_message("get ArrayBuffer.prototype.resizable called with invalid `this`")
})?;
// 4. If IsFixedLengthArrayBuffer(O) is false, return true; otherwise return false.
Ok(JsValue::from(!buf.is_fixed_len()))
}
/// [`ArrayBuffer.prototype.resize ( newLength )`][spec].
///
/// [spec]: https://tc39.es/ecma262/#sec-arraybuffer.prototype.resize
pub(crate) fn resize(
this: &JsValue,
args: &[JsValue],
context: &mut Context,
) -> JsResult<JsValue> {
// 1. Let O be the this value.
// 2. Perform ? RequireInternalSlot(O, [[ArrayBufferMaxByteLength]]).
// 3. If IsSharedArrayBuffer(O) is true, throw a TypeError exception.
let buf = this
.as_object()
.and_then(|o| o.clone().downcast::<Self>().ok())
.ok_or_else(|| {
JsNativeError::typ()
.with_message("ArrayBuffer.prototype.resize called with invalid `this`")
})?;
let Some(max_byte_len) = buf.borrow().data.max_byte_len else {
return Err(JsNativeError::typ()
.with_message("ArrayBuffer.resize: cannot resize a fixed-length buffer")
.into());
};
// 4. Let newByteLength be ? ToIndex(newLength).
let new_byte_length = args.get_or_undefined(0).to_index(context)?;
let mut buf = buf.borrow_mut();
// 5. If IsDetachedBuffer(O) is true, throw a TypeError exception.
let Some(buf) = buf.data.vec_mut() else {
return Err(JsNativeError::typ()
.with_message("ArrayBuffer.resize: cannot resize a detached buffer")
.into());
};
// 6. If newByteLength > O.[[ArrayBufferMaxByteLength]], throw a RangeError exception.
if new_byte_length > max_byte_len {
return Err(JsNativeError::range()
.with_message(
"ArrayBuffer.resize: new byte length exceeds buffer's maximum byte length",
)
.into());
}
// TODO: 7. Let hostHandled be ? HostResizeArrayBuffer(O, newByteLength).
// 8. If hostHandled is handled, return undefined.
// Used in engines to handle WASM buffers in a special way, but we don't
// have a WASM interpreter in place yet.
// 9. Let oldBlock be O.[[ArrayBufferData]].
// 10. Let newBlock be ? CreateByteDataBlock(newByteLength).
// 11. Let copyLength be min(newByteLength, O.[[ArrayBufferByteLength]]).
// 12. Perform CopyDataBlockBytes(newBlock, 0, oldBlock, 0, copyLength).
// 13. NOTE: Neither creation of the new Data Block nor copying from the old Data Block are observable.
// Implementations may implement this method as in-place growth or shrinkage.
// 14. Set O.[[ArrayBufferData]] to newBlock.
// 15. Set O.[[ArrayBufferByteLength]] to newByteLength.
buf.resize(new_byte_length as usize, 0);
// 16. Return undefined.
Ok(JsValue::undefined())
}
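As a rough sketch of the resize semantics implemented above (not part of this commit; a plain `Vec<u8>` stand-in with made-up names), steps 4-16 amount to an in-place, zero-filled resize bounded by the maximum byte length fixed at allocation:

    // Hypothetical mirror of the resize algorithm: shrink or grow in place, zero-filling
    // any new bytes and never exceeding the maximum fixed at allocation time.
    fn resize_buffer(data: &mut Vec<u8>, max_byte_len: usize, new_byte_len: usize) -> Result<(), String> {
        if new_byte_len > max_byte_len {
            return Err("new byte length exceeds buffer's maximum byte length".to_string());
        }
        // `Vec::resize` keeps min(old, new) bytes and zero-fills growth, which matches the
        // observable effect of copying into a fresh zeroed block as the spec describes.
        data.resize(new_byte_len, 0);
        Ok(())
    }

    fn main() {
        let mut data = vec![1, 2, 3, 4];
        assert!(resize_buffer(&mut data, 8, 6).is_ok());
        assert_eq!(data, [1, 2, 3, 4, 0, 0]);
        assert!(resize_buffer(&mut data, 8, 16).is_err());
    }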
    /// `ArrayBuffer.prototype.slice ( start, end )`
    ///
    /// More information:
    /// - [ECMAScript reference][spec]

@@ -369,89 +579,92 @@ impl ArrayBuffer {
        // 1. Let O be the this value.
        // 2. Perform ? RequireInternalSlot(O, [[ArrayBufferData]]).
        // 3. If IsSharedArrayBuffer(O) is true, throw a TypeError exception.
-       let obj = this.as_object().ok_or_else(|| {
-           JsNativeError::typ().with_message("ArrayBuffer.slice called with non-object value")
-       })?;
-
-       let buf = obj.downcast_ref::<Self>().ok_or_else(|| {
-           JsNativeError::typ().with_message("ArrayBuffer.slice called with invalid object")
-       })?;
        let buf = this
            .as_object()
            .and_then(|o| o.clone().downcast::<Self>().ok())
            .ok_or_else(|| {
                JsNativeError::typ()
                    .with_message("ArrayBuffer.slice called with invalid `this` value")
            })?;

        let len = {
            let buf = buf.borrow();

            // 4. If IsDetachedBuffer(O) is true, throw a TypeError exception.
-           if buf.is_detached() {
            if buf.data.is_detached() {
                return Err(JsNativeError::typ()
                    .with_message("ArrayBuffer.slice called with detached buffer")
                    .into());
            }

            // 5. Let len be O.[[ArrayBufferByteLength]].
            buf.data.len() as u64
        };

-       let SliceRange {
-           start: first,
-           length: new_len,
-       } = get_slice_range(
-           buf.len() as u64,
-           args.get_or_undefined(0),
-           args.get_or_undefined(1),
-           context,
-       )?;
        // 6. Let relativeStart be ? ToIntegerOrInfinity(start).
        // 7. If relativeStart = -∞, let first be 0.
        // 8. Else if relativeStart < 0, let first be max(len + relativeStart, 0).
        // 9. Else, let first be min(relativeStart, len).
        let first = Array::get_relative_start(context, args.get_or_undefined(0), len)?;

        // 10. If end is undefined, let relativeEnd be len; else let relativeEnd be ? ToIntegerOrInfinity(end).
        // 11. If relativeEnd = -∞, let final be 0.
        // 12. Else if relativeEnd < 0, let final be max(len + relativeEnd, 0).
        // 13. Else, let final be min(relativeEnd, len).
        let final_ = Array::get_relative_end(context, args.get_or_undefined(1), len)?;

        // 14. Let newLen be max(final - first, 0).
        let new_len = final_.saturating_sub(first);

        // 15. Let ctor be ? SpeciesConstructor(O, %ArrayBuffer%).
-       let ctor = obj.species_constructor(StandardConstructors::array_buffer, context)?;
        let ctor = buf
            .clone()
            .upcast()
            .species_constructor(StandardConstructors::array_buffer, context)?;

        // 16. Let new be ? Construct(ctor, « 𝔽(newLen) »).
        let new = ctor.construct(&[new_len.into()], Some(&ctor), context)?;

-       {
        // 17. Perform ? RequireInternalSlot(new, [[ArrayBufferData]]).
        // 18. If IsSharedArrayBuffer(new) is true, throw a TypeError exception.
-           let new = new.downcast_ref::<Self>().ok_or_else(|| {
-               JsNativeError::typ().with_message("ArrayBuffer constructor returned invalid object")
-           })?;
-
-           // 19. If IsDetachedBuffer(new) is true, throw a TypeError exception.
-           if new.is_detached() {
-               return Err(JsNativeError::typ()
-                   .with_message("ArrayBuffer constructor returned detached ArrayBuffer")
-                   .into());
-           }
-       }
        let Ok(new) = new.downcast::<Self>() else {
            return Err(JsNativeError::typ()
                .with_message("ArrayBuffer constructor returned invalid object")
                .into());
        };

        // 20. If SameValue(new, O) is true, throw a TypeError exception.
-       if this
-           .as_object()
-           .map(|obj| JsObject::equals(obj, &new))
-           .unwrap_or_default()
-       {
        if JsObject::equals(&buf, &new) {
            return Err(JsNativeError::typ()
                .with_message("new ArrayBuffer is the same as this ArrayBuffer")
                .into());
        }

        {
-           let mut new = new
-               .downcast_mut::<Self>()
-               .expect("Already checked that `new_obj` was an `ArrayBuffer`");
            // 19. If IsDetachedBuffer(new) is true, throw a TypeError exception.
            // 25. Let toBuf be new.[[ArrayBufferData]].
            let mut new = new.borrow_mut();
            let Some(to_buf) = new.data.bytes_mut() else {
                return Err(JsNativeError::typ()
                    .with_message("ArrayBuffer constructor returned detached ArrayBuffer")
                    .into());
            };

            // 21. If new.[[ArrayBufferByteLength]] < newLen, throw a TypeError exception.
-           if (new.len() as u64) < new_len {
            if (to_buf.len() as u64) < new_len {
                return Err(JsNativeError::typ()
                    .with_message("new ArrayBuffer length too small")
                    .into());
            }

            // 22. NOTE: Side-effects of the above steps may have detached O.
-           // 24. Let fromBuf be O.[[ArrayBufferData]].
-           let Some(from_buf) = buf.data() else {
            // 23. If IsDetachedBuffer(O) is true, throw a TypeError exception.
            // 24. Let fromBuf be O.[[ArrayBufferData]].
            let buf = buf.borrow();
            let Some(from_buf) = buf.data.bytes() else {
                return Err(JsNativeError::typ()
                    .with_message("ArrayBuffer detached while ArrayBuffer.slice was running")
                    .into());
            };

-           // 25. Let toBuf be new.[[ArrayBufferData]].
-           let to_buf = new
-               .data
-               .as_mut()
-               .expect("ArrayBuffer cannot be detached here");

            // 26. Perform CopyDataBlockBytes(toBuf, 0, fromBuf, first, newLen).
            let first = first as usize;
            let new_len = new_len as usize;

@@ -459,10 +672,10 @@ impl ArrayBuffer {
        }

        // 27. Return new.
-       Ok(new.into())
        Ok(new.upcast().into())
    }
-   /// `25.1.2.1 AllocateArrayBuffer ( constructor, byteLength )`
    /// `AllocateArrayBuffer ( constructor, byteLength )`
    ///
    /// More information:
    /// - [ECMAScript reference][spec]

@@ -470,86 +683,75 @@ impl ArrayBuffer {
    /// [spec]: https://tc39.es/ecma262/#sec-allocatearraybuffer
    pub(crate) fn allocate(
        constructor: &JsValue,
-       byte_length: u64,
        byte_len: u64,
        max_byte_len: Option<u64>,
        context: &mut Context,
    ) -> JsResult<JsObject<ArrayBuffer>> {
-       // 1. Let obj be ? OrdinaryCreateFromConstructor(constructor, "%ArrayBuffer.prototype%", « [[ArrayBufferData]], [[ArrayBufferByteLength]], [[ArrayBufferDetachKey]] »).
        // 1. Let slots be « [[ArrayBufferData]], [[ArrayBufferByteLength]], [[ArrayBufferDetachKey]] ».
        // 2. If maxByteLength is present and maxByteLength is not empty, let allocatingResizableBuffer be true; otherwise let allocatingResizableBuffer be false.
        // 3. If allocatingResizableBuffer is true, then
        //     a. If byteLength > maxByteLength, throw a RangeError exception.
        //     b. Append [[ArrayBufferMaxByteLength]] to slots.
        if let Some(max_byte_len) = max_byte_len {
            if byte_len > max_byte_len {
                return Err(JsNativeError::range()
                    .with_message("`length` cannot be bigger than `maxByteLength`")
                    .into());
            }
        }

        // 4. Let obj be ? OrdinaryCreateFromConstructor(constructor, "%ArrayBuffer.prototype%", slots).
        let prototype = get_prototype_from_constructor(
            constructor,
            StandardConstructors::array_buffer,
            context,
        )?;

-       // 2. Let block be ? CreateByteDataBlock(byteLength).
-       let block = create_byte_data_block(byte_length, context)?;
        // 5. Let block be ? CreateByteDataBlock(byteLength).
        // Preemptively allocate for `max_byte_len` if possible.
        //     a. If it is not possible to create a Data Block block consisting of maxByteLength bytes, throw a RangeError exception.
        //     b. NOTE: Resizable ArrayBuffers are designed to be implementable with in-place growth. Implementations may
        //        throw if, for example, virtual memory cannot be reserved up front.
        let block = create_byte_data_block(byte_len, max_byte_len, context)?;

-       // 3. Set obj.[[ArrayBufferData]] to block.
-       // 4. Set obj.[[ArrayBufferByteLength]] to byteLength.
        let obj = JsObject::new(
            context.root_shape(),
            prototype,
            Self {
                // 6. Set obj.[[ArrayBufferData]] to block.
                // 7. Set obj.[[ArrayBufferByteLength]] to byteLength.
                data: Some(block),
                // 8. If allocatingResizableBuffer is true, then
                //     c. Set obj.[[ArrayBufferMaxByteLength]] to maxByteLength.
                max_byte_len,
                detach_key: JsValue::Undefined,
            },
        );

-       // 5. Return obj.
        // 9. Return obj.
        Ok(obj)
    }
}

-/// Utility struct to return the result of the [`get_slice_range`] function.
-#[derive(Debug, Clone, Copy)]
-struct SliceRange {
-    start: u64,
-    length: u64,
-}
-
-/// Gets the slice copy range from the original length, the relative start and the end.
-fn get_slice_range(
-    len: u64,
-    relative_start: &JsValue,
-    end: &JsValue,
-    context: &mut Context,
-) -> JsResult<SliceRange> {
-    // 5. Let len be O.[[ArrayBufferByteLength]].
-    // 6. Let relativeStart be ? ToIntegerOrInfinity(start).
-    let relative_start = relative_start.to_integer_or_infinity(context)?;
-    let first = match relative_start {
-        // 7. If relativeStart is -∞, let first be 0.
-        IntegerOrInfinity::NegativeInfinity => 0,
-        // 8. Else if relativeStart < 0, let first be max(len + relativeStart, 0).
-        IntegerOrInfinity::Integer(i) if i < 0 => len.checked_add_signed(i).unwrap_or(0),
-        // 9. Else, let first be min(relativeStart, len).
-        IntegerOrInfinity::Integer(i) => std::cmp::min(i as u64, len),
-        IntegerOrInfinity::PositiveInfinity => len,
-    };
-
-    // 10. If end is undefined, let relativeEnd be len; else let relativeEnd be ? ToIntegerOrInfinity(end).
-    let r#final = if end.is_undefined() {
-        len
-    } else {
-        match end.to_integer_or_infinity(context)? {
-            // 11. If relativeEnd is -∞, let final be 0.
-            IntegerOrInfinity::NegativeInfinity => 0,
-            // 12. Else if relativeEnd < 0, let final be max(len + relativeEnd, 0).
-            IntegerOrInfinity::Integer(i) if i < 0 => len.checked_add_signed(i).unwrap_or(0),
-            // 13. Else, let final be min(relativeEnd, len).
-            IntegerOrInfinity::Integer(i) => std::cmp::min(i as u64, len),
-            IntegerOrInfinity::PositiveInfinity => len,
-        }
-    };
-
-    // 14. Let newLen be max(final - first, 0).
-    let new_len = r#final.saturating_sub(first);
-
-    Ok(SliceRange {
-        start: first,
-        length: new_len,
-    })
-}
/// Abstract operation [`GetArrayBufferMaxByteLengthOption ( options )`][spec]
///
/// [spec]: https://tc39.es/ecma262/#sec-getarraybuffermaxbytelengthoption
fn get_max_byte_len(options: &JsValue, context: &mut Context) -> JsResult<Option<u64>> {
    // 1. If options is not an Object, return empty.
    let Some(options) = options.as_object() else {
        return Ok(None);
    };

    // 2. Let maxByteLength be ? Get(options, "maxByteLength").
    let max_byte_len = options.get(js_string!("maxByteLength"), context)?;

    // 3. If maxByteLength is undefined, return empty.
    if max_byte_len.is_undefined() {
        return Ok(None);
    }

    // 4. Return ? ToIndex(maxByteLength).
    max_byte_len.to_index(context).map(Some)
}
/// `CreateByteDataBlock ( size )` abstract operation.

@@ -558,25 +760,37 @@
/// integer). For more information, check the [spec][spec].
///
/// [spec]: https://tc39.es/ecma262/#sec-createbytedatablock
-pub(crate) fn create_byte_data_block(size: u64, context: &mut Context) -> JsResult<Vec<u8>> {
-   if size > context.host_hooks().max_buffer_size(context) {
pub(crate) fn create_byte_data_block(
    size: u64,
    max_buffer_size: Option<u64>,
    context: &mut Context,
) -> JsResult<Vec<u8>> {
    let alloc_size = max_buffer_size.unwrap_or(size);
    assert!(size <= alloc_size);
    if alloc_size > context.host_hooks().max_buffer_size(context) {
        return Err(JsNativeError::range()
            .with_message(
                "cannot allocate a buffer that exceeds the maximum buffer size".to_string(),
            )
            .into());
    }

    // 1. Let db be a new Data Block value consisting of size bytes. If it is impossible to
    //    create such a Data Block, throw a RangeError exception.
-   let size = size.try_into().map_err(|e| {
    let alloc_size = alloc_size.try_into().map_err(|e| {
        JsNativeError::range().with_message(format!("couldn't allocate the data block: {e}"))
    })?;
    let mut data_block = Vec::new();
-   data_block.try_reserve(size).map_err(|e| {
    data_block.try_reserve_exact(alloc_size).map_err(|e| {
        JsNativeError::range().with_message(format!("couldn't allocate the data block: {e}"))
    })?;

    // since size <= alloc_size, then `size` must also fit inside a `usize`.
    let size = size as usize;

    // 2. Set all of the bytes of db to 0.
    data_block.resize(size, 0);
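The new allocation path reserves capacity for the requested maximum up front but only zero-fills the initial length. A standalone sketch of that strategy using only the standard library (illustrative names, not the engine's actual helper):

    // Illustrative only: reserve capacity for the assumed maximum up front, report
    // allocation failure as an error, and zero-fill just the initial length.
    fn alloc_resizable(size: usize, max_size: usize) -> Result<Vec<u8>, String> {
        assert!(size <= max_size);
        let mut block = Vec::new();
        // `try_reserve_exact` surfaces allocation failure instead of aborting the process.
        block
            .try_reserve_exact(max_size)
            .map_err(|e| format!("couldn't allocate the data block: {e}"))?;
        block.resize(size, 0);
        Ok(block)
    }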

core/engine/src/builtins/array_buffer/shared.rs (351 changed lines)

@@ -1,15 +1,18 @@
#![allow(unstable_name_collisions)]

-use std::{alloc, sync::Arc};
use std::{
    alloc,
    sync::{atomic::Ordering, Arc},
};

use boa_profiler::Profiler;
-use portable_atomic::AtomicU8;
use portable_atomic::{AtomicU8, AtomicUsize};

use boa_gc::{Finalize, Trace};
use sptr::Strict;

use crate::{
-   builtins::{BuiltInBuilder, BuiltInConstructor, BuiltInObject, IntrinsicObject},
    builtins::{Array, BuiltInBuilder, BuiltInConstructor, BuiltInObject, IntrinsicObject},
    context::intrinsics::{Intrinsics, StandardConstructor, StandardConstructors},
    js_string,
    object::internal_methods::get_prototype_from_constructor,

@@ -19,7 +22,7 @@ use crate::{
    Context, JsArgs, JsData, JsNativeError, JsObject, JsResult, JsString, JsSymbol, JsValue,
};

-use super::{get_slice_range, utils::copy_shared_to_shared, SliceRange};
use super::{get_max_byte_len, utils::copy_shared_to_shared};

/// The internal representation of a `SharedArrayBuffer` object.
///

@@ -27,10 +30,23 @@
/// running different JS code at the same time.
#[derive(Debug, Clone, Trace, Finalize, JsData)]
pub struct SharedArrayBuffer {
    /// The `[[ArrayBufferData]]` internal slot.
    // Shared buffers cannot be detached.
    #[unsafe_ignore_trace]
-   data: Arc<Box<[AtomicU8]>>,
    data: Arc<Inner>,
}
#[derive(Debug, Default)]
struct Inner {
// Technically we should have an `[[ArrayBufferData]]` internal slot,
// `[[ArrayBufferByteLengthData]]` and `[[ArrayBufferMaxByteLength]]` slots for growable arrays
// or `[[ArrayBufferByteLength]]` for fixed arrays, but we can save some work
// by just using this representation instead.
//
// The maximum buffer length is represented by `buffer.len()`, and `current_len` has the current
// buffer length, or `None` if this is a fixed buffer; in this case, `buffer.len()` will be
// the true length of the buffer.
buffer: Box<[AtomicU8]>,
current_len: Option<AtomicUsize>,
}
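A standalone sketch of the length representation described in the comment above (assumptions: `std` atomics instead of `portable_atomic`, made-up type name): one max-sized allocation plus an optional atomic current length covers both fixed and growable buffers.

    use std::sync::atomic::{AtomicU8, AtomicUsize, Ordering};

    // Illustrative stand-in for `Inner`: `buffer.len()` is the maximum byte length,
    // `current_len` is the growable length, or `None` for fixed-length buffers.
    struct SharedBytes {
        buffer: Box<[AtomicU8]>,
        current_len: Option<AtomicUsize>,
    }

    impl SharedBytes {
        fn len(&self, ordering: Ordering) -> usize {
            self.current_len
                .as_ref()
                .map_or(self.buffer.len(), |len| len.load(ordering))
        }

        // The readable portion of the buffer is always a prefix of the full allocation.
        fn bytes(&self, ordering: Ordering) -> &[AtomicU8] {
            &self.buffer[..self.len(ordering)]
        }
    }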
impl SharedArrayBuffer {

@@ -41,14 +57,33 @@ impl SharedArrayBuffer {
            data: Arc::default(),
        }
    }

    /// Gets the length of this `SharedArrayBuffer`.
-   pub(crate) fn len(&self) -> usize {
-       self.data.len()
    pub(crate) fn len(&self, ordering: Ordering) -> usize {
        self.data
            .current_len
            .as_ref()
            .map_or_else(|| self.data.buffer.len(), |len| len.load(ordering))
    }

    /// Gets the inner bytes of this `SharedArrayBuffer`.
-   pub(crate) fn data(&self) -> &[AtomicU8] {
-       &self.data
    pub(crate) fn bytes(&self, ordering: Ordering) -> &[AtomicU8] {
        &self.data.buffer[..self.len(ordering)]
}
/// Gets the inner data of the buffer without accessing the current atomic length.
#[track_caller]
pub(crate) fn bytes_with_len(&self, len: usize) -> &[AtomicU8] {
&self.data.buffer[..len]
}
/// Gets a pointer to the internal shared buffer.
pub(crate) fn as_ptr(&self) -> *const AtomicU8 {
(*self.data.buffer).as_ptr()
}
pub(crate) fn is_fixed_len(&self) -> bool {
self.data.current_len.is_none()
    }
}
@@ -66,20 +101,41 @@ impl IntrinsicObject for SharedArrayBuffer {
            .name(js_string!("get byteLength"))
            .build();

        let get_growable = BuiltInBuilder::callable(realm, Self::get_growable)
            .name(js_string!("get growable"))
            .build();

        let get_max_byte_length = BuiltInBuilder::callable(realm, Self::get_max_byte_length)
            .name(js_string!("get maxByteLength"))
            .build();

        BuiltInBuilder::from_standard_constructor::<Self>(realm)
            .static_accessor(
                JsSymbol::species(),
                Some(get_species),
                None,
                Attribute::CONFIGURABLE,
            )
            .accessor(
                js_string!("byteLength"),
                Some(get_byte_length),
                None,
                flag_attributes,
            )
-           .static_accessor(
-               JsSymbol::species(),
-               Some(get_species),
-               None,
-               Attribute::CONFIGURABLE,
-           )
            .accessor(
                js_string!("growable"),
                Some(get_growable),
                None,
                flag_attributes,
            )
            .accessor(
                js_string!("maxByteLength"),
                Some(get_max_byte_length),
                None,
                flag_attributes,
            )
            .method(Self::slice, js_string!("slice"), 2)
            .method(Self::grow, js_string!("grow"), 1)
            .property(
                JsSymbol::to_string_tag(),
                Self::NAME,
@@ -122,10 +178,13 @@ impl BuiltInConstructor for SharedArrayBuffer {
        }

        // 2. Let byteLength be ? ToIndex(length).
-       let byte_length = args.get_or_undefined(0).to_index(context)?;
        let byte_len = args.get_or_undefined(0).to_index(context)?;

-       // 3. Return ? AllocateSharedArrayBuffer(NewTarget, byteLength, requestedMaxByteLength).
-       Ok(Self::allocate(new_target, byte_length, context)?
        // 3. Let requestedMaxByteLength be ? GetArrayBufferMaxByteLengthOption(options).
        let max_byte_len = get_max_byte_len(args.get_or_undefined(1), context)?;

        // 4. Return ? AllocateSharedArrayBuffer(NewTarget, byteLength, requestedMaxByteLength).
        Ok(Self::allocate(new_target, byte_len, max_byte_len, context)?
            .upcast()
            .into())
    }

@@ -166,12 +225,158 @@ impl SharedArrayBuffer {
                .with_message("SharedArrayBuffer.byteLength called with invalid value")
            })?;

-       // TODO: 4. Let length be ArrayBufferByteLength(O, seq-cst).
-       let len = buf.data().len() as u64;
        // 4. Let length be ArrayBufferByteLength(O, seq-cst).
        let len = buf.bytes(Ordering::SeqCst).len() as u64;

        // 5. Return 𝔽(length).
        Ok(len.into())
    }
/// [`get SharedArrayBuffer.prototype.growable`][spec].
///
/// [spec]: https://tc39.es/ecma262/#sec-get-sharedarraybuffer.prototype.growable
pub(crate) fn get_growable(
this: &JsValue,
_args: &[JsValue],
_context: &mut Context,
) -> JsResult<JsValue> {
// 1. Let O be the this value.
// 2. Perform ? RequireInternalSlot(O, [[ArrayBufferData]]).
// 3. If IsSharedArrayBuffer(O) is false, throw a TypeError exception.
let buf = this
.as_object()
.and_then(JsObject::downcast_ref::<Self>)
.ok_or_else(|| {
JsNativeError::typ()
.with_message("get SharedArrayBuffer.growable called with invalid `this`")
})?;
// 4. If IsFixedLengthArrayBuffer(O) is false, return true; otherwise return false.
Ok(JsValue::from(!buf.is_fixed_len()))
}
/// [`get SharedArrayBuffer.prototype.maxByteLength`][spec].
///
/// [spec]: https://tc39.es/ecma262/#sec-get-sharedarraybuffer.prototype.maxbytelength
pub(crate) fn get_max_byte_length(
this: &JsValue,
_args: &[JsValue],
_context: &mut Context,
) -> JsResult<JsValue> {
// 1. Let O be the this value.
// 2. Perform ? RequireInternalSlot(O, [[ArrayBufferData]]).
// 3. If IsSharedArrayBuffer(O) is false, throw a TypeError exception.
let buf = this
.as_object()
.and_then(JsObject::downcast_ref::<Self>)
.ok_or_else(|| {
JsNativeError::typ()
.with_message("get SharedArrayBuffer.maxByteLength called with invalid value")
})?;
// 4. If IsFixedLengthArrayBuffer(O) is true, then
// a. Let length be O.[[ArrayBufferByteLength]].
// 5. Else,
// a. Let length be O.[[ArrayBufferMaxByteLength]].
// 6. Return 𝔽(length).
Ok(buf.data.buffer.len().into())
}
/// [`SharedArrayBuffer.prototype.grow ( newLength )`][spec].
///
/// [spec]: https://tc39.es/ecma262/#sec-sharedarraybuffer.prototype.grow
pub(crate) fn grow(
this: &JsValue,
args: &[JsValue],
context: &mut Context,
) -> JsResult<JsValue> {
// 1. Let O be the this value.
// 3. If IsSharedArrayBuffer(O) is false, throw a TypeError exception.
let Some(buf) = this
.as_object()
.and_then(|o| o.clone().downcast::<Self>().ok())
else {
return Err(JsNativeError::typ()
.with_message("SharedArrayBuffer.grow called with non-object value")
.into());
};
// 2. Perform ? RequireInternalSlot(O, [[ArrayBufferMaxByteLength]]).
if buf.borrow().data.is_fixed_len() {
return Err(JsNativeError::typ()
.with_message("SharedArrayBuffer.grow: cannot grow a fixed-length buffer")
.into());
}
// 4. Let newByteLength be ? ToIndex(newLength).
let new_byte_len = args.get_or_undefined(0).to_index(context)?;
// TODO: 5. Let hostHandled be ? HostGrowSharedArrayBuffer(O, newByteLength).
// 6. If hostHandled is handled, return undefined.
// Used in engines to handle WASM buffers in a special way, but we don't
// have a WASM interpreter in place yet.
// 7. Let isLittleEndian be the value of the [[LittleEndian]] field of the surrounding agent's Agent Record.
// 8. Let byteLengthBlock be O.[[ArrayBufferByteLengthData]].
// 9. Let currentByteLengthRawBytes be GetRawBytesFromSharedBlock(byteLengthBlock, 0, biguint64, true, seq-cst).
// 10. Let newByteLengthRawBytes be NumericToRawBytes(biguint64, ℤ(newByteLength), isLittleEndian).
let buf = buf.borrow();
let buf = &buf.data;
// d. If newByteLength < currentByteLength or newByteLength > O.[[ArrayBufferMaxByteLength]], throw a RangeError exception.
// Extracting this condition outside the CAS since throwing early doesn't affect the correct
// behaviour of the loop.
if new_byte_len > buf.data.buffer.len() as u64 {
return Err(JsNativeError::range()
.with_message(
"SharedArrayBuffer.grow: new length cannot be bigger than `maxByteLength`",
)
.into());
}
let new_byte_len = new_byte_len as usize;
// If we used let-else above to avoid the expect, we would carry a borrow through the `to_index`
// call, which could mutably borrow. Another alternative would be to clone the whole
// `SharedArrayBuffer`, but it's better to avoid contention with the counter in the `Arc` pointer.
let atomic_len = buf
.data
.current_len
.as_ref()
.expect("already checked that the buffer is not fixed-length");
// 11. Repeat,
// a. NOTE: This is a compare-and-exchange loop to ensure that parallel, racing grows of the same buffer are
// totally ordered, are not lost, and do not silently do nothing. The loop exits if it was able to attempt
// to grow uncontended.
// b. Let currentByteLength be ℝ(RawBytesToNumeric(biguint64, currentByteLengthRawBytes, isLittleEndian)).
// c. If newByteLength = currentByteLength, return undefined.
// d. If newByteLength < currentByteLength or newByteLength > O.[[ArrayBufferMaxByteLength]], throw a
// RangeError exception.
// e. Let byteLengthDelta be newByteLength - currentByteLength.
// f. If it is impossible to create a new Shared Data Block value consisting of byteLengthDelta bytes, throw
// a RangeError exception.
// g. NOTE: No new Shared Data Block is constructed and used here. The observable behaviour of growable
// SharedArrayBuffers is specified by allocating a max-sized Shared Data Block at construction time, and
// this step captures the requirement that implementations that run out of memory must throw a RangeError.
// h. Let readByteLengthRawBytes be AtomicCompareExchangeInSharedBlock(byteLengthBlock, 0, 8,
// currentByteLengthRawBytes, newByteLengthRawBytes).
// i. If ByteListEqual(readByteLengthRawBytes, currentByteLengthRawBytes) is true, return undefined.
// j. Set currentByteLengthRawBytes to readByteLengthRawBytes.
// We require SEQ-CST operations because readers of the buffer also use SEQ-CST operations.
atomic_len
.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |prev_byte_len| {
(prev_byte_len <= new_byte_len).then_some(new_byte_len)
})
.map_err(|_| {
JsNativeError::range()
.with_message("SharedArrayBuffer.grow: failed to grow buffer to new length")
})?;
Ok(JsValue::undefined())
}
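A standalone sketch of the compare-and-exchange growth that `fetch_update` performs above (std atomics, made-up function name); growth is monotonic and racing grows are totally ordered:

    use std::sync::atomic::{AtomicUsize, Ordering};

    // Illustrative only: grow a shared length monotonically. `fetch_update` retries the
    // compare-and-exchange until the closure's decision is applied to a fresh value,
    // so concurrent grows are never lost.
    fn try_grow(len: &AtomicUsize, new_len: usize) -> Result<usize, usize> {
        len.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |prev| {
            (prev <= new_len).then_some(new_len)
        })
    }

    fn main() {
        let len = AtomicUsize::new(8);
        assert_eq!(try_grow(&len, 16), Ok(8)); // success returns the previous length
        assert_eq!(try_grow(&len, 4), Err(16)); // shrinking a grown buffer is rejected
    }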
    /// `SharedArrayBuffer.prototype.slice ( start, end )`
    ///
    /// More information:

@@ -181,32 +386,45 @@ impl SharedArrayBuffer {
    fn slice(this: &JsValue, args: &[JsValue], context: &mut Context) -> JsResult<JsValue> {
        // 1. Let O be the this value.
        // 2. Perform ? RequireInternalSlot(O, [[ArrayBufferData]]).
-       let obj = this.as_object().ok_or_else(|| {
-           JsNativeError::typ().with_message("ArrayBuffer.slice called with non-object value")
-       })?;
        // 3. If IsSharedArrayBuffer(O) is false, throw a TypeError exception.
-       let buf = obj.downcast_ref::<Self>().ok_or_else(|| {
-           JsNativeError::typ().with_message("ArrayBuffer.slice called with invalid object")
-       })?;
        let buf = this
            .as_object()
            .and_then(|o| o.clone().downcast::<Self>().ok())
            .ok_or_else(|| {
                JsNativeError::typ()
                    .with_message("SharedArrayBuffer.slice called with invalid `this` value")
            })?;

-       let SliceRange {
-           start: first,
-           length: new_len,
-       } = get_slice_range(
-           buf.len() as u64,
-           args.get_or_undefined(0),
-           args.get_or_undefined(1),
-           context,
-       )?;
        // 4. Let len be ArrayBufferByteLength(O, seq-cst).
        let len = buf.borrow().data.len(Ordering::SeqCst);

        // 5. Let relativeStart be ? ToIntegerOrInfinity(start).
        // 6. If relativeStart = -∞, let first be 0.
        // 7. Else if relativeStart < 0, let first be max(len + relativeStart, 0).
        // 8. Else, let first be min(relativeStart, len).
        let first = Array::get_relative_start(context, args.get_or_undefined(0), len as u64)?;

        // 9. If end is undefined, let relativeEnd be len; else let relativeEnd be ? ToIntegerOrInfinity(end).
        // 10. If relativeEnd = -∞, let final be 0.
        // 11. Else if relativeEnd < 0, let final be max(len + relativeEnd, 0).
        // 12. Else, let final be min(relativeEnd, len).
        let final_ = Array::get_relative_end(context, args.get_or_undefined(1), len as u64)?;

        // 13. Let newLen be max(final - first, 0).
        let new_len = final_.saturating_sub(first);

        // 14. Let ctor be ? SpeciesConstructor(O, %SharedArrayBuffer%).
-       let ctor = obj.species_constructor(StandardConstructors::shared_array_buffer, context)?;
        let ctor = buf
            .clone()
            .upcast()
            .species_constructor(StandardConstructors::shared_array_buffer, context)?;

        // 15. Let new be ? Construct(ctor, « 𝔽(newLen) »).
        let new = ctor.construct(&[new_len.into()], Some(&ctor), context)?;

        {
            let buf = buf.borrow();
            let buf = &buf.data;

            // 16. Perform ? RequireInternalSlot(new, [[ArrayBufferData]]).
            // 17. If IsSharedArrayBuffer(new) is false, throw a TypeError exception.
            let new = new.downcast_ref::<Self>().ok_or_else(|| {

@@ -215,33 +433,37 @@ impl SharedArrayBuffer {
            })?;

            // 18. If new.[[ArrayBufferData]] is O.[[ArrayBufferData]], throw a TypeError exception.
-           if std::ptr::eq(buf.data().as_ptr(), new.data().as_ptr()) {
            if std::ptr::eq(buf.as_ptr(), new.as_ptr()) {
                return Err(JsNativeError::typ()
-                   .with_message("cannot reuse the same `SharedArrayBuffer` for a slice operation")
                    .with_message("cannot reuse the same SharedArrayBuffer for a slice operation")
                    .into());
            }

-           // TODO: 19. If ArrayBufferByteLength(new, seq-cst) < newLen, throw a TypeError exception.
-           if (new.len() as u64) < new_len {
            // 19. If ArrayBufferByteLength(new, seq-cst) < newLen, throw a TypeError exception.
            if (new.len(Ordering::SeqCst) as u64) < new_len {
                return Err(JsNativeError::typ()
-                   .with_message("invalid size of constructed shared array")
                    .with_message("invalid size of constructed SharedArrayBuffer")
                    .into());
            }

            let first = first as usize;
            let new_len = new_len as usize;

            // 20. Let fromBuf be O.[[ArrayBufferData]].
-           let from_buf = buf.data();
            let from_buf = &buf.bytes_with_len(len)[first..];

            // 21. Let toBuf be new.[[ArrayBufferData]].
-           let to_buf = new.data();
            let to_buf = new;

-           // 22. Perform CopyDataBlockBytes(toBuf, 0, fromBuf, first, newLen).
-           let first = first as usize;
-           let new_len = new_len as usize;
            // Sanity check to ensure there is enough space inside `from_buf` for
            // `new_len` elements.
            debug_assert!(from_buf.len() >= new_len);

            // 22. Perform CopyDataBlockBytes(toBuf, 0, fromBuf, first, newLen).
            // SAFETY: `get_slice_range` will always return indices that are in-bounds.
            // This also means that the newly created buffer will have at least `new_len` elements
            // to write to.
-           unsafe { copy_shared_to_shared(&from_buf[first..], to_buf, new_len) }
            unsafe { copy_shared_to_shared(from_buf.as_ptr(), to_buf.as_ptr(), new_len) }
        }

        // 23. Return new.
@@ -256,10 +478,10 @@ impl SharedArrayBuffer {
    /// [spec]: https://tc39.es/ecma262/#sec-allocatesharedarraybuffer
    pub(crate) fn allocate(
        constructor: &JsValue,
-       byte_length: u64,
        byte_len: u64,
        max_byte_len: Option<u64>,
        context: &mut Context,
    ) -> JsResult<JsObject<SharedArrayBuffer>> {
-       // TODO:
        // 1. Let slots be « [[ArrayBufferData]] ».
        // 2. If maxByteLength is present and maxByteLength is not empty, let allocatingGrowableBuffer
        //    be true; otherwise let allocatingGrowableBuffer be false.

@@ -268,6 +490,13 @@ impl SharedArrayBuffer {
        //     b. Append [[ArrayBufferByteLengthData]] and [[ArrayBufferMaxByteLength]] to slots.
        // 4. Else,
        //     a. Append [[ArrayBufferByteLength]] to slots.
if let Some(max_byte_len) = max_byte_len {
if byte_len > max_byte_len {
return Err(JsNativeError::range()
.with_message("`length` cannot be bigger than `maxByteLength`")
.into());
}
}
        // 5. Let obj be ? OrdinaryCreateFromConstructor(constructor, "%SharedArrayBuffer.prototype%", slots).
        let prototype = get_prototype_from_constructor(

@@ -276,24 +505,36 @@ impl SharedArrayBuffer {
            context,
        )?;

-       // TODO: 6. If allocatingGrowableBuffer is true, let allocLength be maxByteLength;
        // 6. If allocatingGrowableBuffer is true, let allocLength be maxByteLength;
        //    otherwise let allocLength be byteLength.
        let alloc_len = max_byte_len.unwrap_or(byte_len);

        // 7. Let block be ? CreateSharedByteDataBlock(allocLength).
        // 8. Set obj.[[ArrayBufferData]] to block.
-       let data = create_shared_byte_data_block(byte_length, context)?;
        let block = create_shared_byte_data_block(alloc_len, context)?;

-       // TODO:
        // 9. If allocatingGrowableBuffer is true, then
        // `byte_len` must fit inside an `usize` thanks to the checks inside
        // `create_shared_byte_data_block`.
        //     a. Assert: byteLength ≤ maxByteLength.
        //     b. Let byteLengthBlock be ? CreateSharedByteDataBlock(8).
        //     c. Perform SetValueInBuffer(byteLengthBlock, 0, biguint64, ℤ(byteLength), true, seq-cst).
        //     d. Set obj.[[ArrayBufferByteLengthData]] to byteLengthBlock.
        //     e. Set obj.[[ArrayBufferMaxByteLength]] to maxByteLength.
        let current_len = max_byte_len.map(|_| AtomicUsize::new(byte_len as usize));

        // 10. Else,
        //     a. Set obj.[[ArrayBufferByteLength]] to byteLength.
-       let obj = JsObject::new(context.root_shape(), prototype, Self { data });
        let obj = JsObject::new(
context.root_shape(),
prototype,
Self {
data: Arc::new(Inner {
buffer: block,
current_len,
}),
},
);
        // 11. Return obj.
        Ok(obj)

@@ -310,7 +551,7 @@
pub(crate) fn create_shared_byte_data_block(
    size: u64,
    context: &mut Context,
-) -> JsResult<Arc<Box<[AtomicU8]>>> {
) -> JsResult<Box<[AtomicU8]>> {
    if size > context.host_hooks().max_buffer_size(context) {
        return Err(JsNativeError::range()
            .with_message(

@@ -327,7 +568,7 @@ pub(crate) fn create_shared_byte_data_block(
    if size == 0 {
        // Must ensure we don't allocate a zero-sized buffer.
-       return Ok(Arc::new(Box::new([])));
        return Ok(Box::default());
    }

    // 2. Let execution be the [[CandidateExecution]] field of the surrounding agent's Agent Record.

@@ -371,5 +612,5 @@ pub(crate) fn create_shared_byte_data_block(
    assert_eq!(buffer.as_ptr().addr() % std::mem::align_of::<u64>(), 0);

    // 3. Return db.
-   Ok(Arc::new(buffer))
    Ok(buffer)
}

core/engine/src/builtins/array_buffer/tests.rs (4 changed lines)

@@ -4,10 +4,10 @@ use crate::Context;
fn create_byte_data_block() {
    let context = &mut Context::default();
    // Sunny day
-   assert!(super::create_byte_data_block(100, context).is_ok());
    assert!(super::create_byte_data_block(100, None, context).is_ok());
    // Rainy day
-   assert!(super::create_byte_data_block(u64::MAX, context).is_err());
    assert!(super::create_byte_data_block(u64::MAX, None, context).is_err());
}

#[test]

148
core/engine/src/builtins/array_buffer/utils.rs

@ -1,6 +1,6 @@
#![allow(unstable_name_collisions)] #![allow(unstable_name_collisions)]
use std::{ptr, slice::SliceIndex, sync::atomic}; use std::{ptr, slice::SliceIndex, sync::atomic::Ordering};
use portable_atomic::AtomicU8; use portable_atomic::AtomicU8;
@ -11,6 +11,18 @@ use crate::{
use super::ArrayBuffer; use super::ArrayBuffer;
#[derive(Clone, Copy)]
pub(crate) enum BytesConstPtr {
Bytes(*const u8),
AtomicBytes(*const AtomicU8),
}
#[derive(Clone, Copy)]
pub(crate) enum BytesMutPtr {
Bytes(*mut u8),
AtomicBytes(*const AtomicU8),
}
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub(crate) enum SliceRef<'a> { pub(crate) enum SliceRef<'a> {
Slice(&'a [u8]), Slice(&'a [u8]),
@ -49,6 +61,14 @@ impl SliceRef<'_> {
} }
} }
/// Gets a pointer to the underlying slice.
pub(crate) fn as_ptr(&self) -> BytesConstPtr {
match self {
SliceRef::Slice(s) => BytesConstPtr::Bytes(s.as_ptr()),
SliceRef::AtomicSlice(s) => BytesConstPtr::AtomicBytes(s.as_ptr()),
}
}
/// [`GetValueFromBuffer ( arrayBuffer, byteIndex, type, isTypedArray, order [ , isLittleEndian ] )`][spec] /// [`GetValueFromBuffer ( arrayBuffer, byteIndex, type, isTypedArray, order [ , isLittleEndian ] )`][spec]
/// ///
/// The start offset is determined by the input buffer instead of a `byteIndex` parameter. /// The start offset is determined by the input buffer instead of a `byteIndex` parameter.
@ -62,9 +82,9 @@ impl SliceRef<'_> {
pub(crate) unsafe fn get_value( pub(crate) unsafe fn get_value(
&self, &self,
kind: TypedArrayKind, kind: TypedArrayKind,
order: atomic::Ordering, order: Ordering,
) -> TypedArrayElement { ) -> TypedArrayElement {
unsafe fn read_elem<T: Element>(buffer: SliceRef<'_>, order: atomic::Ordering) -> T { unsafe fn read_elem<T: Element>(buffer: SliceRef<'_>, order: Ordering) -> T {
// <https://tc39.es/ecma262/#sec-getvaluefrombuffer> // <https://tc39.es/ecma262/#sec-getvaluefrombuffer>
// 1. Assert: IsDetachedBuffer(arrayBuffer) is false. // 1. Assert: IsDetachedBuffer(arrayBuffer) is false.
@ -116,10 +136,7 @@ impl SliceRef<'_> {
} }
} }
/// `25.1.2.4 CloneArrayBuffer ( srcBuffer, srcByteOffset, srcLength )` /// [`CloneArrayBuffer ( srcBuffer, srcByteOffset, srcLength )`][spec]
///
/// More information:
/// - [ECMAScript reference][spec]
/// ///
/// [spec]: https://tc39.es/ecma262/#sec-clonearraybuffer /// [spec]: https://tc39.es/ecma262/#sec-clonearraybuffer
pub(crate) fn clone(&self, context: &mut Context) -> JsResult<JsObject<ArrayBuffer>> { pub(crate) fn clone(&self, context: &mut Context) -> JsResult<JsObject<ArrayBuffer>> {
@ -135,6 +152,7 @@ impl SliceRef<'_> {
.constructor() .constructor()
.into(), .into(),
self.len() as u64, self.len() as u64,
None,
context, context,
)?; )?;
@ -145,14 +163,20 @@ impl SliceRef<'_> {
let mut target_buffer = target_buffer.borrow_mut(); let mut target_buffer = target_buffer.borrow_mut();
let target_block = target_buffer let target_block = target_buffer
.data .data
.data_mut() .bytes_mut()
.expect("ArrayBuffer cannot be detached here"); .expect("ArrayBuffer cannot be detached here");
// 5. Perform CopyDataBlockBytes(targetBlock, 0, srcBlock, srcByteOffset, srcLength). // 5. Perform CopyDataBlockBytes(targetBlock, 0, srcBlock, srcByteOffset, srcLength).
// SAFETY: Both buffers are of the same length, `buffer.len()`, which makes this operation // SAFETY: Both buffers are of the same length, `buffer.len()`, which makes this operation
// safe. // safe.
unsafe { memcpy(*self, SliceRefMut::Slice(target_block), self.len()) } unsafe {
memcpy(
self.as_ptr(),
BytesMutPtr::Bytes(target_block.as_mut_ptr()),
self.len(),
);
}
} }
// 6. Return targetBuffer. // 6. Return targetBuffer.
@ -180,7 +204,6 @@ pub(crate) enum SliceRefMut<'a> {
impl SliceRefMut<'_> { impl SliceRefMut<'_> {
/// Gets the byte length of this `SliceRefMut`. /// Gets the byte length of this `SliceRefMut`.
#[cfg(debug_assertions)]
pub(crate) fn len(&self) -> usize { pub(crate) fn len(&self) -> usize {
match self { match self {
Self::Slice(buf) => buf.len(), Self::Slice(buf) => buf.len(),
@ -213,6 +236,14 @@ impl SliceRefMut<'_> {
} }
} }
/// Gets a pointer to the underlying slice.
pub(crate) fn as_ptr(&mut self) -> BytesMutPtr {
match self {
Self::Slice(s) => BytesMutPtr::Bytes(s.as_mut_ptr()),
Self::AtomicSlice(s) => BytesMutPtr::AtomicBytes(s.as_ptr()),
}
}
/// `25.1.2.12 SetValueInBuffer ( arrayBuffer, byteIndex, type, value, isTypedArray, order [ , isLittleEndian ] )` /// `25.1.2.12 SetValueInBuffer ( arrayBuffer, byteIndex, type, value, isTypedArray, order [ , isLittleEndian ] )`
/// ///
/// The start offset is determined by the input buffer instead of a `byteIndex` parameter. /// The start offset is determined by the input buffer instead of a `byteIndex` parameter.
@ -230,12 +261,8 @@ impl SliceRefMut<'_> {
/// - [ECMAScript reference][spec] /// - [ECMAScript reference][spec]
/// ///
/// [spec]: https://tc39.es/ecma262/#sec-setvalueinbuffer /// [spec]: https://tc39.es/ecma262/#sec-setvalueinbuffer
pub(crate) unsafe fn set_value(&mut self, value: TypedArrayElement, order: atomic::Ordering) { pub(crate) unsafe fn set_value(&mut self, value: TypedArrayElement, order: Ordering) {
unsafe fn write_elem<T: Element>( unsafe fn write_elem<T: Element>(buffer: SliceRefMut<'_>, value: T, order: Ordering) {
buffer: SliceRefMut<'_>,
value: T,
order: atomic::Ordering,
) {
// <https://tc39.es/ecma262/#sec-setvalueinbuffer> // <https://tc39.es/ecma262/#sec-setvalueinbuffer>
// 1. Assert: IsDetachedBuffer(arrayBuffer) is false. // 1. Assert: IsDetachedBuffer(arrayBuffer) is false.
@ -309,15 +336,16 @@ impl<'a> From<&'a [AtomicU8]> for SliceRefMut<'a> {
/// ///
/// - Both `src` and `dest` must have at least `count` bytes to read and write, /// - Both `src` and `dest` must have at least `count` bytes to read and write,
/// respectively. /// respectively.
pub(super) unsafe fn copy_shared_to_shared(src: &[AtomicU8], dest: &[AtomicU8], count: usize) { pub(super) unsafe fn copy_shared_to_shared(
src: *const AtomicU8,
dest: *const AtomicU8,
count: usize,
) {
// TODO: this could be optimized with batches of writes using `u32/u64` stores instead. // TODO: this could be optimized with batches of writes using `u32/u64` stores instead.
for i in 0..count { for i in 0..count {
// SAFETY: The invariants of this operation are ensured by the caller of the function. // SAFETY: The invariants of this operation are ensured by the caller of the function.
unsafe { unsafe {
dest.get_unchecked(i).store( (*dest.add(i)).store((*src.add(i)).load(Ordering::Relaxed), Ordering::Relaxed);
src.get_unchecked(i).load(atomic::Ordering::Relaxed),
atomic::Ordering::Relaxed,
);
} }
} }
} }
@ -328,60 +356,48 @@ pub(super) unsafe fn copy_shared_to_shared(src: &[AtomicU8], dest: &[AtomicU8],
/// ///
/// - Both `src` and `dest` must have at least `count` bytes to read and write, /// - Both `src` and `dest` must have at least `count` bytes to read and write,
/// respectively. /// respectively.
unsafe fn copy_shared_to_shared_backwards(src: &[AtomicU8], dest: &[AtomicU8], count: usize) { unsafe fn copy_shared_to_shared_backwards(
src: *const AtomicU8,
dest: *const AtomicU8,
count: usize,
) {
for i in (0..count).rev() { for i in (0..count).rev() {
// SAFETY: The invariants of this operation are ensured by the caller of the function. // SAFETY: The invariants of this operation are ensured by the caller of the function.
unsafe { unsafe {
dest.get_unchecked(i).store( (*dest.add(i)).store((*src.add(i)).load(Ordering::Relaxed), Ordering::Relaxed);
src.get_unchecked(i).load(atomic::Ordering::Relaxed),
atomic::Ordering::Relaxed,
);
} }
} }
} }
/// Copies `count` bytes from the buffer `src` into the buffer `dest`, using the atomic ordering `order` /// Copies `count` bytes from the buffer `src` into the buffer `dest`, using the atomic ordering
/// if any of the buffers are atomic. /// `Ordering::Relaxed` if any of the buffers are atomic.
/// ///
/// # Safety /// # Safety
/// ///
/// - Both `src` and `dest` must have at least `count` bytes to read and write, respectively. /// - Both `src` and `dest` must have at least `count` bytes to read and write, respectively.
/// - The region of memory referenced by `src` must not overlap with the region of memory /// - The region of memory referenced by `src` must not overlap with the region of memory
/// referenced by `dest`. This is guaranteed if either of them are slices /// referenced by `dest`.
/// (you cannot borrow and mutably borrow a slice at the same time), but cannot be guaranteed pub(crate) unsafe fn memcpy(src: BytesConstPtr, dest: BytesMutPtr, count: usize) {
/// for atomic slices.
pub(crate) unsafe fn memcpy(src: SliceRef<'_>, dest: SliceRefMut<'_>, count: usize) {
#[cfg(debug_assertions)]
{
assert!(src.len() >= count);
assert!(dest.len() >= count);
let src_range = src.addr()..src.addr() + src.len();
let dest_range = dest.addr()..dest.addr() + dest.len();
assert!(!src_range.contains(&dest_range.start));
assert!(!src_range.contains(&dest_range.end));
}
// TODO: this could be optimized with batches of writes using `u32/u64` stores instead. // TODO: this could be optimized with batches of writes using `u32/u64` stores instead.
match (src, dest) { match (src, dest) {
// SAFETY: The invariants of this operation are ensured by the caller of the function. // SAFETY: The invariants of this operation are ensured by the caller of the function.
(SliceRef::Slice(src), SliceRefMut::Slice(dest)) => unsafe { (BytesConstPtr::Bytes(src), BytesMutPtr::Bytes(dest)) => unsafe {
ptr::copy_nonoverlapping(src.as_ptr(), dest.as_mut_ptr(), count); ptr::copy_nonoverlapping(src, dest, count);
}, },
// SAFETY: The invariants of this operation are ensured by the caller of the function. // SAFETY: The invariants of this operation are ensured by the caller of the function.
(SliceRef::Slice(src), SliceRefMut::AtomicSlice(dest)) => unsafe { (BytesConstPtr::Bytes(src), BytesMutPtr::AtomicBytes(dest)) => unsafe {
for i in 0..count { for i in 0..count {
dest.get_unchecked(i) (*dest.add(i)).store(*src.add(i), Ordering::Relaxed);
.store(*src.get_unchecked(i), atomic::Ordering::Relaxed);
} }
}, },
// SAFETY: The invariants of this operation are ensured by the caller of the function. // SAFETY: The invariants of this operation are ensured by the caller of the function.
(SliceRef::AtomicSlice(src), SliceRefMut::Slice(dest)) => unsafe { (BytesConstPtr::AtomicBytes(src), BytesMutPtr::Bytes(dest)) => unsafe {
for i in 0..count { for i in 0..count {
*dest.get_unchecked_mut(i) = src.get_unchecked(i).load(atomic::Ordering::Relaxed); *dest.add(i) = (*src.add(i)).load(Ordering::Relaxed);
} }
}, },
// SAFETY: The invariants of this operation are ensured by the caller of the function. // SAFETY: The invariants of this operation are ensured by the caller of the function.
(SliceRef::AtomicSlice(src), SliceRefMut::AtomicSlice(dest)) => unsafe { (BytesConstPtr::AtomicBytes(src), BytesMutPtr::AtomicBytes(dest)) => unsafe {
copy_shared_to_shared(src, dest, count); copy_shared_to_shared(src, dest, count);
}, },
} }
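
With `memcpy` now taking raw pointers, the caller alone is responsible for the length and non-overlap guarantees that the old slice-based signature enforced structurally. A self-contained sketch of what a caller-side copy looks like, using local stand-in types instead of boa's `BytesConstPtr`/`BytesMutPtr`:

use std::ptr;
use std::sync::atomic::{AtomicU8, Ordering};

// Stand-in for the source pointer enum: either plain or shared (atomic) bytes.
enum SrcPtr {
    Bytes(*const u8),
    AtomicBytes(*const AtomicU8),
}

// SAFETY contract: `count` bytes readable at `src`, `dest` at least `count` long,
// and the two regions must not overlap.
unsafe fn copy_into(dest: &mut [u8], src: SrcPtr, count: usize) {
    match src {
        SrcPtr::Bytes(src) => ptr::copy_nonoverlapping(src, dest.as_mut_ptr(), count),
        SrcPtr::AtomicBytes(src) => {
            for i in 0..count {
                dest[i] = (*src.add(i)).load(Ordering::Relaxed);
            }
        }
    }
}

fn main() {
    let shared: Vec<AtomicU8> = (0u8..4).map(AtomicU8::new).collect();
    let mut local = [0u8; 4];
    // SAFETY: 4 readable bytes at `shared`, 4 writable bytes in `local`, no overlap.
    unsafe { copy_into(&mut local, SrcPtr::AtomicBytes(shared.as_ptr()), 4) };
    assert_eq!(local, [0, 1, 2, 3]);
}
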
@ -391,28 +407,20 @@ pub(crate) unsafe fn memcpy(src: SliceRef<'_>, dest: SliceRefMut<'_>, count: usi
/// ///
/// # Safety /// # Safety
/// ///
/// - `buffer` must contain at least `from + count` bytes to be read. /// - `ptr` must be valid from the offset `ptr + from` for `count` reads of bytes.
/// - `buffer` must contain at least `to + count` bytes to be written. /// - `ptr` must be valid from the offset `ptr + to` for `count` writes of bytes.
pub(crate) unsafe fn memmove(buffer: SliceRefMut<'_>, from: usize, to: usize, count: usize) { pub(crate) unsafe fn memmove(ptr: BytesMutPtr, from: usize, to: usize, count: usize) {
#[cfg(debug_assertions)] match ptr {
{
assert!(from + count <= buffer.len());
assert!(to + count <= buffer.len());
}
match buffer {
// SAFETY: The invariants of this operation are ensured by the caller of the function. // SAFETY: The invariants of this operation are ensured by the caller of the function.
SliceRefMut::Slice(buf) => unsafe { BytesMutPtr::Bytes(ptr) => unsafe {
let ptr = buf.as_mut_ptr(); let src = ptr.add(from);
let src_ptr = ptr.add(from); let dest = ptr.add(to);
let dest_ptr = ptr.add(to); ptr::copy(src, dest, count);
ptr::copy(src_ptr, dest_ptr, count);
}, },
// SAFETY: The invariants of this operation are ensured by the caller of the function. // SAFETY: The invariants of this operation are ensured by the caller of the function.
SliceRefMut::AtomicSlice(buf) => unsafe { BytesMutPtr::AtomicBytes(ptr) => unsafe {
let src = buf.get_unchecked(from..); let src = ptr.add(from);
let dest = buf.get_unchecked(to..); let dest = ptr.add(to);
// Let's draw a simple array. // Let's draw a simple array.
// //
// | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | // | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
@ -453,7 +461,7 @@ pub(crate) unsafe fn memmove(buffer: SliceRefMut<'_>, from: usize, to: usize, co
// | 0 | 1 | 0 | 1 | 2 | 3 | 6 | 7 | 8 | // | 0 | 1 | 0 | 1 | 2 | 3 | 6 | 7 | 8 |
// ^ ^ // ^ ^
// from to // from to
if from < to && to < from + count { if src < dest {
copy_shared_to_shared_backwards(src, dest, count); copy_shared_to_shared_backwards(src, dest, count);
} else { } else {
copy_shared_to_shared(src, dest, count); copy_shared_to_shared(src, dest, count);
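
The diagram above is the whole argument for the direction check: when the destination range starts after the source and the two overlap, a forward byte-by-byte copy would read bytes it has already overwritten, so the copy has to run backwards. The same reasoning on a plain byte array (the shared version in the diff differs only in how each byte is loaded and stored):

fn shift_within(buf: &mut [u8], from: usize, to: usize, count: usize) {
    if from < to {
        // Overlapping forward shift: copy backwards so every source byte is read
        // before it can be overwritten. A forward loop here would duplicate the
        // first bytes instead (e.g. [0, 1, 0, 1, 0, 1, ...]).
        for i in (0..count).rev() {
            buf[to + i] = buf[from + i];
        }
    } else {
        for i in 0..count {
            buf[to + i] = buf[from + i];
        }
    }
}

fn main() {
    let mut buf = [0, 1, 2, 3, 4, 5, 6, 7, 8];
    // Same numbers as the diagram: from = 0, to = 2, count = 4.
    shift_within(&mut buf, 0, 2, 4);
    assert_eq!(buf, [0, 1, 0, 1, 2, 3, 6, 7, 8]);
}
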

5
core/engine/src/builtins/atomics/futex.rs

@ -273,6 +273,7 @@ pub(super) enum AtomicsWaitResult {
// our implementation guarantees that `SharedArrayBuffer` is always aligned to `u64` at minimum. // our implementation guarantees that `SharedArrayBuffer` is always aligned to `u64` at minimum.
pub(super) unsafe fn wait<E: Element + PartialEq>( pub(super) unsafe fn wait<E: Element + PartialEq>(
buffer: &SharedArrayBuffer, buffer: &SharedArrayBuffer,
buf_len: usize,
offset: usize, offset: usize,
check: E, check: E,
timeout: Option<Duration>, timeout: Option<Duration>,
@ -287,7 +288,7 @@ pub(super) unsafe fn wait<E: Element + PartialEq>(
let time_info = timeout.map(|timeout| (Instant::now(), timeout)); let time_info = timeout.map(|timeout| (Instant::now(), timeout));
let buffer = &buffer.data()[offset..]; let buffer = &buffer.bytes_with_len(buf_len)[offset..];
// 13. Let elementType be TypedArrayElementType(typedArray). // 13. Let elementType be TypedArrayElementType(typedArray).
// 14. Let w be GetValueFromBuffer(buffer, indexedPosition, elementType, true, SeqCst). // 14. Let w be GetValueFromBuffer(buffer, indexedPosition, elementType, true, SeqCst).
@ -380,7 +381,7 @@ pub(super) unsafe fn wait<E: Element + PartialEq>(
/// Notifies at most `count` agents waiting on the memory address pointed to by `buffer[offset..]`. /// Notifies at most `count` agents waiting on the memory address pointed to by `buffer[offset..]`.
pub(super) fn notify(buffer: &SharedArrayBuffer, offset: usize, count: u64) -> JsResult<u64> { pub(super) fn notify(buffer: &SharedArrayBuffer, offset: usize, count: u64) -> JsResult<u64> {
let addr = buffer.data()[offset..].as_ptr().addr(); let addr = buffer.as_ptr().addr() + offset;
// 7. Let WL be GetWaiterList(block, indexedPosition). // 7. Let WL be GetWaiterList(block, indexedPosition).
// 8. Perform EnterCriticalSection(WL). // 8. Perform EnterCriticalSection(WL).
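
The only substantive change in `notify` is how the waiter address is computed: offsetting the base pointer gives the same address as slicing first, without needing the buffer's (now growable) length to build the slice. A quick check of that equivalence:

fn main() {
    let buf = [0u8; 16];
    let offset = 4;
    let via_slice = buf[offset..].as_ptr() as usize;
    let via_base = buf.as_ptr() as usize + offset;
    assert_eq!(via_slice, via_base);
}
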

225
core/engine/src/builtins/atomics/mod.rs

@ -20,11 +20,11 @@ use crate::{
sys::time::Duration, value::IntegerOrInfinity, Context, JsArgs, JsNativeError, JsResult, sys::time::Duration, value::IntegerOrInfinity, Context, JsArgs, JsNativeError, JsResult,
JsString, JsValue, JsString, JsValue,
}; };
use boa_gc::GcRef;
use boa_profiler::Profiler; use boa_profiler::Profiler;
use super::{ use super::{
array_buffer::BufferRef, array_buffer::{BufferObject, BufferRef},
typed_array::{Atomic, ContentType, Element, TypedArray, TypedArrayElement, TypedArrayKind}, typed_array::{Atomic, ContentType, Element, TypedArray, TypedArrayElement, TypedArrayKind},
BuiltInBuilder, IntrinsicObject, BuiltInBuilder, IntrinsicObject,
}; };
@ -75,19 +75,35 @@ macro_rules! atomic_op {
let index = args.get_or_undefined(1); let index = args.get_or_undefined(1);
let value = args.get_or_undefined(2); let value = args.get_or_undefined(2);
let ii = validate_integer_typed_array(array, false)?; // AtomicReadModifyWrite ( typedArray, index, value, op )
let pos = validate_atomic_access(&ii, index, context)?; // <https://tc39.es/ecma262/#sec-atomicreadmodifywrite>
let value = ii.kind().get_element(value, context)?;
// 1. Let buffer be ? ValidateIntegerTypedArray(typedArray).
let (ta, buf_len) = validate_integer_typed_array(array, false)?;
// 2. Let indexedPosition be ? ValidateAtomicAccess(typedArray, index).
let access = validate_atomic_access(&ta, buf_len, index, context)?;
// 3. If typedArray.[[ContentType]] is BigInt, let v be ? ToBigInt(value).
// 4. Otherwise, let v be 𝔽(? ToIntegerOrInfinity(value)).
// 7. Let elementType be TypedArrayElementType(typedArray).
let value = access.kind.get_element(value, context)?;
// revalidate // 5. If IsDetachedBuffer(buffer) is true, throw a TypeError exception.
let mut buffer = ii.viewed_array_buffer().as_buffer_mut(); // 6. NOTE: The above check is not redundant with the check in ValidateIntegerTypedArray because the call
let Some(mut data) = buffer.data_mut() else { // to ToBigInt or ToIntegerOrInfinity on the preceding lines can have arbitrary side effects, which could
// cause the buffer to become detached.
let ta = ta.borrow();
let ta = &ta.data;
let mut buffer = ta.viewed_array_buffer().as_buffer_mut();
let Some(mut data) = buffer.bytes_with_len(buf_len) else {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("cannot execute atomic operation in detached buffer") .with_message("cannot execute atomic operation in detached buffer")
.into()); .into());
}; };
let data = data.subslice_mut(pos..); let data = data.subslice_mut(access.byte_offset..);
// 8. Return GetModifySetValueInBuffer(buffer, indexedPosition, elementType, v, op).
// SAFETY: The integer indexed object guarantees that the buffer is aligned. // SAFETY: The integer indexed object guarantees that the buffer is aligned.
// The call to `validate_atomic_access` guarantees that the index is in-bounds. // The call to `validate_atomic_access` guarantees that the index is in-bounds.
let value: TypedArrayElement = unsafe { let value: TypedArrayElement = unsafe {
@ -161,24 +177,26 @@ impl Atomics {
let index = args.get_or_undefined(1); let index = args.get_or_undefined(1);
// 1. Let indexedPosition be ? ValidateAtomicAccessOnIntegerTypedArray(typedArray, index). // 1. Let indexedPosition be ? ValidateAtomicAccessOnIntegerTypedArray(typedArray, index).
let ii = validate_integer_typed_array(array, false)?; let (ta, buf_len) = validate_integer_typed_array(array, false)?;
let pos = validate_atomic_access(&ii, index, context)?; let access = validate_atomic_access(&ta, buf_len, index, context)?;
// 2. Perform ? RevalidateAtomicAccess(typedArray, indexedPosition). // 2. Perform ? RevalidateAtomicAccess(typedArray, indexedPosition).
let buffer = ii.viewed_array_buffer().as_buffer(); let ta = ta.borrow();
let Some(data) = buffer.data() else { let ta = &ta.data;
let buffer = ta.viewed_array_buffer().as_buffer();
let Some(data) = buffer.bytes_with_len(buf_len) else {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("cannot execute atomic operation in detached buffer") .with_message("cannot execute atomic operation in detached buffer")
.into()); .into());
}; };
let data = data.subslice(pos..); let data = data.subslice(access.byte_offset..);
// 3. Let buffer be typedArray.[[ViewedArrayBuffer]]. // 3. Let buffer be typedArray.[[ViewedArrayBuffer]].
// 4. Let elementType be TypedArrayElementType(typedArray). // 4. Let elementType be TypedArrayElementType(typedArray).
// 5. Return GetValueFromBuffer(buffer, indexedPosition, elementType, true, seq-cst). // 5. Return GetValueFromBuffer(buffer, indexedPosition, elementType, true, seq-cst).
// SAFETY: The integer indexed object guarantees that the buffer is aligned. // SAFETY: The integer indexed object guarantees that the buffer is aligned.
// The call to `validate_atomic_access` guarantees that the index is in-bounds. // The call to `validate_atomic_access` guarantees that the index is in-bounds.
let value = unsafe { data.get_value(ii.kind(), Ordering::SeqCst) }; let value = unsafe { data.get_value(access.kind, Ordering::SeqCst) };
Ok(value.into()) Ok(value.into())
} }
@ -192,12 +210,12 @@ impl Atomics {
let value = args.get_or_undefined(2); let value = args.get_or_undefined(2);
// 1. Let indexedPosition be ? ValidateAtomicAccessOnIntegerTypedArray(typedArray, index). // 1. Let indexedPosition be ? ValidateAtomicAccessOnIntegerTypedArray(typedArray, index).
let ii = validate_integer_typed_array(array, false)?; let (ta, buf_len) = validate_integer_typed_array(array, false)?;
let pos = validate_atomic_access(&ii, index, context)?; let access = validate_atomic_access(&ta, buf_len, index, context)?;
// bit of a hack to preserve the converted value // bit of a hack to preserve the converted value
// 2. If typedArray.[[ContentType]] is bigint, let v be ? ToBigInt(value). // 2. If typedArray.[[ContentType]] is bigint, let v be ? ToBigInt(value).
let converted: JsValue = if ii.kind().content_type() == ContentType::BigInt { let converted: JsValue = if access.kind.content_type() == ContentType::BigInt {
value.to_bigint(context)?.into() value.to_bigint(context)?.into()
} else { } else {
// 3. Otherwise, let v be 𝔽(? ToIntegerOrInfinity(value)). // 3. Otherwise, let v be 𝔽(? ToIntegerOrInfinity(value)).
@ -208,16 +226,18 @@ impl Atomics {
} }
.into() .into()
}; };
let value = ii.kind().get_element(&converted, context)?; let value = access.kind.get_element(&converted, context)?;
// 4. Perform ? RevalidateAtomicAccess(typedArray, indexedPosition). // 4. Perform ? RevalidateAtomicAccess(typedArray, indexedPosition).
let mut buffer = ii.viewed_array_buffer().as_buffer_mut(); let ta = ta.borrow();
let Some(mut buffer) = buffer.data_mut() else { let ta = &ta.data;
let mut buffer = ta.viewed_array_buffer().as_buffer_mut();
let Some(mut buffer) = buffer.bytes_with_len(buf_len) else {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("cannot execute atomic operation in detached buffer") .with_message("cannot execute atomic operation in detached buffer")
.into()); .into());
}; };
let mut data = buffer.subslice_mut(pos..); let mut data = buffer.subslice_mut(access.byte_offset..);
// 5. Let buffer be typedArray.[[ViewedArrayBuffer]]. // 5. Let buffer be typedArray.[[ViewedArrayBuffer]].
// 6. Let elementType be TypedArrayElementType(typedArray). // 6. Let elementType be TypedArrayElementType(typedArray).
@ -244,9 +264,8 @@ impl Atomics {
// 1. Let indexedPosition be ? ValidateAtomicAccessOnIntegerTypedArray(typedArray, index). // 1. Let indexedPosition be ? ValidateAtomicAccessOnIntegerTypedArray(typedArray, index).
// 2. Let buffer be typedArray.[[ViewedArrayBuffer]]. // 2. Let buffer be typedArray.[[ViewedArrayBuffer]].
// 3. Let block be buffer.[[ArrayBufferData]]. // 3. Let block be buffer.[[ArrayBufferData]].
let ii = validate_integer_typed_array(array, false)?; let (ta, buf_len) = validate_integer_typed_array(array, false)?;
let pos = validate_atomic_access(&ii, index, context)?; let access = validate_atomic_access(&ta, buf_len, index, context)?;
let typed_array_kind = ii.kind();
// 4. If typedArray.[[ContentType]] is bigint, then // 4. If typedArray.[[ContentType]] is bigint, then
// a. Let expected be ? ToBigInt(expectedValue). // a. Let expected be ? ToBigInt(expectedValue).
@ -254,19 +273,19 @@ impl Atomics {
// 5. Else, // 5. Else,
// a. Let expected be 𝔽(? ToIntegerOrInfinity(expectedValue)). // a. Let expected be 𝔽(? ToIntegerOrInfinity(expectedValue)).
// b. Let replacement be 𝔽(? ToIntegerOrInfinity(replacementValue)). // b. Let replacement be 𝔽(? ToIntegerOrInfinity(replacementValue)).
let exp = typed_array_kind.get_element(expected, context)?.to_bytes(); let exp = access.kind.get_element(expected, context)?.to_bits();
let rep = typed_array_kind let rep = access.kind.get_element(replacement, context)?.to_bits();
.get_element(replacement, context)?
.to_bytes();
// 6. Perform ? RevalidateAtomicAccess(typedArray, indexedPosition). // 6. Perform ? RevalidateAtomicAccess(typedArray, indexedPosition).
let mut buffer = ii.viewed_array_buffer().as_buffer_mut(); let ta = ta.borrow();
let Some(mut data) = buffer.data_mut() else { let ta = &ta.data;
let mut buffer = ta.viewed_array_buffer().as_buffer_mut();
let Some(mut buffer) = buffer.bytes_with_len(buf_len) else {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("cannot execute atomic operation in detached buffer") .with_message("cannot execute atomic operation in detached buffer")
.into()); .into());
}; };
let data = data.subslice_mut(pos..); let data = buffer.subslice_mut(access.byte_offset..);
// 7. Let elementType be TypedArrayElementType(typedArray). // 7. Let elementType be TypedArrayElementType(typedArray).
// 8. Let elementSize be TypedArrayElementSize(typedArray). // 8. Let elementSize be TypedArrayElementSize(typedArray).
@ -280,10 +299,11 @@ impl Atomics {
// b. If ByteListEqual(rawBytesRead, expectedBytes) is true, then // b. If ByteListEqual(rawBytesRead, expectedBytes) is true, then
// i. Store the individual bytes of replacementBytes into block, starting at block[indexedPosition]. // i. Store the individual bytes of replacementBytes into block, starting at block[indexedPosition].
// 14. Return RawBytesToNumeric(elementType, rawBytesRead, isLittleEndian). // 14. Return RawBytesToNumeric(elementType, rawBytesRead, isLittleEndian).
// SAFETY: The integer indexed object guarantees that the buffer is aligned. // SAFETY: The integer indexed object guarantees that the buffer is aligned.
// The call to `validate_atomic_access` guarantees that the index is in-bounds. // The call to `validate_atomic_access` guarantees that the index is in-bounds.
let value: TypedArrayElement = unsafe { let value: TypedArrayElement = unsafe {
match typed_array_kind { match access.kind {
TypedArrayKind::Int8 => i8::read_mut(data) TypedArrayKind::Int8 => i8::read_mut(data)
.compare_exchange(exp as i8, rep as i8, Ordering::SeqCst) .compare_exchange(exp as i8, rep as i8, Ordering::SeqCst)
.into(), .into(),
@ -321,23 +341,6 @@ impl Atomics {
// =========== Atomics.ops start =========== // =========== Atomics.ops start ===========
// Most of the operations here follow the same list of steps:
//
// AtomicReadModifyWrite ( typedArray, index, value, op )
// <https://tc39.es/ecma262/#sec-atomicreadmodifywrite>
//
// 1. Let buffer be ? ValidateIntegerTypedArray(typedArray).
// 2. Let indexedPosition be ? ValidateAtomicAccess(typedArray, index).
// 3. If typedArray.[[ContentType]] is BigInt, let v be ? ToBigInt(value).
// 4. Otherwise, let v be 𝔽(? ToIntegerOrInfinity(value)).
// 5. If IsDetachedBuffer(buffer) is true, throw a TypeError exception.
// 6. NOTE: The above check is not redundant with the check in ValidateIntegerTypedArray because the call to ToBigInt or ToIntegerOrInfinity on the preceding lines can have arbitrary side effects, which could cause the buffer to become detached.
// 7. Let elementType be TypedArrayElementType(typedArray).
// 8. Return GetModifySetValueInBuffer(buffer, indexedPosition, elementType, v, op).
//
    // However, our implementation differs significantly from this, which is why these steps are
// just here for documentation purposes.
atomic_op! { atomic_op! {
/// [`Atomics.add ( typedArray, index, value )`][spec] /// [`Atomics.add ( typedArray, index, value )`][spec]
/// ///
@ -383,28 +386,32 @@ impl Atomics {
/// [`Atomics.wait ( typedArray, index, value, timeout )`][spec] /// [`Atomics.wait ( typedArray, index, value, timeout )`][spec]
/// ///
/// [spec]: https://tc39.es/ecma262/#sec-atomics.wait /// [spec]: https://tc39.es/ecma262/#sec-atomics.wait
// TODO: rewrite this to support Atomics.waitAsync
fn wait(_: &JsValue, args: &[JsValue], context: &mut Context) -> JsResult<JsValue> { fn wait(_: &JsValue, args: &[JsValue], context: &mut Context) -> JsResult<JsValue> {
let array = args.get_or_undefined(0); let array = args.get_or_undefined(0);
let index = args.get_or_undefined(1); let index = args.get_or_undefined(1);
let value = args.get_or_undefined(2); let value = args.get_or_undefined(2);
let timeout = args.get_or_undefined(3); let timeout = args.get_or_undefined(3);
// 1. Let buffer be ? ValidateIntegerTypedArray(typedArray, true). // 1. Let taRecord be ? ValidateIntegerTypedArray(typedArray, true).
let ii = validate_integer_typed_array(array, true)?; let (ta, buf_len) = validate_integer_typed_array(array, true)?;
let buffer = ii.viewed_array_buffer().as_buffer();
// 2. Let buffer be taRecord.[[Object]].[[ViewedArrayBuffer]].
// 2. If IsSharedArrayBuffer(buffer) is false, throw a TypeError exception. // 2. If IsSharedArrayBuffer(buffer) is false, throw a TypeError exception.
let BufferRef::SharedBuffer(buffer) = buffer else { let buffer = match ta.borrow().data.viewed_array_buffer() {
BufferObject::SharedBuffer(buf) => buf.clone(),
BufferObject::Buffer(_) => {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("cannot use `ArrayBuffer` for an atomic wait") .with_message("cannot use `ArrayBuffer` for an atomic wait")
.into()); .into())
}
}; };
// 3. Let indexedPosition be ? ValidateAtomicAccess(typedArray, index). // 3. Let indexedPosition be ? ValidateAtomicAccess(typedArray, index).
let offset = validate_atomic_access(&ii, index, context)?; let access = validate_atomic_access(&ta, buf_len, index, context)?;
// spec expects the evaluation of this first, then the timeout. // spec expects the evaluation of this first, then the timeout.
let value = if ii.kind() == TypedArrayKind::BigInt64 { let value = if access.kind == TypedArrayKind::BigInt64 {
// 4. If typedArray.[[TypedArrayName]] is "BigInt64Array", let v be ? ToBigInt64(value). // 4. If typedArray.[[TypedArrayName]] is "BigInt64Array", let v be ? ToBigInt64(value).
value.to_big_int64(context)? value.to_big_int64(context)?
} else { } else {
@ -435,11 +442,23 @@ impl Atomics {
// SAFETY: the validity of `addr` is verified by our call to `validate_atomic_access`. // SAFETY: the validity of `addr` is verified by our call to `validate_atomic_access`.
let result = unsafe { let result = unsafe {
if ii.kind() == TypedArrayKind::BigInt64 { if access.kind == TypedArrayKind::BigInt64 {
futex::wait(&buffer, offset, value, timeout)? futex::wait(
&buffer.borrow().data,
buf_len,
access.byte_offset,
value,
timeout,
)?
} else { } else {
// value must fit into `i32` since it came from an `i32` above. // value must fit into `i32` since it came from an `i32` above.
futex::wait(&buffer, offset, value as i32, timeout)? futex::wait(
&buffer.borrow().data,
buf_len,
access.byte_offset,
value as i32,
timeout,
)?
} }
}; };
@ -460,8 +479,8 @@ impl Atomics {
let count = args.get_or_undefined(2); let count = args.get_or_undefined(2);
// 1. Let indexedPosition be ? ValidateAtomicAccessOnIntegerTypedArray(typedArray, index, true). // 1. Let indexedPosition be ? ValidateAtomicAccessOnIntegerTypedArray(typedArray, index, true).
let ii = validate_integer_typed_array(array, true)?; let (ta, buf_len) = validate_integer_typed_array(array, true)?;
let offset = validate_atomic_access(&ii, index, context)?; let access = validate_atomic_access(&ta, buf_len, index, context)?;
// 2. If count is undefined, then // 2. If count is undefined, then
let count = if count.is_undefined() { let count = if count.is_undefined() {
@ -481,11 +500,12 @@ impl Atomics {
// 4. Let buffer be typedArray.[[ViewedArrayBuffer]]. // 4. Let buffer be typedArray.[[ViewedArrayBuffer]].
// 5. Let block be buffer.[[ArrayBufferData]]. // 5. Let block be buffer.[[ArrayBufferData]].
// 6. If IsSharedArrayBuffer(buffer) is false, return +0𝔽. // 6. If IsSharedArrayBuffer(buffer) is false, return +0𝔽.
let BufferRef::SharedBuffer(shared) = ii.viewed_array_buffer().as_buffer() else { let ta = ta.borrow();
let BufferRef::SharedBuffer(shared) = ta.data.viewed_array_buffer().as_buffer() else {
return Ok(0.into()); return Ok(0.into());
}; };
let count = futex::notify(&shared, offset, count)?; let count = futex::notify(&shared, access.byte_offset, count)?;
// 12. Let n be the number of elements in S. // 12. Let n be the number of elements in S.
// 13. Return 𝔽(n). // 13. Return 𝔽(n).
@ -493,41 +513,33 @@ impl Atomics {
} }
} }
/// [`ValidateIntegerTypedArray ( typedArray [ , waitable ] )`][spec] /// [`ValidateIntegerTypedArray ( typedArray, waitable )`][spec]
/// ///
/// [spec]: https://tc39.es/ecma262/#sec-validateintegertypedarray /// [spec]: https://tc39.es/ecma262/#sec-validateintegertypedarray
fn validate_integer_typed_array( fn validate_integer_typed_array(
array: &JsValue, array: &JsValue,
waitable: bool, waitable: bool,
) -> JsResult<GcRef<'_, TypedArray>> { ) -> JsResult<(JsObject<TypedArray>, usize)> {
// 1. If waitable is not present, set waitable to false. // 1. Let taRecord be ? ValidateTypedArray(typedArray, unordered).
// 2. Perform ? ValidateTypedArray(typedArray). // 2. NOTE: Bounds checking is not a synchronizing operation when typedArray's backing buffer is a growable SharedArrayBuffer.
let ii = array let ta_record = TypedArray::validate(array, Ordering::Relaxed)?;
.as_object()
.and_then(JsObject::downcast_ref::<TypedArray>)
.ok_or_else(|| JsNativeError::typ().with_message("value is not a typed array object"))?;
if ii.is_detached() {
return Err(JsNativeError::typ()
.with_message("Buffer of the typed array is detached")
.into());
}
// 3. Let buffer be typedArray.[[ViewedArrayBuffer]]. {
let array = ta_record.0.borrow();
// 3. If waitable is true, then
if waitable { if waitable {
// 4. If waitable is true, then
// a. If typedArray.[[TypedArrayName]] is neither "Int32Array" nor "BigInt64Array", throw a TypeError exception. // a. If typedArray.[[TypedArrayName]] is neither "Int32Array" nor "BigInt64Array", throw a TypeError exception.
if ![TypedArrayKind::Int32, TypedArrayKind::BigInt64].contains(&ii.kind()) { if ![TypedArrayKind::Int32, TypedArrayKind::BigInt64].contains(&array.data.kind()) {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("can only atomically wait using Int32 or BigInt64 arrays") .with_message("can only atomically wait using Int32 or BigInt64 arrays")
.into()); .into());
} }
} else { } else {
// 5. Else, // 4. Else,
// a. Let type be TypedArrayElementType(typedArray). // a. Let type be TypedArrayElementType(typedArray).
// b. If IsUnclampedIntegerElementType(type) is false and IsBigIntElementType(type) is // b. If IsUnclampedIntegerElementType(type) is false and IsBigIntElementType(type) is false, throw a TypeError exception.
// false, throw a TypeError exception. if !array.data.kind().supports_atomic_ops() {
if !ii.kind().supports_atomic_ops() {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message( .with_message(
"platform doesn't support atomic operations on the provided `TypedArray`", "platform doesn't support atomic operations on the provided `TypedArray`",
@ -535,21 +547,40 @@ fn validate_integer_typed_array(
.into()); .into());
} }
} }
}
// 5. Return taRecord.
Ok(ta_record)
}
// 6. Return buffer. struct AtomicAccess {
Ok(ii) byte_offset: usize,
kind: TypedArrayKind,
} }
/// [`ValidateAtomicAccess ( iieoRecord, requestIndex )`][spec] /// [`ValidateAtomicAccess ( taRecord, requestIndex )`][spec]
/// ///
/// [spec]: https://tc39.es/ecma262/#sec-validateatomicaccess /// [spec]: https://tc39.es/ecma262/#sec-validateatomicaccess
fn validate_atomic_access( fn validate_atomic_access(
array: &TypedArray, array: &JsObject<TypedArray>,
buf_len: usize,
request_index: &JsValue, request_index: &JsValue,
context: &mut Context, context: &mut Context,
) -> JsResult<usize> { ) -> JsResult<AtomicAccess> {
// 5. Let typedArray be taRecord.[[Object]].
let (length, kind, offset) = {
let array = array.borrow();
let array = &array.data;
// 1. Let length be typedArray.[[ArrayLength]]. // 1. Let length be typedArray.[[ArrayLength]].
let length = array.array_length(); // 6. Let elementSize be TypedArrayElementSize(typedArray).
// 7. Let offset be typedArray.[[ByteOffset]].
(
array.array_length(buf_len),
array.kind(),
array.byte_offset(),
)
};
// 2. Let accessIndex be ? ToIndex(requestIndex). // 2. Let accessIndex be ? ToIndex(requestIndex).
let access_index = request_index.to_index(context)?; let access_index = request_index.to_index(context)?;
@ -564,12 +595,10 @@ fn validate_atomic_access(
.into()); .into());
} }
// 5. Let elementSize be TypedArrayElementSize(typedArray). // 8. Return (accessIndex × elementSize) + offset.
let element_size = array.kind().element_size(); let offset = ((access_index * kind.element_size()) + offset) as usize;
Ok(AtomicAccess {
// 6. Let offset be typedArray.[[ByteOffset]]. byte_offset: offset,
let offset = array.byte_offset(); kind,
})
// 7. Return (accessIndex × elementSize) + offset.
Ok(((access_index * element_size) + offset) as usize)
} }
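
`validate_atomic_access` now hands back both the element kind and the resolved byte offset, and step 8 is plain arithmetic: requested index times element size, plus the view's own byte offset. A worked example with assumed numbers (an Int32Array created at byteOffset 8, accessed at index 3):

fn atomic_byte_offset(access_index: u64, element_size: u64, view_byte_offset: u64) -> usize {
    ((access_index * element_size) + view_byte_offset) as usize
}

fn main() {
    // Int32 elements are 4 bytes wide, so index 3 at byteOffset 8 lands on byte 20.
    assert_eq!(atomic_byte_offset(3, 4, 8), 20);
}
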

322
core/engine/src/builtins/dataview/mod.rs

@ -7,7 +7,7 @@
//! [spec]: https://tc39.es/ecma262/#sec-dataview-objects //! [spec]: https://tc39.es/ecma262/#sec-dataview-objects
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView //! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView
use std::mem; use std::{mem, sync::atomic::Ordering};
use crate::{ use crate::{
builtins::BuiltInObject, builtins::BuiltInObject,
@ -27,7 +27,7 @@ use bytemuck::{bytes_of, bytes_of_mut};
use super::{ use super::{
array_buffer::{ array_buffer::{
utils::{memcpy, SliceRef, SliceRefMut}, utils::{memcpy, BytesConstPtr, BytesMutPtr},
BufferObject, BufferObject,
}, },
typed_array::{self, TypedArrayElement}, typed_array::{self, TypedArrayElement},
@ -38,10 +38,61 @@ use super::{
#[derive(Debug, Clone, Trace, Finalize, JsData)] #[derive(Debug, Clone, Trace, Finalize, JsData)]
pub struct DataView { pub struct DataView {
pub(crate) viewed_array_buffer: BufferObject, pub(crate) viewed_array_buffer: BufferObject,
pub(crate) byte_length: u64, pub(crate) byte_length: Option<u64>,
pub(crate) byte_offset: u64, pub(crate) byte_offset: u64,
} }
impl DataView {
/// Abstract operation [`GetViewByteLength ( viewRecord )`][spec].
///
/// [spec]: https://tc39.es/ecma262/#sec-getviewbytelength
fn byte_length(&self, buf_byte_len: usize) -> u64 {
// 1. Assert: IsViewOutOfBounds(viewRecord) is false.
debug_assert!(!self.is_out_of_bounds(buf_byte_len));
// 2. Let view be viewRecord.[[Object]].
// 3. If view.[[ByteLength]] is not auto, return view.[[ByteLength]].
if let Some(byte_length) = self.byte_length {
return byte_length;
}
// 4. Assert: IsFixedLengthArrayBuffer(view.[[ViewedArrayBuffer]]) is false.
// 5. Let byteOffset be view.[[ByteOffset]].
// 6. Let byteLength be viewRecord.[[CachedBufferByteLength]].
// 7. Assert: byteLength is not detached.
// 8. Return byteLength - byteOffset.
buf_byte_len as u64 - self.byte_offset
}
/// Abstract operation [`IsViewOutOfBounds ( viewRecord )`][spec].
///
/// [spec]: https://tc39.es/ecma262/#sec-isviewoutofbounds
fn is_out_of_bounds(&self, buf_byte_len: usize) -> bool {
let buf_byte_len = buf_byte_len as u64;
// 1. Let view be viewRecord.[[Object]].
// 2. Let bufferByteLength be viewRecord.[[CachedBufferByteLength]].
// 3. Assert: IsDetachedBuffer(view.[[ViewedArrayBuffer]]) is true if and only if bufferByteLength is detached.
// 4. If bufferByteLength is detached, return true.
// handled by the caller
// 5. Let byteOffsetStart be view.[[ByteOffset]].
// 6. If view.[[ByteLength]] is auto, then
// a. Let byteOffsetEnd be bufferByteLength.
// 7. Else,
// a. Let byteOffsetEnd be byteOffsetStart + view.[[ByteLength]].
let byte_offset_end = self
.byte_length
.map_or(buf_byte_len, |byte_length| byte_length + self.byte_offset);
// 8. If byteOffsetStart > bufferByteLength or byteOffsetEnd > bufferByteLength, return true.
// 9. NOTE: 0-length DataViews are not considered out-of-bounds.
// 10. Return false.
self.byte_offset > buf_byte_len || byte_offset_end > buf_byte_len
}
}
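
The two helpers above read best with concrete numbers. The standalone functions below mirror their formulas (plain arguments instead of the `DataView` fields) for a view at byteOffset 4 with an auto length, first over a 16-byte buffer and then after that buffer shrinks to 2 bytes:

fn is_out_of_bounds(byte_offset: u64, byte_length: Option<u64>, buf_byte_len: u64) -> bool {
    let byte_offset_end = byte_length.map_or(buf_byte_len, |len| len + byte_offset);
    byte_offset > buf_byte_len || byte_offset_end > buf_byte_len
}

fn view_byte_length(byte_offset: u64, byte_length: Option<u64>, buf_byte_len: u64) -> u64 {
    // Auto length: whatever the buffer currently holds past the offset.
    byte_length.unwrap_or(buf_byte_len - byte_offset)
}

fn main() {
    // 16-byte buffer: in bounds, and the auto length is 16 - 4 = 12.
    assert!(!is_out_of_bounds(4, None, 16));
    assert_eq!(view_byte_length(4, None, 16), 12);
    // Shrink the buffer to 2 bytes and the same view is now out of bounds.
    assert!(is_out_of_bounds(4, None, 2));
}
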
impl IntrinsicObject for DataView { impl IntrinsicObject for DataView {
fn init(realm: &Realm) { fn init(realm: &Realm) {
let flag_attributes = Attribute::CONFIGURABLE | Attribute::NON_ENUMERABLE; let flag_attributes = Attribute::CONFIGURABLE | Attribute::NON_ENUMERABLE;
@ -120,7 +171,7 @@ impl BuiltInConstructor for DataView {
const STANDARD_CONSTRUCTOR: fn(&StandardConstructors) -> &StandardConstructor = const STANDARD_CONSTRUCTOR: fn(&StandardConstructors) -> &StandardConstructor =
StandardConstructors::data_view; StandardConstructors::data_view;
/// `25.3.2.1 DataView ( buffer [ , byteOffset [ , byteLength ] ] )` /// `DataView ( buffer [ , byteOffset [ , byteLength ] ] )`
/// ///
/// The `DataView` view provides a low-level interface for reading and writing multiple number /// The `DataView` view provides a low-level interface for reading and writing multiple number
/// types in a binary `ArrayBuffer`, without having to care about the platform's endianness. /// types in a binary `ArrayBuffer`, without having to care about the platform's endianness.
@ -136,93 +187,124 @@ impl BuiltInConstructor for DataView {
args: &[JsValue], args: &[JsValue],
context: &mut Context, context: &mut Context,
) -> JsResult<JsValue> { ) -> JsResult<JsValue> {
let byte_length = args.get_or_undefined(2);
// 1. If NewTarget is undefined, throw a TypeError exception. // 1. If NewTarget is undefined, throw a TypeError exception.
if new_target.is_undefined() { if new_target.is_undefined() {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("new target is undefined") .with_message("cannot call `DataView` constructor without `new`")
.into()); .into());
} }
let byte_len = args.get_or_undefined(2);
// 2. Perform ? RequireInternalSlot(buffer, [[ArrayBufferData]]). // 2. Perform ? RequireInternalSlot(buffer, [[ArrayBufferData]]).
let buffer_obj = args let buffer = args
.get_or_undefined(0) .get_or_undefined(0)
.as_object() .as_object()
.and_then(|o| o.clone().into_buffer_object().ok()) .and_then(|o| o.clone().into_buffer_object().ok())
.ok_or_else(|| JsNativeError::typ().with_message("buffer must be an ArrayBuffer"))?; .ok_or_else(|| JsNativeError::typ().with_message("buffer must be an ArrayBuffer"))?;
let (offset, view_byte_length) = {
let buffer = buffer_obj.as_buffer();
// 3. Let offset be ? ToIndex(byteOffset). // 3. Let offset be ? ToIndex(byteOffset).
let offset = args.get_or_undefined(1).to_index(context)?; let offset = args.get_or_undefined(1).to_index(context)?;
let (buf_byte_len, is_fixed_len) = {
let buffer = buffer.as_buffer();
// 4. If IsDetachedBuffer(buffer) is true, throw a TypeError exception. // 4. If IsDetachedBuffer(buffer) is true, throw a TypeError exception.
let Some(buffer) = buffer.data() else { let Some(slice) = buffer.bytes(Ordering::SeqCst) else {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("ArrayBuffer is detached") .with_message("ArrayBuffer is detached")
.into()); .into());
}; };
// 5. Let bufferByteLength be buffer.[[ArrayBufferByteLength]]. // 5. Let bufferByteLength be ArrayBufferByteLength(buffer, seq-cst).
let buffer_byte_length = buffer.len() as u64; let buf_len = slice.len() as u64;
// 6. If offset > bufferByteLength, throw a RangeError exception. // 6. If offset > bufferByteLength, throw a RangeError exception.
if offset > buffer_byte_length { if offset > buf_len {
return Err(JsNativeError::range() return Err(JsNativeError::range()
.with_message("Start offset is outside the bounds of the buffer") .with_message("Start offset is outside the bounds of the buffer")
.into()); .into());
} }
// 7. If byteLength is undefined, then
let view_byte_length = if byte_length.is_undefined() { // 7. Let bufferIsFixedLength be IsFixedLengthArrayBuffer(buffer).
// a. Let viewByteLength be bufferByteLength - offset.
buffer_byte_length - offset (buf_len, buffer.is_fixed_len())
};
// 8. If byteLength is undefined, then
let view_byte_len = if byte_len.is_undefined() {
// a. If bufferIsFixedLength is true, then
// i. Let viewByteLength be bufferByteLength - offset.
// b. Else,
// i. Let viewByteLength be auto.
is_fixed_len.then_some(buf_byte_len - offset)
} else { } else {
// 8.a. Let viewByteLength be ? ToIndex(byteLength). // 9. Else,
let view_byte_length = byte_length.to_index(context)?; // a. Let viewByteLength be ? ToIndex(byteLength).
// 8.b. If offset + viewByteLength > bufferByteLength, throw a RangeError exception. let byte_len = byte_len.to_index(context)?;
if offset + view_byte_length > buffer_byte_length {
// b. If offset + viewByteLength > bufferByteLength, throw a RangeError exception.
if offset + byte_len > buf_byte_len {
return Err(JsNativeError::range() return Err(JsNativeError::range()
.with_message("Invalid data view length") .with_message("Invalid data view length")
.into()); .into());
} }
Some(byte_len)
view_byte_length
};
(offset, view_byte_length)
}; };
// 9. Let O be ? OrdinaryCreateFromConstructor(NewTarget, "%DataView.prototype%", « [[DataView]], [[ViewedArrayBuffer]], [[ByteLength]], [[ByteOffset]] »). // 10. Let O be ? OrdinaryCreateFromConstructor(NewTarget, "%DataView.prototype%",
// « [[DataView]], [[ViewedArrayBuffer]], [[ByteLength]], [[ByteOffset]] »).
let prototype = let prototype =
get_prototype_from_constructor(new_target, StandardConstructors::data_view, context)?; get_prototype_from_constructor(new_target, StandardConstructors::data_view, context)?;
// 10. If IsDetachedBuffer(buffer) is true, throw a TypeError exception. // 11. If IsDetachedBuffer(buffer) is true, throw a TypeError exception.
if buffer_obj.as_buffer().is_detached() { // 12. Set bufferByteLength to ArrayBufferByteLength(buffer, seq-cst).
let Some(buf_byte_len) = buffer
.as_buffer()
.bytes(Ordering::SeqCst)
.map(|s| s.len() as u64)
else {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("ArrayBuffer can't be detached") .with_message("ArrayBuffer can't be detached")
.into()); .into());
};
// 13. If offset > bufferByteLength, throw a RangeError exception.
if offset > buf_byte_len {
return Err(JsNativeError::range()
.with_message("DataView offset outside of buffer array bounds")
.into());
}
// 14. If byteLength is not undefined, then
if let Some(view_byte_len) = view_byte_len.filter(|_| !byte_len.is_undefined()) {
// a. If offset + viewByteLength > bufferByteLength, throw a RangeError exception.
if offset + view_byte_len > buf_byte_len {
return Err(JsNativeError::range()
.with_message("DataView offset outside of buffer array bounds")
.into());
}
} }
let obj = JsObject::from_proto_and_data_with_shared_shape( let obj = JsObject::from_proto_and_data_with_shared_shape(
context.root_shape(), context.root_shape(),
prototype, prototype,
Self { Self {
// 11. Set O.[[ViewedArrayBuffer]] to buffer. // 15. Set O.[[ViewedArrayBuffer]] to buffer.
viewed_array_buffer: buffer_obj, viewed_array_buffer: buffer,
// 12. Set O.[[ByteLength]] to viewByteLength. // 16. Set O.[[ByteLength]] to viewByteLength.
byte_length: view_byte_length, byte_length: view_byte_len,
// 13. Set O.[[ByteOffset]] to offset. // 17. Set O.[[ByteOffset]] to offset.
byte_offset: offset, byte_offset: offset,
}, },
); );
// 14. Return O. // 18. Return O.
Ok(obj.into()) Ok(obj.into())
} }
} }
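
Step 8 is where the constructor's behaviour actually changes for resizable buffers: omitting `byteLength` used to pin the view to `bufferByteLength - offset`, but now that only happens for fixed-length buffers, and otherwise the length stays auto. A sketch of just that decision, as a hypothetical helper rather than the constructor itself:

fn view_byte_len(requested: Option<u64>, buf_len: u64, offset: u64, is_fixed_len: bool) -> Option<u64> {
    match requested {
        // byteLength omitted: concrete only for fixed-length buffers, otherwise auto.
        None => is_fixed_len.then_some(buf_len - offset),
        // byteLength given: always concrete (its bounds are checked separately).
        Some(len) => Some(len),
    }
}

fn main() {
    assert_eq!(view_byte_len(None, 16, 4, true), Some(12)); // fixed-length buffer
    assert_eq!(view_byte_len(None, 16, 4, false), None);    // resizable buffer: auto
    assert_eq!(view_byte_len(Some(8), 16, 4, false), Some(8));
}
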
impl DataView { impl DataView {
/// `25.3.4.1 get DataView.prototype.buffer` /// `get DataView.prototype.buffer`
/// ///
/// The buffer accessor property represents the `ArrayBuffer` or `SharedArrayBuffer` referenced /// The buffer accessor property represents the `ArrayBuffer` or `SharedArrayBuffer` referenced
/// by the `DataView` at construction time. /// by the `DataView` at construction time.
@ -251,7 +333,7 @@ impl DataView {
Ok(buffer.into()) Ok(buffer.into())
} }
/// `25.3.4.1 get DataView.prototype.byteLength` /// `get DataView.prototype.byteLength`
/// ///
/// The `byteLength` accessor property represents the length (in bytes) of the dataview. /// The `byteLength` accessor property represents the length (in bytes) of the dataview.
/// ///
@ -268,26 +350,32 @@ impl DataView {
) -> JsResult<JsValue> { ) -> JsResult<JsValue> {
// 1. Let O be the this value. // 1. Let O be the this value.
// 2. Perform ? RequireInternalSlot(O, [[DataView]]). // 2. Perform ? RequireInternalSlot(O, [[DataView]]).
// 3. Assert: O has a [[ViewedArrayBuffer]] internal slot.
let view = this let view = this
.as_object() .as_object()
.and_then(JsObject::downcast_ref::<Self>) .and_then(JsObject::downcast_ref::<Self>)
.ok_or_else(|| JsNativeError::typ().with_message("`this` is not a DataView"))?; .ok_or_else(|| JsNativeError::typ().with_message("`this` is not a DataView"))?;
// 3. Assert: O has a [[ViewedArrayBuffer]] internal slot.
// 4. Let buffer be O.[[ViewedArrayBuffer]]. // 4. Let viewRecord be MakeDataViewWithBufferWitnessRecord(O, seq-cst).
// 5. If IsViewOutOfBounds(viewRecord) is true, throw a TypeError exception.
let buffer = view.viewed_array_buffer.as_buffer(); let buffer = view.viewed_array_buffer.as_buffer();
// 5. If IsDetachedBuffer(buffer) is true, throw a TypeError exception. let Some(slice) = buffer
if buffer.is_detached() { .bytes(Ordering::SeqCst)
.filter(|s| !view.is_out_of_bounds(s.len()))
else {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("ArrayBuffer is detached") .with_message("view out of bounds for its inner buffer")
.into()); .into());
} };
// 6. Let size be O.[[ByteLength]].
let size = view.byte_length; // 6. Let size be GetViewByteLength(viewRecord).
let size = view.byte_length(slice.len());
// 7. Return 𝔽(size). // 7. Return 𝔽(size).
Ok(size.into()) Ok(size.into())
} }
/// `25.3.4.1 get DataView.prototype.byteOffset` /// `get DataView.prototype.byteOffset`
/// ///
/// The `byteOffset` accessor property represents the offset (in bytes) of this view from the /// The `byteOffset` accessor property represents the offset (in bytes) of this view from the
/// start of its `ArrayBuffer` or `SharedArrayBuffer`. /// start of its `ArrayBuffer` or `SharedArrayBuffer`.
@ -309,22 +397,28 @@ impl DataView {
.as_object() .as_object()
.and_then(JsObject::downcast_ref::<Self>) .and_then(JsObject::downcast_ref::<Self>)
.ok_or_else(|| JsNativeError::typ().with_message("`this` is not a DataView"))?; .ok_or_else(|| JsNativeError::typ().with_message("`this` is not a DataView"))?;
// 3. Assert: O has a [[ViewedArrayBuffer]] internal slot. // 3. Assert: O has a [[ViewedArrayBuffer]] internal slot.
// 4. Let buffer be O.[[ViewedArrayBuffer]].
let buffer = view.viewed_array_buffer.as_buffer(); let buffer = view.viewed_array_buffer.as_buffer();
// 5. If IsDetachedBuffer(buffer) is true, throw a TypeError exception. // 4. Let viewRecord be MakeDataViewWithBufferWitnessRecord(O, seq-cst).
if buffer.is_detached() { // 5. If IsViewOutOfBounds(viewRecord) is true, throw a TypeError exception.
if buffer
.bytes(Ordering::SeqCst)
.filter(|b| !view.is_out_of_bounds(b.len()))
.is_none()
{
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("Buffer is detached") .with_message("data view is outside the bounds of its inner buffer")
.into()); .into());
} }
// 6. Let offset be O.[[ByteOffset]]. // 6. Let offset be O.[[ByteOffset]].
let offset = view.byte_offset; let offset = view.byte_offset;
// 7. Return 𝔽(offset). // 7. Return 𝔽(offset).
Ok(offset.into()) Ok(offset.into())
} }
/// `25.3.1.1 GetViewValue ( view, requestIndex, isLittleEndian, type )` /// `GetViewValue ( view, requestIndex, isLittleEndian, type )`
/// ///
/// The abstract operation `GetViewValue` takes arguments view, requestIndex, `isLittleEndian`, /// The abstract operation `GetViewValue` takes arguments view, requestIndex, `isLittleEndian`,
/// and type. It is used by functions on `DataView` instances to retrieve values from the /// and type. It is used by functions on `DataView` instances to retrieve values from the
@ -346,48 +440,56 @@ impl DataView {
.as_object() .as_object()
.and_then(JsObject::downcast_ref::<Self>) .and_then(JsObject::downcast_ref::<Self>)
.ok_or_else(|| JsNativeError::typ().with_message("`this` is not a DataView"))?; .ok_or_else(|| JsNativeError::typ().with_message("`this` is not a DataView"))?;
// 3. Let getIndex be ? ToIndex(requestIndex). // 3. Let getIndex be ? ToIndex(requestIndex).
let get_index = request_index.to_index(context)?; let get_index = request_index.to_index(context)?;
// 4. Set isLittleEndian to ! ToBoolean(isLittleEndian). // 4. Set isLittleEndian to ToBoolean(isLittleEndian).
let is_little_endian = is_little_endian.to_boolean(); let is_little_endian = is_little_endian.to_boolean();
// 5. Let buffer be view.[[ViewedArrayBuffer]]. // 6. Let viewRecord be MakeDataViewWithBufferWitnessRecord(view, unordered).
// 7. NOTE: Bounds checking is not a synchronizing operation when view's backing buffer is a growable SharedArrayBuffer.
// 8. If IsViewOutOfBounds(viewRecord) is true, throw a TypeError exception.
let buffer = view.viewed_array_buffer.as_buffer(); let buffer = view.viewed_array_buffer.as_buffer();
let Some(data) = buffer
// 6. If IsDetachedBuffer(buffer) is true, throw a TypeError exception. .bytes(Ordering::Relaxed)
let Some(data) = buffer.data() else { .filter(|buf| !view.is_out_of_bounds(buf.len()))
else {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("ArrayBuffer is detached") .with_message("view out of bounds for its inner buffer")
.into()); .into());
}; };
// 7. Let viewOffset be view.[[ByteOffset]]. // 5. Let viewOffset be view.[[ByteOffset]].
let view_offset = view.byte_offset; let view_offset = view.byte_offset;
// 8. Let viewSize be view.[[ByteLength]]. // 9. Let viewSize be GetViewByteLength(viewRecord).
let view_size = view.byte_length; let view_size = view.byte_length(data.len());
// 9. Let elementSize be the Element Size value specified in Table 72 for Element Type type. // 10. Let elementSize be the Element Size value specified in Table 71 for Element Type type.
let element_size = mem::size_of::<T>() as u64; let element_size = mem::size_of::<T>() as u64;
// 10. If getIndex + elementSize > viewSize, throw a RangeError exception. // 11. If getIndex + elementSize > viewSize, throw a RangeError exception.
if get_index + element_size > view_size { if get_index + element_size > view_size {
return Err(JsNativeError::range() return Err(JsNativeError::range()
.with_message("Offset is outside the bounds of the DataView") .with_message("Offset is outside the bounds of the DataView")
.into()); .into());
} }
// 11. Let bufferIndex be getIndex + viewOffset. // 12. Let bufferIndex be getIndex + viewOffset.
let buffer_index = (get_index + view_offset) as usize; let buffer_index = (get_index + view_offset) as usize;
// 12. Return GetValueFromBuffer(buffer, bufferIndex, type, false, Unordered, isLittleEndian). let src = data.subslice(buffer_index..);
debug_assert!(src.len() >= mem::size_of::<T>());
// 13. Return GetValueFromBuffer(view.[[ViewedArrayBuffer]], bufferIndex, type, false, unordered, isLittleEndian).
// SAFETY: All previous checks ensure the element fits in the buffer. // SAFETY: All previous checks ensure the element fits in the buffer.
let value: TypedArrayElement = unsafe { let value: TypedArrayElement = unsafe {
let mut value = T::zeroed(); let mut value = T::zeroed();
memcpy( memcpy(
data.subslice(buffer_index..), src.as_ptr(),
SliceRefMut::Slice(bytes_of_mut(&mut value)), BytesMutPtr::Bytes(bytes_of_mut(&mut value).as_mut_ptr()),
mem::size_of::<T>(), mem::size_of::<T>(),
); );
@ -402,7 +504,7 @@ impl DataView {
Ok(value.into()) Ok(value.into())
} }
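
One detail worth keeping in mind when reading `get_view_value`: the copy above only moves raw bytes out of the buffer, and honoring `isLittleEndian` is a separate conversion step. A minimal standalone sketch of that conversion for a 4-byte integer (not boa's element machinery, just the byte-order rule the spec requires):

fn to_u32(raw: [u8; 4], is_little_endian: bool) -> u32 {
    if is_little_endian {
        u32::from_le_bytes(raw)
    } else {
        u32::from_be_bytes(raw)
    }
}

fn main() {
    let raw = [0x01, 0x02, 0x03, 0x04];
    assert_eq!(to_u32(raw, true), 0x0403_0201);
    assert_eq!(to_u32(raw, false), 0x0102_0304);
}
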
/// `25.3.4.5 DataView.prototype.getBigInt64 ( byteOffset [ , littleEndian ] )` /// `DataView.prototype.getBigInt64 ( byteOffset [ , littleEndian ] )`
/// ///
/// The `getBigInt64()` method gets a signed 64-bit integer (long long) at the specified byte /// The `getBigInt64()` method gets a signed 64-bit integer (long long) at the specified byte
/// offset from the start of the `DataView`. /// offset from the start of the `DataView`.
@ -425,7 +527,7 @@ impl DataView {
Self::get_view_value::<i64>(this, byte_offset, is_little_endian, context) Self::get_view_value::<i64>(this, byte_offset, is_little_endian, context)
} }
/// `25.3.4.6 DataView.prototype.getBigUint64 ( byteOffset [ , littleEndian ] )` /// `DataView.prototype.getBigUint64 ( byteOffset [ , littleEndian ] )`
/// ///
/// The `getBigUint64()` method gets an unsigned 64-bit integer (unsigned long long) at the /// The `getBigUint64()` method gets an unsigned 64-bit integer (unsigned long long) at the
/// specified byte offset from the start of the `DataView`. /// specified byte offset from the start of the `DataView`.
@ -448,7 +550,7 @@ impl DataView {
Self::get_view_value::<u64>(this, byte_offset, is_little_endian, context) Self::get_view_value::<u64>(this, byte_offset, is_little_endian, context)
} }
/// `25.3.4.7 DataView.prototype.getBigUint64 ( byteOffset [ , littleEndian ] )` /// `DataView.prototype.getFloat32 ( byteOffset [ , littleEndian ] )`
/// ///
/// The `getFloat32()` method gets a signed 32-bit float (float) at the specified byte offset /// The `getFloat32()` method gets a signed 32-bit float (float) at the specified byte offset
/// from the start of the `DataView`. /// from the start of the `DataView`.
@ -471,7 +573,7 @@ impl DataView {
Self::get_view_value::<f32>(this, byte_offset, is_little_endian, context) Self::get_view_value::<f32>(this, byte_offset, is_little_endian, context)
} }
/// `25.3.4.8 DataView.prototype.getFloat64 ( byteOffset [ , littleEndian ] )` /// `DataView.prototype.getFloat64 ( byteOffset [ , littleEndian ] )`
/// ///
/// The `getFloat64()` method gets a signed 64-bit float (double) at the specified byte offset /// The `getFloat64()` method gets a signed 64-bit float (double) at the specified byte offset
/// from the start of the `DataView`. /// from the start of the `DataView`.
@ -494,7 +596,7 @@ impl DataView {
Self::get_view_value::<f64>(this, byte_offset, is_little_endian, context) Self::get_view_value::<f64>(this, byte_offset, is_little_endian, context)
} }
/// `25.3.4.9 DataView.prototype.getInt8 ( byteOffset [ , littleEndian ] )` /// `DataView.prototype.getInt8 ( byteOffset [ , littleEndian ] )`
/// ///
/// The `getInt8()` method gets a signed 8-bit integer (byte) at the specified byte offset /// The `getInt8()` method gets a signed 8-bit integer (byte) at the specified byte offset
/// from the start of the `DataView`. /// from the start of the `DataView`.
@ -517,7 +619,7 @@ impl DataView {
Self::get_view_value::<i8>(this, byte_offset, is_little_endian, context) Self::get_view_value::<i8>(this, byte_offset, is_little_endian, context)
} }
/// `25.3.4.10 DataView.prototype.getInt16 ( byteOffset [ , littleEndian ] )` /// `DataView.prototype.getInt16 ( byteOffset [ , littleEndian ] )`
/// ///
/// The `getInt16()` method gets a signed 16-bit integer (short) at the specified byte offset /// The `getInt16()` method gets a signed 16-bit integer (short) at the specified byte offset
/// from the start of the `DataView`. /// from the start of the `DataView`.
@ -540,7 +642,7 @@ impl DataView {
Self::get_view_value::<i16>(this, byte_offset, is_little_endian, context) Self::get_view_value::<i16>(this, byte_offset, is_little_endian, context)
} }
/// `25.3.4.11 DataView.prototype.getInt32 ( byteOffset [ , littleEndian ] )` /// `DataView.prototype.getInt32 ( byteOffset [ , littleEndian ] )`
/// ///
/// The `getInt32()` method gets a signed 32-bit integer (long) at the specified byte offset /// The `getInt32()` method gets a signed 32-bit integer (long) at the specified byte offset
/// from the start of the `DataView`. /// from the start of the `DataView`.
@ -563,7 +665,7 @@ impl DataView {
Self::get_view_value::<i32>(this, byte_offset, is_little_endian, context) Self::get_view_value::<i32>(this, byte_offset, is_little_endian, context)
} }
/// `25.3.4.12 DataView.prototype.getUint8 ( byteOffset [ , littleEndian ] )` /// `DataView.prototype.getUint8 ( byteOffset [ , littleEndian ] )`
/// ///
/// The `getUint8()` method gets an unsigned 8-bit integer (unsigned byte) at the specified /// The `getUint8()` method gets an unsigned 8-bit integer (unsigned byte) at the specified
/// byte offset from the start of the `DataView`. /// byte offset from the start of the `DataView`.
@ -586,7 +688,7 @@ impl DataView {
Self::get_view_value::<u8>(this, byte_offset, is_little_endian, context) Self::get_view_value::<u8>(this, byte_offset, is_little_endian, context)
} }
/// `25.3.4.13 DataView.prototype.getUint16 ( byteOffset [ , littleEndian ] )` /// `DataView.prototype.getUint16 ( byteOffset [ , littleEndian ] )`
/// ///
/// The `getUint16()` method gets an unsigned 16-bit integer (unsigned short) at the specified /// The `getUint16()` method gets an unsigned 16-bit integer (unsigned short) at the specified
/// byte offset from the start of the `DataView`. /// byte offset from the start of the `DataView`.
@ -609,7 +711,7 @@ impl DataView {
Self::get_view_value::<u16>(this, byte_offset, is_little_endian, context) Self::get_view_value::<u16>(this, byte_offset, is_little_endian, context)
} }
/// `25.3.4.14 DataView.prototype.getUint32 ( byteOffset [ , littleEndian ] )` /// `DataView.prototype.getUint32 ( byteOffset [ , littleEndian ] )`
/// ///
/// The `getUint32()` method gets an unsigned 32-bit integer (unsigned long) at the specified /// The `getUint32()` method gets an unsigned 32-bit integer (unsigned long) at the specified
/// byte offset from the start of the `DataView`. /// byte offset from the start of the `DataView`.
@ -632,7 +734,7 @@ impl DataView {
Self::get_view_value::<u32>(this, byte_offset, is_little_endian, context) Self::get_view_value::<u32>(this, byte_offset, is_little_endian, context)
} }
/// `25.3.1.1 SetViewValue ( view, requestIndex, isLittleEndian, type )` /// `SetViewValue ( view, requestIndex, isLittleEndian, type )`
/// ///
/// The abstract operation `SetViewValue` takes arguments view, requestIndex, `isLittleEndian`, /// The abstract operation `SetViewValue` takes arguments view, requestIndex, `isLittleEndian`,
/// type, and value. It is used by functions on `DataView` instances to store values into the /// type, and value. It is used by functions on `DataView` instances to store values into the
@ -655,44 +757,55 @@ impl DataView {
.as_object() .as_object()
.and_then(JsObject::downcast_ref::<Self>) .and_then(JsObject::downcast_ref::<Self>)
.ok_or_else(|| JsNativeError::typ().with_message("`this` is not a DataView"))?; .ok_or_else(|| JsNativeError::typ().with_message("`this` is not a DataView"))?;
// 3. Let getIndex be ? ToIndex(requestIndex). // 3. Let getIndex be ? ToIndex(requestIndex).
let get_index = request_index.to_index(context)?; let get_index = request_index.to_index(context)?;
// 4. If ! IsBigIntElementType(type) is true, let numberValue be ? ToBigInt(value). // 4. If IsBigIntElementType(type) is true, let numberValue be ? ToBigInt(value).
// 5. Otherwise, let numberValue be ? ToNumber(value). // 5. Otherwise, let numberValue be ? ToNumber(value).
let value = T::from_js_value(value, context)?; let value = T::from_js_value(value, context)?;
// 6. Set isLittleEndian to ! ToBoolean(isLittleEndian). // 6. Set isLittleEndian to ToBoolean(isLittleEndian).
let is_little_endian = is_little_endian.to_boolean(); let is_little_endian = is_little_endian.to_boolean();
// 7. Let buffer be view.[[ViewedArrayBuffer]].
// 8. Let viewRecord be MakeDataViewWithBufferWitnessRecord(view, unordered).
// 9. NOTE: Bounds checking is not a synchronizing operation when view's backing buffer is a growable SharedArrayBuffer.
// 10. If IsViewOutOfBounds(viewRecord) is true, throw a TypeError exception.
let mut buffer = view.viewed_array_buffer.as_buffer_mut(); let mut buffer = view.viewed_array_buffer.as_buffer_mut();
// 8. If IsDetachedBuffer(buffer) is true, throw a TypeError exception. let Some(mut data) = buffer
let Some(mut data) = buffer.data_mut() else { .bytes(Ordering::Relaxed)
.filter(|buf| !view.is_out_of_bounds(buf.len()))
else {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("ArrayBuffer is detached") .with_message("view out of bounds for its inner buffer")
.into()); .into());
}; };
// 9. Let viewOffset be view.[[ByteOffset]]. // 11. Let viewSize be GetViewByteLength(viewRecord).
let view_size = view.byte_length(data.len());
// 7. Let viewOffset be view.[[ByteOffset]].
let view_offset = view.byte_offset; let view_offset = view.byte_offset;
// 10. Let viewSize be view.[[ByteLength]]. // 12. Let elementSize be the Element Size value specified in Table 71 for Element Type type.
let view_size = view.byte_length; let elem_size = mem::size_of::<T>();
// 11. Let elementSize be the Element Size value specified in Table 72 for Element Type type. // 13. If getIndex + elementSize > viewSize, throw a RangeError exception.
// 12. If getIndex + elementSize > viewSize, throw a RangeError exception. if get_index + elem_size as u64 > view_size {
if get_index + mem::size_of::<T>() as u64 > view_size {
return Err(JsNativeError::range() return Err(JsNativeError::range()
.with_message("Offset is outside the bounds of DataView") .with_message("Offset is outside the bounds of DataView")
.into()); .into());
} }
// 13. Let bufferIndex be getIndex + viewOffset. // 14. Let bufferIndex be getIndex + viewOffset.
let buffer_index = (get_index + view_offset) as usize; let buffer_index = (get_index + view_offset) as usize;
// 14. Return SetValueInBuffer(buffer, bufferIndex, type, numberValue, false, Unordered, isLittleEndian). let mut target = data.subslice_mut(buffer_index..);
debug_assert!(target.len() >= mem::size_of::<T>());
// 15. Perform SetValueInBuffer(view.[[ViewedArrayBuffer]], bufferIndex, type, numberValue, false, unordered, isLittleEndian).
// SAFETY: All previous checks ensure the element fits in the buffer. // SAFETY: All previous checks ensure the element fits in the buffer.
unsafe { unsafe {
let value = if is_little_endian { let value = if is_little_endian {
@ -702,16 +815,17 @@ impl DataView {
}; };
memcpy( memcpy(
SliceRef::Slice(bytes_of(&value)), BytesConstPtr::Bytes(bytes_of(&value).as_ptr()),
data.subslice_mut(buffer_index..), target.as_ptr(),
mem::size_of::<T>(), mem::size_of::<T>(),
); );
} }
// 16. Return undefined.
Ok(JsValue::undefined()) Ok(JsValue::undefined())
} }
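The `is_little_endian` flag above only selects which byte order the value is serialized in before the raw copy. A self-contained sketch of the same idea on an ordinary byte slice, with no Boa types (the helper name is hypothetical):

```rust
/// Writes `value` at `offset` in the requested byte order, mirroring the
/// endian swap performed before the `memcpy` above.
fn write_u32(buf: &mut [u8], offset: usize, value: u32, is_little_endian: bool) {
    let bytes = if is_little_endian {
        value.to_le_bytes()
    } else {
        value.to_be_bytes()
    };
    buf[offset..offset + 4].copy_from_slice(&bytes);
}

fn main() {
    let mut buf = [0u8; 8];
    write_u32(&mut buf, 0, 0x1122_3344, true);
    write_u32(&mut buf, 4, 0x1122_3344, false);
    assert_eq!(&buf[..4], &[0x44, 0x33, 0x22, 0x11]); // little-endian
    assert_eq!(&buf[4..], &[0x11, 0x22, 0x33, 0x44]); // big-endian
}
```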
/// `25.3.4.15 DataView.prototype.setBigInt64 ( byteOffset, value [ , littleEndian ] )` /// `DataView.prototype.setBigInt64 ( byteOffset, value [ , littleEndian ] )`
/// ///
/// The `setBigInt64()` method stores a signed 64-bit integer (long long) value at the /// The `setBigInt64()` method stores a signed 64-bit integer (long long) value at the
/// specified byte offset from the start of the `DataView`. /// specified byte offset from the start of the `DataView`.
@ -735,7 +849,7 @@ impl DataView {
Self::set_view_value::<i64>(this, byte_offset, is_little_endian, value, context) Self::set_view_value::<i64>(this, byte_offset, is_little_endian, value, context)
} }
/// `25.3.4.16 DataView.prototype.setBigUint64 ( byteOffset, value [ , littleEndian ] )` /// `DataView.prototype.setBigUint64 ( byteOffset, value [ , littleEndian ] )`
/// ///
/// The `setBigUint64()` method stores an unsigned 64-bit integer (unsigned long long) value at /// The `setBigUint64()` method stores an unsigned 64-bit integer (unsigned long long) value at
/// the specified byte offset from the start of the `DataView`. /// the specified byte offset from the start of the `DataView`.
@ -759,7 +873,7 @@ impl DataView {
Self::set_view_value::<u64>(this, byte_offset, is_little_endian, value, context) Self::set_view_value::<u64>(this, byte_offset, is_little_endian, value, context)
} }
/// `25.3.4.17 DataView.prototype.setFloat32 ( byteOffset, value [ , littleEndian ] )` /// `DataView.prototype.setFloat32 ( byteOffset, value [ , littleEndian ] )`
/// ///
/// The `setFloat32()` method stores a signed 32-bit float (float) value at the specified byte /// The `setFloat32()` method stores a signed 32-bit float (float) value at the specified byte
/// offset from the start of the `DataView`. /// offset from the start of the `DataView`.
@ -783,7 +897,7 @@ impl DataView {
Self::set_view_value::<f32>(this, byte_offset, is_little_endian, value, context) Self::set_view_value::<f32>(this, byte_offset, is_little_endian, value, context)
} }
/// `25.3.4.18 DataView.prototype.setFloat64 ( byteOffset, value [ , littleEndian ] )` /// `DataView.prototype.setFloat64 ( byteOffset, value [ , littleEndian ] )`
/// ///
/// The `setFloat64()` method stores a signed 64-bit float (double) value at the specified byte /// The `setFloat64()` method stores a signed 64-bit float (double) value at the specified byte
/// offset from the start of the `DataView`. /// offset from the start of the `DataView`.
@ -807,7 +921,7 @@ impl DataView {
Self::set_view_value::<f64>(this, byte_offset, is_little_endian, value, context) Self::set_view_value::<f64>(this, byte_offset, is_little_endian, value, context)
} }
/// `25.3.4.19 DataView.prototype.setInt8 ( byteOffset, value [ , littleEndian ] )` /// `DataView.prototype.setInt8 ( byteOffset, value [ , littleEndian ] )`
/// ///
/// The `setInt8()` method stores a signed 8-bit integer (byte) value at the specified byte /// The `setInt8()` method stores a signed 8-bit integer (byte) value at the specified byte
/// offset from the start of the `DataView`. /// offset from the start of the `DataView`.
@ -831,7 +945,7 @@ impl DataView {
Self::set_view_value::<i8>(this, byte_offset, is_little_endian, value, context) Self::set_view_value::<i8>(this, byte_offset, is_little_endian, value, context)
} }
/// `25.3.4.20 DataView.prototype.setInt16 ( byteOffset, value [ , littleEndian ] )` /// `DataView.prototype.setInt16 ( byteOffset, value [ , littleEndian ] )`
/// ///
/// The `setInt16()` method stores a signed 16-bit integer (short) value at the specified byte /// The `setInt16()` method stores a signed 16-bit integer (short) value at the specified byte
/// offset from the start of the `DataView`. /// offset from the start of the `DataView`.
@ -855,7 +969,7 @@ impl DataView {
Self::set_view_value::<i16>(this, byte_offset, is_little_endian, value, context) Self::set_view_value::<i16>(this, byte_offset, is_little_endian, value, context)
} }
/// `25.3.4.21 DataView.prototype.setInt32 ( byteOffset, value [ , littleEndian ] )` /// `DataView.prototype.setInt32 ( byteOffset, value [ , littleEndian ] )`
/// ///
/// The `setInt32()` method stores a signed 32-bit integer (long) value at the specified byte /// The `setInt32()` method stores a signed 32-bit integer (long) value at the specified byte
/// offset from the start of the `DataView`. /// offset from the start of the `DataView`.
@ -879,7 +993,7 @@ impl DataView {
Self::set_view_value::<i32>(this, byte_offset, is_little_endian, value, context) Self::set_view_value::<i32>(this, byte_offset, is_little_endian, value, context)
} }
/// `25.3.4.22 DataView.prototype.setUint8 ( byteOffset, value [ , littleEndian ] )` /// `DataView.prototype.setUint8 ( byteOffset, value [ , littleEndian ] )`
/// ///
/// The `setUint8()` method stores an unsigned 8-bit integer (byte) value at the specified byte /// The `setUint8()` method stores an unsigned 8-bit integer (byte) value at the specified byte
/// offset from the start of the `DataView`. /// offset from the start of the `DataView`.
@ -903,7 +1017,7 @@ impl DataView {
Self::set_view_value::<u8>(this, byte_offset, is_little_endian, value, context) Self::set_view_value::<u8>(this, byte_offset, is_little_endian, value, context)
} }
/// `25.3.4.23 DataView.prototype.setUint16 ( byteOffset, value [ , littleEndian ] )` /// `DataView.prototype.setUint16 ( byteOffset, value [ , littleEndian ] )`
/// ///
/// The `setUint16()` method stores an unsigned 16-bit integer (unsigned short) value at the /// The `setUint16()` method stores an unsigned 16-bit integer (unsigned short) value at the
/// specified byte offset from the start of the `DataView`. /// specified byte offset from the start of the `DataView`.
@ -927,7 +1041,7 @@ impl DataView {
Self::set_view_value::<u16>(this, byte_offset, is_little_endian, value, context) Self::set_view_value::<u16>(this, byte_offset, is_little_endian, value, context)
} }
/// `25.3.4.24 DataView.prototype.setUint32 ( byteOffset, value [ , littleEndian ] )` /// `DataView.prototype.setUint32 ( byteOffset, value [ , littleEndian ] )`
/// ///
/// The `setUint32()` method stores an unsigned 32-bit integer (unsigned long) value at the /// The `setUint32()` method stores an unsigned 32-bit integer (unsigned long) value at the
/// specified byte offset from the start of the `DataView`. /// specified byte offset from the start of the `DataView`.

1962
core/engine/src/builtins/typed_array/builtin.rs

File diff suppressed because it is too large

4
core/engine/src/builtins/typed_array/mod.rs

@ -511,11 +511,11 @@ pub(crate) enum TypedArrayElement {
} }
impl TypedArrayElement { impl TypedArrayElement {
/// Converts the element into its extended bytes representation as a `u64`. /// Converts the element into its extended bytes representation as an `u64`.
/// ///
/// This is guaranteed to never fail, since all numeric types supported by JS are less than /// This is guaranteed to never fail, since all numeric types supported by JS are less than
/// 8 bytes long. /// 8 bytes long.
pub(crate) fn to_bytes(self) -> u64 { pub(crate) fn to_bits(self) -> u64 {
#[allow(clippy::cast_lossless)] #[allow(clippy::cast_lossless)]
match self { match self {
TypedArrayElement::Int8(num) => num as u64, TypedArrayElement::Int8(num) => num as u64,

303
core/engine/src/builtins/typed_array/object.rs

@ -1,6 +1,6 @@
//! This module implements the `TypedArray` exotic object. //! This module implements the `TypedArray` exotic object.
use std::sync::atomic; use std::sync::atomic::{self, Ordering};
use crate::{ use crate::{
builtins::{array_buffer::BufferObject, Number}, builtins::{array_buffer::BufferObject, Number},
@ -13,7 +13,7 @@ use crate::{
JsData, JsObject, JsData, JsObject,
}, },
property::{PropertyDescriptor, PropertyKey}, property::{PropertyDescriptor, PropertyKey},
Context, JsResult, JsString, JsValue, Context, JsNativeError, JsResult, JsString, JsValue,
}; };
use boa_gc::{Finalize, Trace}; use boa_gc::{Finalize, Trace};
use boa_macros::utf16; use boa_macros::utf16;
@ -32,8 +32,8 @@ pub struct TypedArray {
viewed_array_buffer: BufferObject, viewed_array_buffer: BufferObject,
kind: TypedArrayKind, kind: TypedArrayKind,
byte_offset: u64, byte_offset: u64,
byte_length: u64, byte_length: Option<u64>,
array_length: u64, array_length: Option<u64>,
} }
impl JsData for TypedArray { impl JsData for TypedArray {
@ -58,8 +58,8 @@ impl TypedArray {
viewed_array_buffer: BufferObject, viewed_array_buffer: BufferObject,
kind: TypedArrayKind, kind: TypedArrayKind,
byte_offset: u64, byte_offset: u64,
byte_length: u64, byte_length: Option<u64>,
array_length: u64, array_length: Option<u64>,
) -> Self { ) -> Self {
Self { Self {
viewed_array_buffer, viewed_array_buffer,
@ -70,16 +70,42 @@ impl TypedArray {
} }
} }
/// Abstract operation `IsDetachedBuffer ( arrayBuffer )`. /// Returns `true` if the typed array has an automatic array length.
/// pub(crate) fn is_auto_length(&self) -> bool {
/// Check if `[[ArrayBufferData]]` is null. self.array_length.is_none()
/// }
/// More information:
/// - [ECMAScript reference][spec] /// Abstract operation [`IsTypedArrayOutOfBounds ( taRecord )`][spec].
/// ///
/// [spec]: https://tc39.es/ecma262/#sec-isdetachedbuffer /// [spec]: https://tc39.es/ecma262/sec-istypedarrayoutofbounds
pub(crate) fn is_detached(&self) -> bool { pub(crate) fn is_out_of_bounds(&self, buf_byte_len: usize) -> bool {
self.viewed_array_buffer.as_buffer().is_detached() // Checks when allocating the buffer ensure the length fits inside an `u64`.
let buf_byte_len = buf_byte_len as u64;
// 1. Let O be taRecord.[[Object]].
// 2. Let bufferByteLength be taRecord.[[CachedBufferByteLength]].
// 3. Assert: IsDetachedBuffer(O.[[ViewedArrayBuffer]]) is true if and only if bufferByteLength is detached.
// 4. If bufferByteLength is detached, return true.
// Handled by the caller
// 5. Let byteOffsetStart be O.[[ByteOffset]].
let byte_start = self.byte_offset;
// 6. If O.[[ArrayLength]] is auto, then
// a. Let byteOffsetEnd be bufferByteLength.
let byte_end = self.array_length.map_or(buf_byte_len, |arr_len| {
// 7. Else,
// a. Let elementSize be TypedArrayElementSize(O).
let element_size = self.kind.element_size();
// b. Let byteOffsetEnd be byteOffsetStart + O.[[ArrayLength]] × elementSize.
byte_start + arr_len * element_size
});
// 8. If byteOffsetStart > bufferByteLength or byteOffsetEnd > bufferByteLength, return true.
// 9. NOTE: 0-length TypedArrays are not considered out-of-bounds.
// 10. Return false.
byte_start > buf_byte_len || byte_end > buf_byte_len
} }
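Under the resizable-buffer semantics, a view is out of bounds whenever its start or its (possibly auto) end lies past the buffer's current byte length. A standalone sketch of the same computation with plain integers (not Boa API):

```rust
/// Sketch of `IsTypedArrayOutOfBounds` for a view over a resizable buffer.
/// `array_length` is `None` for auto-length views.
fn is_out_of_bounds(byte_offset: u64, array_length: Option<u64>, element_size: u64, buf_byte_len: u64) -> bool {
    let byte_start = byte_offset;
    // Auto-length views end wherever the buffer currently ends.
    let byte_end = array_length.map_or(buf_byte_len, |len| byte_start + len * element_size);
    byte_start > buf_byte_len || byte_end > buf_byte_len
}

fn main() {
    // A fixed 4-element, 4-byte-per-element view at offset 8 fits a 24-byte buffer...
    assert!(!is_out_of_bounds(8, Some(4), 4, 24));
    // ...but becomes out of bounds after the buffer shrinks to 16 bytes.
    assert!(is_out_of_bounds(8, Some(4), 4, 16));
    // An auto-length view stays in bounds as long as its offset does.
    assert!(!is_out_of_bounds(8, None, 4, 16));
}
```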
/// Get the `TypedArray` object's byte offset. /// Get the `TypedArray` object's byte offset.
@ -99,16 +125,133 @@ impl TypedArray {
&self.viewed_array_buffer &self.viewed_array_buffer
} }
/// [`TypedArrayByteLength ( taRecord )`][spec].
///
/// Get the `TypedArray` object's byte length. /// Get the `TypedArray` object's byte length.
///
/// [spec]: https://tc39.es/ecma262/#sec-typedarraybytelength
#[must_use] #[must_use]
pub const fn byte_length(&self) -> u64 { pub fn byte_length(&self, buf_byte_len: usize) -> u64 {
self.byte_length // 1. If IsTypedArrayOutOfBounds(taRecord) is true, return 0.
if self.is_out_of_bounds(buf_byte_len) {
return 0;
}
// 2. Let length be TypedArrayLength(taRecord).
let length = self.array_length(buf_byte_len);
// 3. If length = 0, return 0.
if length == 0 {
return 0;
}
// 4. Let O be taRecord.[[Object]].
// 5. If O.[[ByteLength]] is not auto, return O.[[ByteLength]].
if let Some(byte_length) = self.byte_length {
return byte_length;
}
// 6. Let elementSize be TypedArrayElementSize(O).
let elem_size = self.kind.element_size();
// 7. Return length × elementSize.
// Should not overflow thanks to the checks at creation time.
length * elem_size
} }
/// [`TypedArrayLength ( taRecord )`][spec].
///
/// Get the `TypedArray` object's array length. /// Get the `TypedArray` object's array length.
///
/// [spec]: https://tc39.es/ecma262/#sec-typedarraylength
#[must_use] #[must_use]
pub const fn array_length(&self) -> u64 { pub fn array_length(&self, buf_byte_len: usize) -> u64 {
self.array_length // 1. Assert: IsTypedArrayOutOfBounds(taRecord) is false.
debug_assert!(!self.is_out_of_bounds(buf_byte_len));
let buf_byte_len = buf_byte_len as u64;
// 2. Let O be taRecord.[[Object]].
// 3. If O.[[ArrayLength]] is not auto, return O.[[ArrayLength]].
if let Some(array_length) = self.array_length {
return array_length;
}
// 4. Assert: IsFixedLengthArrayBuffer(O.[[ViewedArrayBuffer]]) is false.
// 5. Let byteOffset be O.[[ByteOffset]].
let byte_offset = self.byte_offset;
// 6. Let elementSize be TypedArrayElementSize(O).
let elem_size = self.kind.element_size();
// 7. Let byteLength be taRecord.[[CachedBufferByteLength]].
// 8. Assert: byteLength is not detached.
// 9. Return floor((byteLength - byteOffset) / elementSize).
(buf_byte_len - byte_offset) / elem_size
}
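For auto-length views the element count is recomputed from the live buffer on every access, as floor((bufferByteLength − byteOffset) / elementSize). A small sketch, assuming a 4-byte element size as for `Uint32Array`:

```rust
/// Sketch of `TypedArrayLength` for an auto-length view.
fn auto_array_length(buf_byte_len: u64, byte_offset: u64, element_size: u64) -> u64 {
    (buf_byte_len - byte_offset) / element_size
}

fn main() {
    // A 4-byte-element view at offset 8 over a 24-byte resizable buffer -> 4 elements.
    assert_eq!(auto_array_length(24, 8, 4), 4);
    // After the buffer grows to 40 bytes, the same view exposes 8 elements.
    assert_eq!(auto_array_length(40, 8, 4), 8);
}
```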
/// Abstract operation [`ValidateTypedArray ( O, order )`][spec].
///
/// [spec]: https://tc39.es/ecma262/sec-validatetypedarray
pub(crate) fn validate(this: &JsValue, order: Ordering) -> JsResult<(JsObject<Self>, usize)> {
// 1. Perform ? RequireInternalSlot(O, [[TypedArrayName]]).
let obj = this
.as_object()
.and_then(|o| o.clone().downcast::<Self>().ok())
.ok_or_else(|| {
JsNativeError::typ().with_message("`this` is not a typed array object")
})?;
let len = {
let array = obj.borrow();
let buffer = array.data.viewed_array_buffer().as_buffer();
// 2. Assert: O has a [[ViewedArrayBuffer]] internal slot.
// 3. Let taRecord be MakeTypedArrayWithBufferWitnessRecord(O, order).
// 4. If IsTypedArrayOutOfBounds(taRecord) is true, throw a TypeError exception.
let Some(buf) = buffer
.bytes(order)
.filter(|buf| !array.data.is_out_of_bounds(buf.len()))
else {
return Err(JsNativeError::typ()
.with_message("typed array is outside the bounds of its inner buffer")
.into());
};
buf.len()
};
// 5. Return taRecord.
Ok((obj, len))
}
/// Validates `index` to be in bounds for the inner buffer of this `TypedArray`.
///
/// Note: if this is only used for bounds checking, it is recommended to use
/// the `Ordering::Relaxed` ordering to get the buffer slice.
pub(crate) fn validate_index(&self, index: f64, buf_len: usize) -> Option<u64> {
// 2. If IsIntegralNumber(index) is false, return false.
if index.is_nan() || index.is_infinite() || index.fract() != 0.0 {
return None;
}
// 3. If index is -0𝔽, return false.
if index == 0.0 && index.is_sign_negative() {
return None;
}
// 6. If IsTypedArrayOutOfBounds(taRecord) is true, return false.
if self.is_out_of_bounds(buf_len) {
return None;
}
// 7. Let length be TypedArrayLength(taRecord).
let length = self.array_length(buf_len);
// 8. If ℝ(index) < 0 or ℝ(index) ≥ length, return false.
if index < 0.0 || index >= length as f64 {
return None;
}
// 9. Return true.
Some(index as u64)
} }
} }
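`validate_index` combines the canonical numeric-index rules (integral, not -0, in range) with the array length recomputed from the live buffer. A standalone sketch of just the numeric part (the free function is illustrative, not Boa's method):

```rust
/// Sketch of the canonical-index check: only integral, non-`-0`,
/// in-range indices address an element.
fn validate_index(index: f64, length: u64) -> Option<u64> {
    if index.is_nan() || index.is_infinite() || index.fract() != 0.0 {
        return None; // not an integral Number
    }
    if index == 0.0 && index.is_sign_negative() {
        return None; // -0 is not a valid integer index
    }
    if index < 0.0 || index >= length as f64 {
        return None; // outside [0, length)
    }
    Some(index as u64)
}

fn main() {
    assert_eq!(validate_index(3.0, 4), Some(3));
    assert_eq!(validate_index(4.0, 4), None);  // past the end
    assert_eq!(validate_index(1.5, 4), None);  // not integral
    assert_eq!(validate_index(-0.0, 4), None); // negative zero
}
```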
@ -393,24 +536,32 @@ pub(crate) fn typed_array_exotic_own_property_keys(
.downcast_ref::<TypedArray>() .downcast_ref::<TypedArray>()
.expect("TypedArray exotic method should only be callable from TypedArray objects"); .expect("TypedArray exotic method should only be callable from TypedArray objects");
// 1. Let keys be a new empty List. // 1. Let taRecord be MakeTypedArrayWithBufferWitnessRecord(O, seq-cst).
let mut keys = if inner.is_detached() { // 2. Let keys be a new empty List.
vec![] // 3. If IsTypedArrayOutOfBounds(taRecord) is false, then
} else { let mut keys = match inner
// 2. If IsDetachedBuffer(O.[[ViewedArrayBuffer]]) is false, then .viewed_array_buffer
// a. For each integer i starting with 0 such that i < O.[[ArrayLength]], in ascending order, do .as_buffer()
// i. Add ! ToString(𝔽(i)) as the last element of keys. .bytes(Ordering::SeqCst)
(0..inner.array_length()).map(PropertyKey::from).collect() {
Some(buf) if !inner.is_out_of_bounds(buf.len()) => {
// a. Let length be TypedArrayLength(taRecord).
let length = inner.array_length(buf.len());
// b. For each integer i such that 0 ≤ i < length, in ascending order, do
// i. Append ! ToString(𝔽(i)) to keys.
(0..length).map(PropertyKey::from).collect()
}
_ => Vec::new(),
}; };
// 3. For each own property key P of O such that Type(P) is String and P is not an array index, in ascending chronological order of property creation, do // 4. For each own property key P of O such that P is a String and P is not an integer index, in ascending chronological order of property creation, do
// a. Add P as the last element of keys. // a. Append P to keys.
// // 5. For each own property key P of O such that P is a Symbol, in ascending chronological order of property creation, do
// 4. For each own property key P of O such that Type(P) is Symbol, in ascending chronological order of property creation, do // a. Append P to keys.
// a. Add P as the last element of keys.
keys.extend(obj.properties.shape.keys()); keys.extend(obj.properties.shape.keys());
// 5. Return keys. // 6. Return keys.
Ok(keys) Ok(keys)
} }
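Own-property-key enumeration now yields the integer indices only while the view is in bounds, followed by the object's remaining string and symbol keys in creation order. A rough standalone sketch of that ordering (plain strings stand in for property keys):

```rust
/// Sketch of the key ordering above: integer indices first (none if the view
/// is detached or out of bounds), then the remaining keys in creation order.
fn own_keys(length: Option<u64>, extra_keys: &[&str]) -> Vec<String> {
    let mut keys: Vec<String> = match length {
        Some(len) => (0..len).map(|i| i.to_string()).collect(),
        None => Vec::new(), // detached or out-of-bounds view exposes no indices
    };
    keys.extend(extra_keys.iter().map(|k| (*k).to_string()));
    keys
}

fn main() {
    assert_eq!(own_keys(Some(3), &["foo"]), ["0", "1", "2", "foo"]);
    assert_eq!(own_keys(None, &["foo"]), ["foo"]);
}
```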
@ -421,40 +572,38 @@ pub(crate) fn typed_array_exotic_own_property_keys(
/// ///
/// [spec]: https://tc39.es/ecma262/sec-typedarraygetelement /// [spec]: https://tc39.es/ecma262/sec-typedarraygetelement
fn typed_array_get_element(obj: &JsObject, index: f64) -> Option<JsValue> { fn typed_array_get_element(obj: &JsObject, index: f64) -> Option<JsValue> {
// 1. If ! IsValidIntegerIndex(O, index) is false, return undefined.
if !is_valid_integer_index(obj, index) {
return None;
}
let inner = obj let inner = obj
.downcast_ref::<TypedArray>() .downcast_ref::<TypedArray>()
.expect("Must be an TypedArray object"); .expect("Must be an TypedArray object");
let buffer = inner.viewed_array_buffer(); let buffer = inner.viewed_array_buffer();
let buffer = buffer.as_buffer(); let buffer = buffer.as_buffer();
let buffer = buffer
.data() // 1. If IsValidIntegerIndex(O, index) is false, return undefined.
.expect("already checked that it's not detached"); let Some(buffer) = buffer.bytes(Ordering::Relaxed) else {
return None;
};
let Some(index) = inner.validate_index(index, buffer.len()) else {
return None;
};
// 2. Let offset be O.[[ByteOffset]]. // 2. Let offset be O.[[ByteOffset]].
let offset = inner.byte_offset(); let offset = inner.byte_offset();
// 3. Let arrayTypeName be the String value of O.[[TypedArrayName]]. // 3. Let elementSize be TypedArrayElementSize(O).
// 6. Let elementType be the Element Type value in Table 73 for arrayTypeName. let size = inner.kind.element_size();
let elem_type = inner.kind();
// 4. Let elementSize be the Element Size value specified in Table 73 for arrayTypeName.
let size = elem_type.element_size();
// 5. Let indexedPosition be (ℝ(index) × elementSize) + offset. // 4. Let byteIndexInBuffer be (ℝ(index) × elementSize) + offset.
let indexed_position = ((index as u64 * size) + offset) as usize; let byte_index = ((index * size) + offset) as usize;
// 7. Return GetValueFromBuffer(O.[[ViewedArrayBuffer]], indexedPosition, elementType, true, Unordered). // 5. Let elementType be TypedArrayElementType(O).
let elem_type = inner.kind();
// 6. Return GetValueFromBuffer(O.[[ViewedArrayBuffer]], byteIndexInBuffer, elementType, true, unordered).
// SAFETY: The TypedArray object guarantees that the buffer is aligned. // SAFETY: The TypedArray object guarantees that the buffer is aligned.
// The call to `is_valid_integer_index` guarantees that the index is in-bounds. // The call to `validate_index` guarantees that the index is in-bounds.
let value = unsafe { let value = unsafe {
buffer buffer
.subslice(indexed_position..) .subslice(byte_index..)
.get_value(elem_type, atomic::Ordering::Relaxed) .get_value(elem_type, atomic::Ordering::Relaxed)
}; };
@ -473,49 +622,47 @@ pub(crate) fn typed_array_set_element(
value: &JsValue, value: &JsValue,
context: &mut InternalMethodContext<'_>, context: &mut InternalMethodContext<'_>,
) -> JsResult<()> { ) -> JsResult<()> {
let obj_borrow = obj.borrow(); let obj = obj
let inner = obj_borrow .clone()
.downcast_ref::<TypedArray>() .downcast::<TypedArray>()
.expect("TypedArray exotic method should only be callable from TypedArray objects"); .expect("function can only be called for typed array objects");
// b. Let arrayTypeName be the String value of O.[[TypedArrayName]].
// e. Let elementType be the Element Type value in Table 73 for arrayTypeName.
let elem_type = obj.borrow().data.kind();
// 1. If O.[[ContentType]] is BigInt, let numValue be ? ToBigInt(value). // 1. If O.[[ContentType]] is BigInt, let numValue be ? ToBigInt(value).
// 2. Otherwise, let numValue be ? ToNumber(value). // 2. Otherwise, let numValue be ? ToNumber(value).
let value = inner.kind().get_element(value, context)?; let value = elem_type.get_element(value, context)?;
if !is_valid_integer_index(obj, index) { // 3. If IsValidIntegerIndex(O, index) is true, then
let array = obj.borrow();
let mut buffer = array.data.viewed_array_buffer().as_buffer_mut();
let Some(mut buffer) = buffer.bytes(Ordering::Relaxed) else {
return Ok(()); return Ok(());
} };
let Some(index) = array.data.validate_index(index, buffer.len()) else {
return Ok(());
};
// 3. If ! IsValidIntegerIndex(O, index) is true, then
// a. Let offset be O.[[ByteOffset]]. // a. Let offset be O.[[ByteOffset]].
let offset = inner.byte_offset(); let offset = array.data.byte_offset();
// b. Let arrayTypeName be the String value of O.[[TypedArrayName]].
// e. Let elementType be the Element Type value in Table 73 for arrayTypeName.
let elem_type = inner.kind();
// c. Let elementSize be the Element Size value specified in Table 73 for arrayTypeName. // b. Let elementSize be TypedArrayElementSize(O).
let size = elem_type.element_size(); let size = elem_type.element_size();
// d. Let indexedPosition be (ℝ(index) × elementSize) + offset. // c. Let byteIndexInBuffer be (ℝ(index) × elementSize) + offset.
let indexed_position = ((index as u64 * size) + offset) as usize; let byte_index = ((index * size) + offset) as usize;
let buffer = inner.viewed_array_buffer();
let mut buffer = buffer.as_buffer_mut();
let mut buffer = buffer
.data_mut()
.expect("already checked that it's not detached");
// f. Perform SetValueInBuffer(O.[[ViewedArrayBuffer]], indexedPosition, elementType, numValue, true, Unordered).
// e. Perform SetValueInBuffer(O.[[ViewedArrayBuffer]], byteIndexInBuffer, elementType, numValue, true, unordered).
// SAFETY: The TypedArray object guarantees that the buffer is aligned. // SAFETY: The TypedArray object guarantees that the buffer is aligned.
// The call to `is_valid_integer_index` guarantees that the index is in-bounds. // The call to `validate_index` guarantees that the index is in-bounds.
unsafe { unsafe {
buffer buffer
.subslice_mut(indexed_position..) .subslice_mut(byte_index..)
.set_value(value, atomic::Ordering::Relaxed); .set_value(value, atomic::Ordering::Relaxed);
} }
// 4. Return NormalCompletion(undefined). // 4. Return unused.
Ok(()) Ok(())
} }

2
core/engine/src/context/hooks.rs

@ -210,7 +210,7 @@ pub trait HostHooks {
/// exhaust the virtual memory address space and to reduce interoperability risk. /// exhaust the virtual memory address space and to reduce interoperability risk.
/// ///
/// ///
/// [specification]: https://tc39.es/ecma262/multipage/structured-data.html#sec-resizable-arraybuffer-guidelines /// [specification]: https://tc39.es/ecma262/#sec-resizable-arraybuffer-guidelines
fn max_buffer_size(&self, _context: &mut Context) -> u64 { fn max_buffer_size(&self, _context: &mut Context) -> u64 {
1_610_612_736 // 1.5 GiB 1_610_612_736 // 1.5 GiB
} }
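As a quick sanity check on the default above, 1.5 GiB is exactly 3/2 × 1024³ bytes:

```rust
fn main() {
    // 1.5 GiB = 3/2 × 1024³ bytes.
    assert_eq!(3 * 1024 * 1024 * 1024 / 2, 1_610_612_736_u64);
}
```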

5
core/engine/src/object/builtins/jsarraybuffer.rs

@ -61,6 +61,7 @@ impl JsArrayBuffer {
.constructor() .constructor()
.into(), .into(),
byte_length as u64, byte_length as u64,
None,
context, context,
)?; )?;
@ -236,7 +237,7 @@ impl JsArrayBuffer {
#[inline] #[inline]
#[must_use] #[must_use]
pub fn data(&self) -> Option<GcRef<'_, [u8]>> { pub fn data(&self) -> Option<GcRef<'_, [u8]>> {
GcRef::try_map(self.inner.borrow(), |o| o.data.data()) GcRef::try_map(self.inner.borrow(), |o| o.data.bytes())
} }
/// Get a mutable reference to the [`JsArrayBuffer`]'s data. /// Get a mutable reference to the [`JsArrayBuffer`]'s data.
@ -269,7 +270,7 @@ impl JsArrayBuffer {
#[inline] #[inline]
#[must_use] #[must_use]
pub fn data_mut(&self) -> Option<GcRefMut<'_, Object<ArrayBuffer>, [u8]>> { pub fn data_mut(&self) -> Option<GcRefMut<'_, Object<ArrayBuffer>, [u8]>> {
GcRefMut::try_map(self.inner.borrow_mut(), |o| o.data.data_mut()) GcRefMut::try_map(self.inner.borrow_mut(), |o| o.data.bytes_mut())
} }
} }

94
core/engine/src/object/builtins/jsdataview.rs

@ -1,10 +1,7 @@
//! A Rust API wrapper for Boa's `DataView` Builtin ECMAScript Object //! A Rust API wrapper for Boa's `DataView` Builtin ECMAScript Object
use crate::{ use crate::{
builtins::{array_buffer::BufferObject, DataView}, builtins::{array_buffer::BufferObject, DataView},
context::intrinsics::StandardConstructors, object::{JsArrayBuffer, JsObject, JsObjectType},
object::{
internal_methods::get_prototype_from_constructor, JsArrayBuffer, JsObject, JsObjectType,
},
value::TryFromJs, value::TryFromJs,
Context, JsNativeError, JsResult, JsValue, Context, JsNativeError, JsResult, JsValue,
}; };
@ -55,66 +52,101 @@ impl From<JsObject<DataView>> for JsDataView {
impl JsDataView { impl JsDataView {
/// Create a new `JsDataView` object from an existing `JsArrayBuffer`. /// Create a new `JsDataView` object from an existing `JsArrayBuffer`.
pub fn from_js_array_buffer( pub fn from_js_array_buffer(
array_buffer: JsArrayBuffer, buffer: JsArrayBuffer,
offset: Option<u64>, offset: Option<u64>,
byte_length: Option<u64>, byte_len: Option<u64>,
context: &mut Context, context: &mut Context,
) -> JsResult<Self> { ) -> JsResult<Self> {
let (byte_offset, byte_length) = { let offset = offset.unwrap_or_default();
let buffer = array_buffer.borrow();
let provided_offset = offset.unwrap_or(0_u64); let (buf_byte_len, is_fixed_len) = {
let buffer = buffer.borrow();
let buffer = &buffer.data;
// Check if buffer is detached. // 4. If IsDetachedBuffer(buffer) is true, throw a TypeError exception.
if buffer.data.is_detached() { let Some(slice) = buffer.bytes() else {
return Err(JsNativeError::typ() return Err(JsNativeError::typ()
.with_message("ArrayBuffer is detached") .with_message("ArrayBuffer is detached")
.into()); .into());
}; };
let array_buffer_length = buffer.data.len() as u64; // 5. Let bufferByteLength be ArrayBufferByteLength(buffer, seq-cst).
let buf_len = slice.len() as u64;
if provided_offset > array_buffer_length { // 6. If offset > bufferByteLength, throw a RangeError exception.
if offset > buf_len {
return Err(JsNativeError::range() return Err(JsNativeError::range()
.with_message("Provided offset is outside the bounds of the buffer") .with_message("Start offset is outside the bounds of the buffer")
.into()); .into());
} }
let view_byte_length = if let Some(provided_length) = byte_length { // 7. Let bufferIsFixedLength be IsFixedLengthArrayBuffer(buffer).
// Check that the provided length and offset does not exceed the bounds of the ArrayBuffer (buf_len, buffer.is_fixed_len())
if provided_offset + provided_length > array_buffer_length { };
// 8. If byteLength is undefined, then
let view_byte_len = if let Some(byte_len) = byte_len {
// 9. Else,
// a. Let viewByteLength be ? ToIndex(byteLength).
// b. If offset + viewByteLength > bufferByteLength, throw a RangeError exception.
if offset + byte_len > buf_byte_len {
return Err(JsNativeError::range() return Err(JsNativeError::range()
.with_message("Invalid data view length") .with_message("Invalid data view length")
.into()); .into());
} }
provided_length Some(byte_len)
} else { } else {
array_buffer_length - provided_offset // a. If bufferIsFixedLength is true, then
// i. Let viewByteLength be bufferByteLength - offset.
// b. Else,
// i. Let viewByteLength be auto.
is_fixed_len.then_some(buf_byte_len - offset)
}; };
(provided_offset, view_byte_length) // 10. Let O be ? OrdinaryCreateFromConstructor(NewTarget, "%DataView.prototype%",
// « [[DataView]], [[ViewedArrayBuffer]], [[ByteLength]], [[ByteOffset]] »).
let prototype = context.intrinsics().constructors().data_view().prototype();
// 11. If IsDetachedBuffer(buffer) is true, throw a TypeError exception.
// 12. Set bufferByteLength to ArrayBufferByteLength(buffer, seq-cst).
let Some(buf_byte_len) = buffer.borrow().data.bytes().map(|s| s.len() as u64) else {
return Err(JsNativeError::typ()
.with_message("ArrayBuffer is detached")
.into());
}; };
let constructor = context // 13. If offset > bufferByteLength, throw a RangeError exception.
.intrinsics() if offset > buf_byte_len {
.constructors() return Err(JsNativeError::range()
.data_view() .with_message("DataView offset outside of buffer array bounds")
.constructor() .into());
.into(); }
let prototype = // 14. If byteLength is not undefined, then
get_prototype_from_constructor(&constructor, StandardConstructors::data_view, context)?; if let Some(view_byte_len) = view_byte_len.filter(|_| byte_len.is_some()) {
// a. If offset + viewByteLength > bufferByteLength, throw a RangeError exception.
if offset + view_byte_len > buf_byte_len {
return Err(JsNativeError::range()
.with_message("DataView offset outside of buffer array bounds")
.into());
}
}
let obj = JsObject::new( let obj = JsObject::new(
context.root_shape(), context.root_shape(),
prototype, prototype,
DataView { DataView {
viewed_array_buffer: BufferObject::Buffer(array_buffer.into()), // 15. Set O.[[ViewedArrayBuffer]] to buffer.
byte_length, viewed_array_buffer: BufferObject::Buffer(buffer.into()),
byte_offset, // 16. Set O.[[ByteLength]] to viewByteLength.
byte_length: view_byte_len,
// 17. Set O.[[ByteOffset]] to offset.
byte_offset: offset,
}, },
); );
// 18. Return O.
Ok(Self { inner: obj }) Ok(Self { inner: obj })
} }
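The length resolution above boils down to: reject offsets or lengths that overflow the buffer, default to `bufferByteLength - offset` for fixed-length buffers, and leave the length as auto (`None`) for resizable ones. A standalone sketch of that decision with plain integers (hypothetical error strings reuse the messages above):

```rust
/// Sketch of the view-length resolution in `from_js_array_buffer`.
fn resolve_view_len(
    buf_byte_len: u64,
    offset: u64,
    byte_len: Option<u64>,
    buffer_is_fixed_len: bool,
) -> Result<Option<u64>, &'static str> {
    if offset > buf_byte_len {
        return Err("Start offset is outside the bounds of the buffer");
    }
    match byte_len {
        // An explicit length must fit between the offset and the end of the buffer.
        Some(len) if offset + len > buf_byte_len => Err("Invalid data view length"),
        Some(len) => Ok(Some(len)),
        // Fixed-length buffers get a concrete default; resizable ones track the buffer.
        None => Ok(buffer_is_fixed_len.then_some(buf_byte_len - offset)),
    }
}

fn main() {
    // Fixed 16-byte buffer, offset 4, no explicit length -> 12-byte view.
    assert_eq!(resolve_view_len(16, 4, None, true), Ok(Some(12)));
    // Resizable buffer, no explicit length -> auto-length view.
    assert_eq!(resolve_view_len(16, 4, None, false), Ok(None));
    // Explicit length overflowing the buffer is rejected.
    assert!(resolve_view_len(16, 4, Some(13), true).is_err());
}
```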

5
core/engine/src/object/builtins/jssharedarraybuffer.rs

@ -7,7 +7,7 @@ use crate::{
Context, JsResult, JsValue, Context, JsResult, JsValue,
}; };
use boa_gc::{Finalize, Trace}; use boa_gc::{Finalize, Trace};
use std::ops::Deref; use std::{ops::Deref, sync::atomic::Ordering};
/// `JsSharedArrayBuffer` provides a wrapper for Boa's implementation of the ECMAScript `ArrayBuffer` object /// `JsSharedArrayBuffer` provides a wrapper for Boa's implementation of the ECMAScript `ArrayBuffer` object
#[derive(Debug, Clone, Trace, Finalize)] #[derive(Debug, Clone, Trace, Finalize)]
@ -42,6 +42,7 @@ impl JsSharedArrayBuffer {
.constructor() .constructor()
.into(), .into(),
byte_length as u64, byte_length as u64,
None,
context, context,
)?; )?;
@ -83,7 +84,7 @@ impl JsSharedArrayBuffer {
#[inline] #[inline]
#[must_use] #[must_use]
pub fn byte_length(&self) -> usize { pub fn byte_length(&self) -> usize {
self.borrow().data.len() self.borrow().data.len(Ordering::SeqCst)
} }
/// Gets the raw buffer of this `JsSharedArrayBuffer`. /// Gets the raw buffer of this `JsSharedArrayBuffer`.

1
test262_config.toml

@ -9,7 +9,6 @@ features = [
"FinalizationRegistry", "FinalizationRegistry",
"IsHTMLDDA", "IsHTMLDDA",
"resizable-arraybuffer",
"symbols-as-weakmap-keys", "symbols-as-weakmap-keys",
"intl-normative-optional", "intl-normative-optional",
"Intl.DisplayNames", "Intl.DisplayNames",
