Enable sm test suite and fix ephemeron bug
jedel1043 committed Jan 4, 2025
1 parent 13344ce commit ce95477
Showing 11 changed files with 238 additions and 121 deletions.
2 changes: 1 addition & 1 deletion .vscode/launch.json
@@ -42,7 +42,7 @@
         "program": "${workspaceFolder}/target/debug/boa_tester.exe"
       },
       "program": "${workspaceFolder}/target/debug/boa_tester",
-      "args": ["run", "-s", "${input:testPath}", "-vvv"],
+      "args": ["run", "-s", "${input:testPath}", "-vvv", "-d"],
       "sourceLanguages": ["rust"],
       "preLaunchTask": "Cargo Build boa_tester"
     }
1 change: 1 addition & 0 deletions core/engine/src/builtins/array_buffer/mod.rs
@@ -158,6 +158,7 @@ impl BufferObject {
 
     /// Gets the mutable buffer data of the object
     #[inline]
+    #[track_caller]
     pub(crate) fn as_buffer_mut(
         &self,
     ) -> BufferRefMut<
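Context on the new attribute: `#[track_caller]` makes a panic raised inside the annotated function get reported at its caller's location, so a borrow failure inside `as_buffer_mut` points at the offending call site rather than at this function's body. A standalone sketch of the effect (hypothetical names, not code from this commit):

#[track_caller]
fn must_be_positive(x: i32) -> i32 {
    // Because of #[track_caller], this panic is attributed to the caller's line.
    assert!(x > 0, "expected a positive value");
    x
}

fn main() {
    println!("{}", must_be_positive(5));
    // must_be_positive(-1); // would panic, blaming this call site, not the assert
}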
41 changes: 41 additions & 0 deletions core/engine/src/builtins/array_buffer/utils.rs
@@ -211,6 +211,19 @@ impl SliceRefMut<'_> {
         }
     }
 
+    /// Gets a subslice of this `SliceRefMut`.
+    pub(crate) fn subslice<I>(&self, index: I) -> SliceRef<'_>
+    where
+        I: SliceIndex<[u8], Output = [u8]> + SliceIndex<[AtomicU8], Output = [AtomicU8]>,
+    {
+        match self {
+            Self::Slice(buffer) => SliceRef::Slice(buffer.get(index).expect("index out of bounds")),
+            Self::AtomicSlice(buffer) => {
+                SliceRef::AtomicSlice(buffer.get(index).expect("index out of bounds"))
+            }
+        }
+    }
+
     /// Gets a mutable subslice of this `SliceRefMut`.
     pub(crate) fn subslice_mut<I>(&mut self, index: I) -> SliceRefMut<'_>
     where
@@ -403,6 +416,34 @@ pub(crate) unsafe fn memcpy(src: BytesConstPtr, dest: BytesMutPtr, count: usize)
     }
 }
 
+/// Copies `count` bytes from the position `from` to the position `to` in `ptr`, but always
+/// copying from left to right.
+///
+/// # Safety
+///
+/// - `ptr` must be valid at the offset `ptr + from` for `count` reads of bytes.
+/// - `ptr` must be valid at the offset `ptr + to` for `count` writes of bytes.
+// This looks like a worse version of `memmove`... and it is exactly that...
+// but it's the correct behaviour for a weird usage of `%TypedArray%.prototype.slice` so ¯\_(ツ)_/¯.
+// Obviously don't use this if you need to implement something that requires a "proper" memmove.
+pub(crate) unsafe fn memmove_naive(ptr: BytesMutPtr, from: usize, to: usize, count: usize) {
+    match ptr {
+        // SAFETY: The invariants of this operation are ensured by the caller of the function.
+        BytesMutPtr::Bytes(ptr) => unsafe {
+            for i in 0..count {
+                ptr::copy(ptr.add(from + i), ptr.add(to + i), 1);
+            }
+        },
+        // SAFETY: The invariants of this operation are ensured by the caller of the function.
+        BytesMutPtr::AtomicBytes(ptr) => unsafe {
+            let src = ptr.add(from);
+            let dest = ptr.add(to);
+            copy_shared_to_shared(src, dest, count);
+        },
+    }
+}
+
 /// Copies `count` bytes from the position `from` to the position `to` in `buffer`.
 ///
 /// # Safety
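Why a left-to-right copy rather than a real `memmove`: when the ranges overlap and `to > from`, a forward byte-by-byte copy re-reads bytes it has already overwritten, which is exactly the behaviour of the spec's per-byte GetValueFromBuffer/SetValueInBuffer loop. A safe-Rust model of the difference (a standalone sketch, not code from this commit):

// Safe model of the naive forward copy performed by `memmove_naive`.
fn memmove_naive_model(buf: &mut [u8], from: usize, to: usize, count: usize) {
    for i in 0..count {
        buf[to + i] = buf[from + i];
    }
}

fn main() {
    let mut buf = [1, 2, 3, 4, 5];
    // Overlapping ranges: copy 4 bytes from offset 0 to offset 1.
    memmove_naive_model(&mut buf, 0, 1, 4);
    // A "proper" memmove would yield [1, 1, 2, 3, 4]; the forward copy
    // smears the first byte across the overlap instead:
    assert_eq!(buf, [1, 1, 1, 1, 1]);
}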
138 changes: 80 additions & 58 deletions core/engine/src/builtins/typed_array/builtin.rs
@@ -9,7 +9,7 @@ use num_traits::Zero;
 use super::{
     object::typed_array_set_element, ContentType, TypedArray, TypedArrayKind, TypedArrayMarker,
 };
-use crate::value::JsVariant;
+use crate::{builtins::array_buffer::utils::memmove_naive, value::JsVariant};
 use crate::{
     builtins::{
         array::{find_via_predicate, ArrayIterator, Direction},
@@ -2048,8 +2048,8 @@ impl BuiltinTypedArray {
 
             // a. Set taRecord to MakeTypedArrayWithBufferWitnessRecord(O, seq-cst).
             // b. If IsTypedArrayOutOfBounds(taRecord) is true, throw a TypeError exception.
-            let src_buf_borrow = src_borrow.data.viewed_array_buffer().as_buffer();
-            let Some(src_buf) = src_buf_borrow
+            let mut src_buf_borrow = src_borrow.data.viewed_array_buffer().as_buffer_mut();
+            let Some(mut src_buf) = src_buf_borrow
                 .bytes(Ordering::SeqCst)
                 .filter(|s| !src_borrow.data.is_out_of_bounds(s.len()))
             else {
@@ -2067,41 +2067,88 @@
             // f. Let targetType be TypedArrayElementType(A).
             let target_type = target_borrow.data.kind();
 
+            if src_type != target_type {
+                // h. Else,
+                drop(src_buf_borrow);
+                drop((src_borrow, target_borrow));
+
+                // i. Let n be 0.
+                // ii. Let k be startIndex.
+                // iii. Repeat, while k < endIndex,
+                let src = src.upcast();
+                let target = target.upcast();
+                for (n, k) in (start_index..end_index).enumerate() {
+                    // 1. Let Pk be ! ToString(𝔽(k)).
+                    // 2. Let kValue be ! Get(O, Pk).
+                    let k_value = src.get(k, context).expect("Get cannot fail here");
+
+                    // 3. Perform ! Set(A, ! ToString(𝔽(n)), kValue, true).
+                    target
+                        .set(n, k_value, true, context)
+                        .expect("Set cannot fail here");
+
+                    // 4. Set k to k + 1.
+                    // 5. Set n to n + 1.
+                }
+
+                // 15. Return A.
+                return Ok(target.into());
+            }
+
             // g. If srcType is targetType, then
-            if src_type == target_type {
-                {
-                    let byte_count = count * src_type.element_size() as usize;
+            {
+                let byte_count = count * src_type.element_size() as usize;
 
-                    // i. NOTE: The transfer must be performed in a manner that preserves the bit-level encoding of the source data.
-                    // ii. Let srcBuffer be O.[[ViewedArrayBuffer]].
-                    // iii. Let targetBuffer be A.[[ViewedArrayBuffer]].
-                    let target_borrow = target_borrow;
-                    let mut target_buf = target_borrow.data.viewed_array_buffer().as_buffer_mut();
-                    let mut target_buf = target_buf
-                        .bytes_with_len(byte_count)
-                        .expect("newly created array cannot be detached");
+                // i. NOTE: The transfer must be performed in a manner that preserves the bit-level encoding of the source data.
+                // ii. Let srcBuffer be O.[[ViewedArrayBuffer]].
+                // iii. Let targetBuffer be A.[[ViewedArrayBuffer]].
 
-                    // iv. Let elementSize be TypedArrayElementSize(O).
-                    let element_size = src_type.element_size();
+                // iv. Let elementSize be TypedArrayElementSize(O).
+                let element_size = src_type.element_size();
 
-                    // v. Let srcByteOffset be O.[[ByteOffset]].
-                    let src_byte_offset = src_borrow.data.byte_offset();
+                // v. Let srcByteOffset be O.[[ByteOffset]].
+                let src_byte_offset = src_borrow.data.byte_offset();
 
-                    // vi. Let srcByteIndex be (startIndex × elementSize) + srcByteOffset.
-                    let src_byte_index = (start_index * element_size + src_byte_offset) as usize;
+                // vi. Let srcByteIndex be (startIndex × elementSize) + srcByteOffset.
+                let src_byte_index = (start_index * element_size + src_byte_offset) as usize;
 
-                    // vii. Let targetByteIndex be A.[[ByteOffset]].
-                    let target_byte_index = target_borrow.data.byte_offset() as usize;
+                // vii. Let targetByteIndex be A.[[ByteOffset]].
+                let target_byte_index = target_borrow.data.byte_offset() as usize;
 
-                    // viii. Let endByteIndex be targetByteIndex + (countBytes × elementSize).
-                    // Not needed by the impl.
+                // viii. Let endByteIndex be targetByteIndex + (countBytes × elementSize).
+                // Not needed by the impl.
 
-                    // ix. Repeat, while targetByteIndex < endByteIndex,
-                    // 1. Let value be GetValueFromBuffer(srcBuffer, srcByteIndex, uint8, true, unordered).
-                    // 2. Perform SetValueInBuffer(targetBuffer, targetByteIndex, uint8, value, true, unordered).
-                    // 3. Set srcByteIndex to srcByteIndex + 1.
-                    // 4. Set targetByteIndex to targetByteIndex + 1.
+                if BufferObject::equals(
+                    src_borrow.data.viewed_array_buffer(),
+                    target_borrow.data.viewed_array_buffer(),
+                ) {
+                    // cannot borrow the target mutably (overlapping bytes), but we can move the data instead.
+
+                    // ix. Repeat, while targetByteIndex < endByteIndex,
+                    // 1. Let value be GetValueFromBuffer(srcBuffer, srcByteIndex, uint8, true, unordered).
+                    // 2. Perform SetValueInBuffer(targetBuffer, targetByteIndex, uint8, value, true, unordered).
+                    // 3. Set srcByteIndex to srcByteIndex + 1.
+                    // 4. Set targetByteIndex to targetByteIndex + 1.
+                    #[cfg(debug_assertions)]
+                    {
+                        assert!(src_buf.subslice_mut(src_byte_index..).len() >= byte_count);
+                        assert!(src_buf.subslice_mut(target_byte_index..).len() >= byte_count);
+                    }
+
+                    // SAFETY: All previous checks put the copied bytes at least within the bounds of `src_buf`.
+                    unsafe {
+                        memmove_naive(
+                            src_buf.as_ptr(),
+                            src_byte_index,
+                            target_byte_index,
+                            byte_count,
+                        );
+                    }
+                } else {
+                    let mut target_buf = target_borrow.data.viewed_array_buffer().as_buffer_mut();
+                    let mut target_buf = target_buf
+                        .bytes(Ordering::SeqCst)
+                        .expect("newly created array cannot be detached");
                     let src = src_buf.subslice(src_byte_index..);
                     let mut target = target_buf.subslice_mut(target_byte_index..);
 
@@ -2118,36 +2165,11 @@
                         memcpy(src.as_ptr(), target.as_ptr(), byte_count);
                     }
                 }
-
-                // 15. Return A.
-                Ok(target.upcast().into())
-            } else {
-                // h. Else,
-                drop(src_buf_borrow);
-                drop((src_borrow, target_borrow));
-
-                // i. Let n be 0.
-                // ii. Let k be startIndex.
-                // iii. Repeat, while k < endIndex,
-                let src = src.upcast();
-                let target = target.upcast();
-                for (n, k) in (start_index..end_index).enumerate() {
-                    // 1. Let Pk be ! ToString(𝔽(k)).
-                    // 2. Let kValue be ! Get(O, Pk).
-                    let k_value = src.get(k, context).expect("Get cannot fail here");
-
-                    // 3. Perform ! Set(A, ! ToString(𝔽(n)), kValue, true).
-                    target
-                        .set(n, k_value, true, context)
-                        .expect("Set cannot fail here");
-
-                    // 4. Set k to k + 1.
-                    // 5. Set n to n + 1.
-                }
-
-                // 15. Return A.
-                Ok(target.into())
             }
+
+            drop(target_borrow);
+            // 15. Return A.
+            Ok(target.upcast().into())
         }
 
     /// `%TypedArray%.prototype.some ( callbackfn [ , thisArg ] )`
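How the same-buffer branch above is reachable at all: TypedArraySpeciesCreate lets user code hand `slice` a target that views the source's own buffer, which is the "weird usage" the `memmove_naive` comment alludes to. A hypothetical repro sketch (the species trick is an assumption about one way to hit this path; `Context::eval` and `Source::from_bytes` are `boa_engine`'s public API):

use boa_engine::{Context, Source};

fn main() {
    let mut context = Context::default();
    // A species constructor that returns a view over the SAME buffer,
    // forcing `slice` to copy within a single allocation.
    let code = r#"
        class Evil extends Uint8Array {
            static get [Symbol.species]() {
                return function (len) {
                    return new Uint8Array(evil.buffer, 1, len);
                };
            }
        }
        var evil = new Evil([1, 2, 3, 4, 5]);
        Array.from(evil.slice(0, 4)).join(",");
    "#;
    let result = context.eval(Source::from_bytes(code)).unwrap();
    // The spec's forward byte-by-byte copy smears the first byte: expect "1,1,1,1".
    println!("{}", result.display());
}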
1 change: 1 addition & 0 deletions core/engine/src/environments/runtime/mod.rs
@@ -506,6 +506,7 @@ impl Context {
     /// # Panics
     ///
     /// Panics if the environment or binding index are out of range.
+    #[track_caller]
     pub(crate) fn get_binding(&mut self, locator: &BindingLocator) -> JsResult<Option<JsValue>> {
         match locator.scope() {
             BindingLocatorScope::GlobalObject => {
53 changes: 12 additions & 41 deletions core/gc/src/lib.rs
@@ -325,23 +325,9 @@ impl Collector {
             if node_ref.is_rooted() {
                 tracer.enqueue(*node);
 
-                while let Some(node) = tracer.next() {
-                    // SAFETY: the gc heap object should be alive if there is a root.
-                    let node_ref = unsafe { node.as_ref() };
-
-                    if !node_ref.header.is_marked() {
-                        node_ref.header.mark();
-
-                        // SAFETY: if `GcBox::trace_inner()` has been called, then,
-                        // this box must have been deemed as reachable via tracing
-                        // from a root, which by extension means that value has not
-                        // been dropped either.
-
-                        let trace_fn = node_ref.trace_fn();
-
-                        // SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
-                        unsafe { trace_fn(node, tracer) }
-                    }
-                }
+                // SAFETY: all nodes must be valid as this phase cannot drop any node.
+                unsafe {
+                    tracer.trace_until_empty();
+                }
             } else if !node_ref.is_marked() {
                 strong_dead.push(*node);
@@ -378,14 +364,9 @@ impl Collector {
                 pending_ephemerons.push(*eph);
             }
 
-            while let Some(node) = tracer.next() {
-                // SAFETY: node must be valid as this phase cannot drop any node.
-                let trace_fn = unsafe { node.as_ref() }.trace_fn();
-
-                // SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
-                unsafe {
-                    trace_fn(node, tracer);
-                }
-            }
+            // SAFETY: all nodes must be valid as this phase cannot drop any node.
+            unsafe {
+                tracer.trace_until_empty();
+            }
         }

@@ -397,14 +378,9 @@
             // SAFETY: The garbage collector ensures that all nodes are valid.
             unsafe { node_ref.trace(tracer) };
 
-            while let Some(node) = tracer.next() {
-                // SAFETY: node must be valid as this phase cannot drop any node.
-                let trace_fn = unsafe { node.as_ref() }.trace_fn();
-
-                // SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
-                unsafe {
-                    trace_fn(node, tracer);
-                }
-            }
+            // SAFETY: all nodes must be valid as this phase cannot drop any node.
+            unsafe {
+                tracer.trace_until_empty();
+            }
         }

@@ -419,14 +395,9 @@
             // SAFETY: the garbage collector ensures `eph_ref` always points to valid data.
             let is_key_marked = unsafe { !eph_ref.trace(tracer) };
 
-            while let Some(node) = tracer.next() {
-                // SAFETY: node must be valid as this phase cannot drop any node.
-                let trace_fn = unsafe { node.as_ref() }.trace_fn();
-
-                // SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
-                unsafe {
-                    trace_fn(node, tracer);
-                }
-            }
+            // SAFETY: all nodes must be valid as this phase cannot drop any node.
+            unsafe {
+                tracer.trace_until_empty();
+            }
 
             is_key_marked
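`trace_until_empty` itself is not shown in this diff; it presumably folds the four hand-rolled `while let Some(node) = tracer.next()` loops above into a single tracer method. A self-contained model of that worklist pattern (hypothetical, simplified types, not the crate's internals):

use std::collections::{HashSet, VecDeque};

struct Tracer {
    queue: VecDeque<usize>,
    marked: HashSet<usize>,
}

impl Tracer {
    fn enqueue(&mut self, node: usize) {
        self.queue.push_back(node);
    }

    /// Drains the queue, marking each reachable node once and enqueueing its children.
    fn trace_until_empty(&mut self, edges: &[Vec<usize>]) {
        while let Some(node) = self.queue.pop_front() {
            if self.marked.insert(node) {
                for &child in &edges[node] {
                    self.enqueue(child);
                }
            }
        }
    }
}

fn main() {
    // Heap shape: 0 -> 1 -> 2, node 3 unreachable.
    let edges = vec![vec![1], vec![2], vec![], vec![]];
    let mut tracer = Tracer { queue: VecDeque::new(), marked: HashSet::new() };
    tracer.enqueue(0);
    tracer.trace_until_empty(&edges);
    assert!(tracer.marked.contains(&2));
    assert!(!tracer.marked.contains(&3));
}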
44 changes: 43 additions & 1 deletion core/gc/src/test/weak.rs
@@ -2,7 +2,8 @@ use std::{cell::Cell, rc::Rc};
 
 use super::run_test;
 use crate::{
-    force_collect, test::Harness, Ephemeron, Finalize, Gc, GcBox, GcRefCell, Trace, WeakGc,
+    force_collect, internals::EphemeronBox, test::Harness, Ephemeron, Finalize, Gc, GcBox,
+    GcRefCell, Trace, WeakGc,
 };
 
 #[test]
@@ -290,3 +291,44 @@ fn eph_gc_finalizer() {
         assert_eq!(val.inner.get(), 1);
     });
 }
+
+#[test]
+fn eph_strong_self_reference() {
+    type Inner = GcRefCell<(Option<TestCell>, Option<TestCell>)>;
+    #[derive(Trace, Finalize, Clone)]
+    struct TestCell {
+        inner: Gc<Inner>,
+    }
+    run_test(|| {
+        let root = TestCell {
+            inner: Gc::new(GcRefCell::new((None, None))),
+        };
+        let root_size = size_of::<GcBox<Inner>>();
+
+        Harness::assert_exact_bytes_allocated(root_size);
+
+        let watched = Gc::new(0);
+        let watched_size = size_of::<GcBox<i32>>();
+
+        {
+            let eph = Ephemeron::new(&watched, root.clone());
+            let eph_size = size_of::<EphemeronBox<Gc<i32>, TestCell>>();
+
+            root.inner.borrow_mut().0 = Some(root.clone());
+            root.inner.borrow_mut().1 = Some(root.clone());
+
+            force_collect();
+
+            assert!(eph.value().is_some());
+            Harness::assert_exact_bytes_allocated(root_size + eph_size + watched_size);
+        }
+
+        force_collect();
+
+        drop(watched);
+
+        force_collect();
+
+        Harness::assert_exact_bytes_allocated(root_size);
+    });
+}
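For context, the contract the new test exercises, shown as a usage sketch of the same `boa_gc` API (the `i32` key and `String` value are illustrative): an ephemeron's value stays retrievable only while its key is alive, and the test above additionally guards against a value that strongly references itself keeping the entry alive on its own.

use boa_gc::{force_collect, Ephemeron, Gc};

fn main() {
    let key = Gc::new(42_i32);
    let eph = Ephemeron::new(&key, String::from("value"));

    force_collect();
    // The key is still rooted, so the value is retrievable.
    assert!(eph.value().is_some());

    drop(key);
    force_collect();
    // The key was collected, so the ephemeron was cleared.
    assert!(eph.value().is_none());
}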
