0
0
Fork 0
mirror of https://github.com/denoland/rusty_v8.git synced 2025-01-12 17:09:28 -05:00

Refactor C++ shared pointer wrappers (#355)

* Add `SharedPtr` as a nullable sibling to `SharedRef`.
* Add `Borrow`, `AsRef` and `AsMut` implementations as appropriate.
* `SharedRef<T>` now derefs to `T` rather than to `UnsafeCell<T>`.
* `array_buffer::BackingStore` now derefs to `[Cell<u8>]`.
This commit is contained in:
Bert Belder 2020-04-20 19:34:48 +02:00
parent d3a6c1403b
commit fc582316db
No known key found for this signature in database
GPG key ID: 7A77887B2E2ED461
5 changed files with 251 additions and 167 deletions

View file

@ -1,15 +1,17 @@
// Copyright 2019-2020 the Deno authors. All rights reserved. MIT license.
use std::cell::Cell;
use std::ffi::c_void;
use std::ops::Deref;
use std::ops::DerefMut;
use std::ptr::null_mut;
use std::slice;
use crate::support::long;
use crate::support::Opaque;
use crate::support::Shared;
use crate::support::SharedPtrBase;
use crate::support::SharedRef;
use crate::support::UniquePtr;
use crate::support::UniqueRef;
use crate::ArrayBuffer;
use crate::InIsolate;
@ -49,35 +51,35 @@ extern "C" {
fn v8__BackingStore__DELETE(this: *mut BackingStore);
fn std__shared_ptr__v8__BackingStore__COPY(
ptr: *const SharedRef<BackingStore>,
) -> SharedRef<BackingStore>;
ptr: *const SharedPtrBase<BackingStore>,
) -> SharedPtrBase<BackingStore>;
fn std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
unique: UniqueRef<BackingStore>,
) -> SharedRef<BackingStore>;
unique_ptr: UniquePtr<BackingStore>,
) -> SharedPtrBase<BackingStore>;
fn std__shared_ptr__v8__BackingStore__get(
ptr: *const SharedRef<BackingStore>,
ptr: *const SharedPtrBase<BackingStore>,
) -> *mut BackingStore;
fn std__shared_ptr__v8__BackingStore__reset(
ptr: *mut SharedRef<BackingStore>,
ptr: *mut SharedPtrBase<BackingStore>,
);
fn std__shared_ptr__v8__BackingStore__use_count(
ptr: *const SharedRef<BackingStore>,
ptr: *const SharedPtrBase<BackingStore>,
) -> long;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(
ptr: *const SharedRef<Allocator>,
) -> SharedRef<Allocator>;
ptr: *const SharedPtrBase<Allocator>,
) -> SharedPtrBase<Allocator>;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
unique: UniqueRef<Allocator>,
) -> SharedRef<Allocator>;
unique_ptr: UniquePtr<Allocator>,
) -> SharedPtrBase<Allocator>;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__get(
ptr: *const SharedRef<Allocator>,
ptr: *const SharedPtrBase<Allocator>,
) -> *mut Allocator;
fn std__shared_ptr__v8__ArrayBuffer__Allocator__reset(
ptr: *mut SharedRef<Allocator>,
ptr: *mut SharedPtrBase<Allocator>,
);
fn std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(
ptr: *const SharedRef<Allocator>,
ptr: *const SharedPtrBase<Allocator>,
) -> long;
}
@ -102,23 +104,23 @@ extern "C" {
pub struct Allocator(Opaque);
impl Shared for Allocator {
fn clone(ptr: *const SharedRef<Self>) -> SharedRef<Self> {
fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(ptr) }
}
fn from_unique(unique: UniqueRef<Self>) -> SharedRef<Self> {
fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
unsafe {
std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
unique,
unique_ptr,
)
}
}
fn deref(ptr: *const SharedRef<Self>) -> *mut Self {
fn get(ptr: &SharedPtrBase<Self>) -> *mut Self {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__get(ptr) }
}
fn reset(ptr: *mut SharedRef<Self>) {
fn reset(ptr: &mut SharedPtrBase<Self>) {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__reset(ptr) }
}
fn use_count(ptr: *const SharedRef<Self>) -> long {
fn use_count(ptr: &SharedPtrBase<Self>) -> long {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__use_count(ptr) }
}
}
@ -127,8 +129,8 @@ impl Shared for Allocator {
pub fn new_default_allocator() -> SharedRef<Allocator> {
unsafe {
UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewDefaultAllocator())
.make_shared()
}
.make_shared()
}
#[test]
@ -194,20 +196,13 @@ impl BackingStore {
}
impl Deref for BackingStore {
type Target = [u8];
type Target = [Cell<u8>];
/// Returns a [u8] slice referencing the data in the backing store.
fn deref(&self) -> &[u8] {
unsafe { slice::from_raw_parts(self.data() as *mut u8, self.byte_length()) }
}
}
impl DerefMut for BackingStore {
/// Returns a mutable [u8] slice referencing the data in the backing store.
fn deref_mut(&mut self) -> &mut [u8] {
unsafe {
slice::from_raw_parts_mut(self.data() as *mut u8, self.byte_length())
}
fn deref(&self) -> &Self::Target {
let data = self.data() as *mut Cell<u8>;
let len = self.byte_length();
unsafe { slice::from_raw_parts(data, len) }
}
}
@ -218,21 +213,21 @@ impl Drop for BackingStore {
}
impl Shared for BackingStore {
fn clone(ptr: *const SharedRef<Self>) -> SharedRef<Self> {
fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
unsafe { std__shared_ptr__v8__BackingStore__COPY(ptr) }
}
fn from_unique(unique: UniqueRef<Self>) -> SharedRef<Self> {
fn from_unique_ptr(unique_ptr: UniquePtr<Self>) -> SharedPtrBase<Self> {
unsafe {
std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(unique)
std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(unique_ptr)
}
}
fn deref(ptr: *const SharedRef<Self>) -> *mut Self {
fn get(ptr: &SharedPtrBase<Self>) -> *mut Self {
unsafe { std__shared_ptr__v8__BackingStore__get(ptr) }
}
fn reset(ptr: *mut SharedRef<Self>) {
fn reset(ptr: &mut SharedPtrBase<Self>) {
unsafe { std__shared_ptr__v8__BackingStore__reset(ptr) }
}
fn use_count(ptr: *const SharedRef<Self>) -> long {
fn use_count(ptr: &SharedPtrBase<Self>) -> long {
unsafe { std__shared_ptr__v8__BackingStore__use_count(ptr) }
}
}

View file

@ -625,8 +625,9 @@ two_pointers_t std__shared_ptr__v8__BackingStore__COPY(
}
two_pointers_t std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
v8::BackingStore* ptr) {
return make_pod<two_pointers_t>(std::shared_ptr<v8::BackingStore>(ptr));
v8::BackingStore* unique_ptr) {
return make_pod<two_pointers_t>(
std::shared_ptr<v8::BackingStore>(unique_ptr));
}
v8::BackingStore* std__shared_ptr__v8__BackingStore__get(
@ -651,9 +652,9 @@ two_pointers_t std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(
two_pointers_t
std__shared_ptr__v8__ArrayBuffer__Allocator__CONVERT__std__unique_ptr(
v8::ArrayBuffer::Allocator* ptr) {
v8::ArrayBuffer::Allocator* unique_ptr) {
return make_pod<two_pointers_t>(
std::shared_ptr<v8::ArrayBuffer::Allocator>(ptr));
std::shared_ptr<v8::ArrayBuffer::Allocator>(unique_ptr));
}
v8::ArrayBuffer::Allocator* std__shared_ptr__v8__ArrayBuffer__Allocator__get(

View file

@ -158,6 +158,7 @@ pub use snapshot::OwnedStartupData;
pub use snapshot::SnapshotCreator;
pub use snapshot::StartupData;
pub use string::NewStringType;
pub use support::SharedPtr;
pub use support::SharedRef;
pub use support::UniquePtr;
pub use support::UniqueRef;

View file

@ -1,9 +1,13 @@
use std::cell::UnsafeCell;
use std::borrow::Borrow;
use std::borrow::BorrowMut;
use std::convert::AsMut;
use std::convert::AsRef;
use std::marker::PhantomData;
use std::mem::align_of;
use std::mem::forget;
use std::mem::needs_drop;
use std::mem::size_of;
use std::mem::take;
use std::ops::Deref;
use std::ops::DerefMut;
use std::ptr::drop_in_place;
@ -23,16 +27,33 @@ pub type Opaque = [u8; 0];
/// Pointer to object allocated on the C++ heap. The pointer may be null.
#[repr(transparent)]
pub struct UniquePtr<T>(Option<UniqueRef<T>>);
pub struct UniquePtr<T: ?Sized>(Option<UniqueRef<T>>);
impl<T> UniquePtr<T> {
pub fn null() -> Self {
assert_unique_type_compatible::<Self, T>();
Self(None)
impl<T: ?Sized> UniquePtr<T> {
  /// Returns `true` if this `UniquePtr` does not hold a value.
  pub fn is_null(&self) -> bool {
    self.0.is_none()
  }

  /// Borrows the inner `UniqueRef`, or returns `None` if this pointer is
  /// null.
  pub fn as_ref(&self) -> Option<&UniqueRef<T>> {
    self.0.as_ref()
  }

  /// Mutably borrows the inner `UniqueRef`, or returns `None` if this
  /// pointer is null.
  pub fn as_mut(&mut self) -> Option<&mut UniqueRef<T>> {
    self.0.as_mut()
  }

  /// Moves the inner `UniqueRef` out (if any), leaving this pointer null.
  pub fn take(&mut self) -> Option<UniqueRef<T>> {
    take(&mut self.0)
  }

  /// Converts to a non-nullable `UniqueRef`. Panics (via `Option::unwrap`)
  /// if this pointer is null.
  pub fn unwrap(self) -> UniqueRef<T> {
    self.0.unwrap()
  }
}
impl<T> UniquePtr<T> {
pub unsafe fn from_raw(ptr: *mut T) -> Self {
assert_unique_type_compatible::<Self, T>();
assert_unique_ptr_layout_compatible::<Self, T>();
Self(UniqueRef::try_from_raw(ptr))
}
@ -42,42 +63,40 @@ impl<T> UniquePtr<T> {
.map(|unique_ref| unique_ref.into_raw())
.unwrap_or_else(null_mut)
}
}
pub fn unwrap(self) -> UniqueRef<T> {
self.0.unwrap()
impl<T: Shared> UniquePtr<T> {
  /// Transfers ownership to a newly created (nullable) C++ shared pointer.
  pub fn make_shared(self) -> SharedPtr<T> {
    self.into()
  }
}

impl<T> Default for UniquePtr<T> {
  /// Creates a null `UniquePtr`.
  fn default() -> Self {
    // Runtime check that `UniquePtr<T>` has the size/alignment of a raw
    // pointer, so it can cross the FFI boundary by value.
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self(None)
  }
}

impl<T> From<UniqueRef<T>> for UniquePtr<T> {
  /// Wraps a non-nullable `UniqueRef` in a nullable `UniquePtr`.
  fn from(unique_ref: UniqueRef<T>) -> Self {
    // Runtime check that `UniquePtr<T>` has the size/alignment of a raw
    // pointer, so it can cross the FFI boundary by value.
    assert_unique_ptr_layout_compatible::<Self, T>();
    Self(Some(unique_ref))
  }
}
impl<T> Deref for UniquePtr<T> {
type Target = Option<UniqueRef<T>>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T> DerefMut for UniquePtr<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
/// Pointer to object allocated on the C++ heap. The pointer may not be null.
#[repr(transparent)]
pub struct UniqueRef<T>(NonNull<T>);
pub struct UniqueRef<T: ?Sized>(NonNull<T>);
impl<T> UniqueRef<T> {
unsafe fn try_from_raw(ptr: *mut T) -> Option<Self> {
assert_unique_type_compatible::<Self, T>();
assert_unique_ptr_layout_compatible::<Self, T>();
NonNull::new(ptr).map(Self)
}
pub unsafe fn from_raw(ptr: *mut T) -> Self {
assert_unique_ptr_layout_compatible::<Self, T>();
Self::try_from_raw(ptr).unwrap()
}
@ -86,37 +105,60 @@ impl<T> UniqueRef<T> {
forget(self);
ptr
}
}
pub fn make_shared(self) -> SharedRef<T>
where
T: Shared,
{
impl<T: Shared> UniqueRef<T> {
pub fn make_shared(self) -> SharedRef<T> {
self.into()
}
}
impl<T> Deref for UniqueRef<T> {
impl<T: ?Sized> Drop for UniqueRef<T> {
fn drop(&mut self) {
unsafe { drop_in_place(self.0.as_ptr()) }
}
}
impl<T: ?Sized> Deref for UniqueRef<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
unsafe { self.0.as_ref() }
}
}
impl<T> DerefMut for UniqueRef<T> {
impl<T: ?Sized> DerefMut for UniqueRef<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { self.0.as_mut() }
}
}
impl<T> Drop for UniqueRef<T> {
fn drop(&mut self) {
unsafe { drop_in_place(self.0.as_ptr()) }
impl<T: ?Sized> AsRef<T> for UniqueRef<T> {
fn as_ref(&self) -> &T {
&**self
}
}
fn assert_unique_type_compatible<U, T>() {
impl<T: ?Sized> AsMut<T> for UniqueRef<T> {
fn as_mut(&mut self) -> &mut T {
&mut **self
}
}
impl<T: ?Sized> Borrow<T> for UniqueRef<T> {
fn borrow(&self) -> &T {
&**self
}
}
impl<T: ?Sized> BorrowMut<T> for UniqueRef<T> {
fn borrow_mut(&mut self) -> &mut T {
&mut **self
}
}
fn assert_unique_ptr_layout_compatible<U, T>() {
// Assert that `U` (a `UniqueRef` or `UniquePtr`) has the same memory layout
// as a pointer to `T`.
// as a raw C pointer.
assert_eq!(size_of::<U>(), size_of::<*mut T>());
assert_eq!(align_of::<U>(), align_of::<*mut T>());
@ -129,74 +171,118 @@ pub trait Shared
where
Self: Sized,
{
fn clone(shared_ptr: *const SharedRef<Self>) -> SharedRef<Self>;
fn from_unique(unique: UniqueRef<Self>) -> SharedRef<Self>;
fn deref(shared_ptr: *const SharedRef<Self>) -> *mut Self;
fn reset(shared_ptr: *mut SharedRef<Self>);
fn use_count(shared_ptr: *const SharedRef<Self>) -> long;
fn clone(shared_ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self>;
fn from_unique_ptr(shared_ptr: UniquePtr<Self>) -> SharedPtrBase<Self>;
fn get(shared_ptr: &SharedPtrBase<Self>) -> *mut Self;
fn reset(shared_ptr: &mut SharedPtrBase<Self>);
fn use_count(shared_ptr: &SharedPtrBase<Self>) -> long;
}
/// Private base type which is shared by the `SharedPtr` and `SharedRef`
/// implementations.
// Two words of inline storage: matches the `two_pointers_t` POD that the
// C++ glue uses to pass a `std::shared_ptr` by value.
#[repr(C)]
pub struct SharedPtrBase<T: Shared>([usize; 2], PhantomData<T>);

// NOTE(review): sending/sharing requires the pointee to be `Sync`, on the
// assumption that the C++ shared_ptr control block synchronizes its own
// refcount updates — confirm against the C++ side.
unsafe impl<T: Shared + Sync> Send for SharedPtrBase<T> {}
unsafe impl<T: Shared + Sync> Sync for SharedPtrBase<T> {}

impl<T: Shared> Default for SharedPtrBase<T> {
  /// Creates an empty base; all-zero bits are presumed to represent a
  /// default-constructed (null) `std::shared_ptr` — see the C++ bindings.
  fn default() -> Self {
    Self([0usize; 2], PhantomData)
  }
}

impl<T: Shared> Drop for SharedPtrBase<T> {
  /// Releases this reference through the `Shared` trait's FFI `reset` hook.
  fn drop(&mut self) {
    <T as Shared>::reset(self);
  }
}
/// Wrapper around a C++ shared_ptr. A shared_ptr may be null.
#[repr(C)]
#[derive(Default)]
pub struct SharedPtr<T: Shared>(SharedPtrBase<T>);

impl<T: Shared> SharedPtr<T> {
  /// Returns `true` if this shared pointer holds no value.
  pub fn is_null(&self) -> bool {
    <T as Shared>::get(&self.0).is_null()
  }

  /// Returns the reference count reported by the underlying shared_ptr.
  pub fn use_count(&self) -> long {
    <T as Shared>::use_count(&self.0)
  }

  /// Moves the value (if any) into a non-nullable `SharedRef`, leaving this
  /// `SharedPtr` null.
  pub fn take(&mut self) -> Option<SharedRef<T>> {
    if self.is_null() {
      None
    } else {
      // `take` swaps in the default (null) base, so `self` ends up empty
      // while the moved-out base keeps the reference.
      let base = take(&mut self.0);
      Some(SharedRef(base))
    }
  }

  /// Converts to a non-nullable `SharedRef`. Panics if this pointer is
  /// null.
  pub fn unwrap(self) -> SharedRef<T> {
    assert!(!self.is_null());
    SharedRef(self.0)
  }
}

impl<T: Shared> Clone for SharedPtr<T> {
  /// Clones the shared_ptr on the C++ side, bumping its reference count.
  fn clone(&self) -> Self {
    Self(<T as Shared>::clone(&self.0))
  }
}
/// Conversion from anything that can become a `UniquePtr<T>` (e.g. a
/// `UniqueRef<T>` or a `UniquePtr<T>` itself): ownership is transferred to
/// a newly created C++ shared_ptr.
impl<T, U> From<U> for SharedPtr<T>
where
  T: Shared,
  U: Into<UniquePtr<T>>,
{
  fn from(unique_ptr: U) -> Self {
    let unique_ptr = unique_ptr.into();
    Self(<T as Shared>::from_unique_ptr(unique_ptr))
  }
}

/// Downgrades a non-nullable `SharedRef` into a nullable `SharedPtr`
/// without changing the reference count.
impl<T: Shared> From<SharedRef<T>> for SharedPtr<T> {
  fn from(mut shared_ref: SharedRef<T>) -> Self {
    // Swap a null base into the `SharedRef` so its eventual drop (via
    // `SharedPtrBase::drop`) does not release the reference being moved out.
    Self(take(&mut shared_ref.0))
  }
}
/// Wrapper around a C++ shared_ptr. The shared_ptr is assumed to contain a
/// value and not be null.
/// value and may not be null.
#[repr(C)]
pub struct SharedRef<T>([*mut Opaque; 2], PhantomData<T>)
where
T: Shared;
pub struct SharedRef<T: Shared>(SharedPtrBase<T>);
unsafe impl<T> Send for SharedRef<T> where T: Shared + Send {}
impl<T> SharedRef<T>
where
T: Shared,
{
impl<T: Shared> SharedRef<T> {
pub fn use_count(&self) -> long {
<T as Shared>::use_count(self)
<T as Shared>::use_count(&self.0)
}
}
impl<T> Clone for SharedRef<T>
where
T: Shared,
{
impl<T: Shared> Clone for SharedRef<T> {
fn clone(&self) -> Self {
<T as Shared>::clone(self)
Self(<T as Shared>::clone(&self.0))
}
}
impl<T> From<UniqueRef<T>> for SharedRef<T>
where
T: Shared,
{
fn from(unique: UniqueRef<T>) -> Self {
<T as Shared>::from_unique(unique)
impl<T: Shared> From<UniqueRef<T>> for SharedRef<T> {
fn from(unique_ref: UniqueRef<T>) -> Self {
SharedPtr::from(unique_ref).unwrap()
}
}
impl<T> Deref for SharedRef<T>
where
T: Shared,
{
type Target = UnsafeCell<T>;
impl<T: Shared> Deref for SharedRef<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
unsafe { &*(<T as Shared>::deref(self) as *const UnsafeCell<T>) }
unsafe { &*(<T as Shared>::get(&self.0)) }
}
}
impl<T> DerefMut for SharedRef<T>
where
T: Shared,
{
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { &mut *(<T as Shared>::deref(self) as *mut UnsafeCell<T>) }
}
}
impl<T> Drop for SharedRef<T>
where
T: Shared,
{
fn drop(&mut self) {
<T as Shared>::reset(self);
impl<T: Shared> AsRef<T> for SharedRef<T> {
fn as_ref(&self) -> &T {
&**self
}
}

View file

@ -395,26 +395,20 @@ fn array_buffer() {
let unique_bs = v8::ArrayBuffer::new_backing_store_from_boxed_slice(data);
assert_eq!(10, unique_bs.byte_length());
assert_eq!(false, unique_bs.is_shared());
assert_eq!(unique_bs[0], 0);
assert_eq!(unique_bs[9], 9);
assert_eq!(unique_bs[0].get(), 0);
assert_eq!(unique_bs[9].get(), 9);
let shared_bs_1 = unique_bs.make_shared();
{
let bs = unsafe { &mut *shared_bs_1.get() };
assert_eq!(10, bs.byte_length());
assert_eq!(false, bs.is_shared());
assert_eq!(bs[0], 0);
assert_eq!(bs[9], 9);
}
assert_eq!(10, shared_bs_1.byte_length());
assert_eq!(false, shared_bs_1.is_shared());
assert_eq!(shared_bs_1[0].get(), 0);
assert_eq!(shared_bs_1[9].get(), 9);
let ab = v8::ArrayBuffer::with_backing_store(scope, &shared_bs_1);
let shared_bs_2 = ab.get_backing_store();
{
let bs = unsafe { &mut *shared_bs_2.get() };
assert_eq!(10, ab.byte_length());
assert_eq!(bs[0], 0);
assert_eq!(bs[9], 9);
}
assert_eq!(10, shared_bs_2.byte_length());
assert_eq!(shared_bs_2[0].get(), 0);
assert_eq!(shared_bs_2[9].get(), 9);
}
}
@ -445,6 +439,25 @@ fn backing_store_segfault() {
drop(shared_bs); // Error occurred here.
}
#[test]
fn shared_array_buffer_allocator() {
let alloc1 = v8::new_default_allocator();
assert_eq!(1, v8::SharedRef::use_count(&alloc1));
let alloc2 = alloc1.clone();
assert_eq!(2, v8::SharedRef::use_count(&alloc1));
assert_eq!(2, v8::SharedRef::use_count(&alloc2));
let mut alloc2 = v8::SharedPtr::from(alloc2);
assert_eq!(2, v8::SharedRef::use_count(&alloc1));
assert_eq!(2, v8::SharedPtr::use_count(&alloc2));
drop(alloc1);
assert_eq!(1, v8::SharedPtr::use_count(&alloc2));
alloc2.take();
assert_eq!(0, v8::SharedPtr::use_count(&alloc2));
}
#[test]
fn array_buffer_with_shared_backing_store() {
let _setup_guard = setup();
@ -463,16 +476,16 @@ fn array_buffer_with_shared_backing_store() {
assert_eq!(42, ab1.byte_length());
let bs1 = ab1.get_backing_store();
assert_eq!(ab1.byte_length(), unsafe { (*bs1.get()).byte_length() });
assert_eq!(ab1.byte_length(), bs1.byte_length());
assert_eq!(2, v8::SharedRef::use_count(&bs1));
let bs2 = ab1.get_backing_store();
assert_eq!(ab1.byte_length(), unsafe { (*bs2.get()).byte_length() });
assert_eq!(ab1.byte_length(), bs2.byte_length());
assert_eq!(3, v8::SharedRef::use_count(&bs1));
assert_eq!(3, v8::SharedRef::use_count(&bs2));
let bs3 = ab1.get_backing_store();
assert_eq!(ab1.byte_length(), unsafe { (*bs3.get()).byte_length() });
assert_eq!(ab1.byte_length(), bs3.byte_length());
assert_eq!(4, v8::SharedRef::use_count(&bs1));
assert_eq!(4, v8::SharedRef::use_count(&bs2));
assert_eq!(4, v8::SharedRef::use_count(&bs3));
@ -489,7 +502,7 @@ fn array_buffer_with_shared_backing_store() {
assert_eq!(3, v8::SharedRef::use_count(&bs3));
let bs4 = ab2.get_backing_store();
assert_eq!(ab1.byte_length(), unsafe { (*bs4.get()).byte_length() });
assert_eq!(ab1.byte_length(), bs4.byte_length());
assert_eq!(4, v8::SharedRef::use_count(&bs3));
assert_eq!(4, v8::SharedRef::use_count(&bs4));
@ -2056,11 +2069,8 @@ fn shared_array_buffer() {
let sab = v8::SharedArrayBuffer::new(scope, 16).unwrap();
let shared_bs_1 = sab.get_backing_store();
{
let bs = unsafe { &mut *shared_bs_1.get() };
bs[5] = 12;
bs[12] = 52;
}
shared_bs_1[5].set(12);
shared_bs_1[12].set(52);
let global = context.global(scope);
let r = global
@ -2080,11 +2090,8 @@ fn shared_array_buffer() {
let result: v8::Local<v8::Integer> =
script.run(scope, context).unwrap().try_into().unwrap();
assert_eq!(result.value(), 64);
{
let bs = unsafe { &*shared_bs_1.get() };
assert_eq!(bs[2], 16);
assert_eq!(bs[14], 62);
}
assert_eq!(shared_bs_1[2].get(), 16);
assert_eq!(shared_bs_1[14].get(), 62);
let data: Box<[u8]> = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9].into_boxed_slice();
let bs = v8::SharedArrayBuffer::new_backing_store_from_boxed_slice(data);
@ -2092,20 +2099,14 @@ fn shared_array_buffer() {
assert_eq!(bs.is_shared(), true);
let shared_bs_2 = bs.make_shared();
{
let bs = unsafe { &*shared_bs_2.get() };
assert_eq!(bs.byte_length(), 10);
assert_eq!(bs.is_shared(), true);
}
assert_eq!(shared_bs_2.byte_length(), 10);
assert_eq!(shared_bs_2.is_shared(), true);
let ab = v8::SharedArrayBuffer::with_backing_store(scope, &shared_bs_2);
let shared_bs_3 = ab.get_backing_store();
{
let bs = unsafe { &*shared_bs_3.get() };
assert_eq!(bs.byte_length(), 10);
assert_eq!(bs[0], 0);
assert_eq!(bs[9], 9);
}
assert_eq!(shared_bs_3.byte_length(), 10);
assert_eq!(shared_bs_3[0].get(), 0);
assert_eq!(shared_bs_3[9].get(), 9);
}
}