mirror of https://github.com/denoland/rusty_v8.git
synced 2024-12-26 00:59:28 -05:00

Make BackingStore APIs more consistent with C++ (#234)

parent 3c1c52fb33
commit 1a1bac3883

5 changed files with 224 additions and 108 deletions

src/array_buffer.rs

@@ -1,3 +1,11 @@
 // Copyright 2019-2020 the Deno authors. All rights reserved. MIT license.
 
+use std::ffi::c_void;
+use std::ops::Deref;
+use std::ops::DerefMut;
+use std::ptr::null_mut;
+use std::slice;
+
 use crate::support::long;
 use crate::support::Delete;
 use crate::support::Opaque;

@@ -13,11 +21,11 @@ use crate::ToLocal;
 extern "C" {
   fn v8__ArrayBuffer__Allocator__NewDefaultAllocator() -> *mut Allocator;
   fn v8__ArrayBuffer__Allocator__DELETE(this: &'static mut Allocator);
-  fn v8__ArrayBuffer__New__byte_length(
+  fn v8__ArrayBuffer__New__with_byte_length(
     isolate: *mut Isolate,
     byte_length: usize,
   ) -> *mut ArrayBuffer;
-  fn v8__ArrayBuffer__New__backing_store(
+  fn v8__ArrayBuffer__New__with_backing_store(
     isolate: *mut Isolate,
     backing_store: *mut SharedRef<BackingStore>,
   ) -> *mut ArrayBuffer;

@@ -25,20 +33,25 @@ extern "C" {
   fn v8__ArrayBuffer__GetBackingStore(
     self_: *const ArrayBuffer,
   ) -> SharedRef<BackingStore>;
-  fn v8__ArrayBuffer__NewBackingStore(
+  fn v8__ArrayBuffer__NewBackingStore__with_byte_length(
     isolate: *mut Isolate,
     byte_length: usize,
   ) -> *mut BackingStore;
-  fn v8__ArrayBuffer__NewBackingStore_FromRaw(
-    data: *mut std::ffi::c_void,
+  fn v8__ArrayBuffer__NewBackingStore__with_data(
+    data: *mut c_void,
     byte_length: usize,
     deleter: BackingStoreDeleterCallback,
-  ) -> SharedRef<BackingStore>;
+    deleter_data: *mut c_void,
+  ) -> *mut BackingStore;
 
-  fn v8__BackingStore__Data(self_: &mut BackingStore) -> *mut std::ffi::c_void;
-  fn v8__BackingStore__ByteLength(self_: &BackingStore) -> usize;
-  fn v8__BackingStore__IsShared(self_: &BackingStore) -> bool;
-  fn v8__BackingStore__DELETE(self_: &mut BackingStore);
+  fn v8__BackingStore__Data(this: *mut BackingStore) -> *mut c_void;
+  fn v8__BackingStore__ByteLength(this: *const BackingStore) -> usize;
+  fn v8__BackingStore__IsShared(this: *const BackingStore) -> bool;
+  fn v8__BackingStore__DELETE(this: &mut BackingStore);
+
+  fn std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
+    unique: UniqueRef<BackingStore>,
+  ) -> SharedRef<BackingStore>;
   fn std__shared_ptr__v8__BackingStore__get(
     ptr: *const SharedRef<BackingStore>,
   ) -> *mut BackingStore;

@@ -92,15 +105,15 @@ impl Delete for Allocator {
 }
 
 pub type BackingStoreDeleterCallback = unsafe extern "C" fn(
-  data: *mut std::ffi::c_void,
+  data: *mut c_void,
   byte_length: usize,
-  deleter_data: *mut std::ffi::c_void,
+  deleter_data: *mut c_void,
 );
 
 pub unsafe extern "C" fn backing_store_deleter_callback(
-  data: *mut std::ffi::c_void,
+  data: *mut c_void,
   _byte_length: usize,
-  _deleter_data: *mut std::ffi::c_void,
+  _deleter_data: *mut c_void,
 ) {
   let b = Box::from_raw(data);
   drop(b)

@@ -123,21 +136,11 @@ pub struct BackingStore([usize; 6]);
 unsafe impl Send for BackingStore {}
 
 impl BackingStore {
-  /// Returns a rust u8 slice with a lifetime equal to the lifetime of the BackingStore.
-  pub fn data_bytes<'a>(&'a mut self) -> &'a mut [u8] {
-    unsafe {
-      std::slice::from_raw_parts_mut::<'a, u8>(
-        v8__BackingStore__Data(self) as *mut u8,
-        self.byte_length(),
-      )
-    }
-  }
-
   /// Return a pointer to the beginning of the memory block for this backing
   /// store. The pointer is only valid as long as this backing store object
   /// lives.
-  pub fn data(&mut self) -> &mut std::ffi::c_void {
-    unsafe { &mut *v8__BackingStore__Data(self) }
+  pub fn data(&self) -> *mut c_void {
+    unsafe { v8__BackingStore__Data(self as *const _ as *mut Self) }
   }
 
   /// The length (in bytes) of this backing store.

@@ -152,6 +155,24 @@ impl BackingStore {
   }
 }
 
+impl Deref for BackingStore {
+  type Target = [u8];
+
+  /// Returns a [u8] slice refencing the data in the backing store.
+  fn deref(&self) -> &[u8] {
+    unsafe { slice::from_raw_parts(self.data() as *mut u8, self.byte_length()) }
+  }
+}
+
+impl DerefMut for BackingStore {
+  /// Returns a mutable [u8] slice refencing the data in the backing store.
+  fn deref_mut(&mut self) -> &mut [u8] {
+    unsafe {
+      slice::from_raw_parts_mut(self.data() as *mut u8, self.byte_length())
+    }
+  }
+}
+
 impl Delete for BackingStore {
   fn delete(&mut self) {
     unsafe { v8__BackingStore__DELETE(self) };
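
Note: with Deref and DerefMut in place, a BackingStore can be read and written
like an ordinary byte slice, which is what lets the updated tests index with
bs[0] instead of the removed data_bytes() helper. A minimal standalone sketch
of the same pattern, using a hypothetical RawBuffer type (not part of
rusty_v8) to stand in for FFI-owned storage:

    use std::ops::{Deref, DerefMut};
    use std::slice;

    // Hypothetical stand-in for a buffer whose storage lives behind FFI.
    struct RawBuffer {
      data: *mut u8,
      len: usize,
    }

    impl Deref for RawBuffer {
      type Target = [u8];
      // Borrowing &self ties the slice's lifetime to the buffer object,
      // just like BackingStore::deref above.
      fn deref(&self) -> &[u8] {
        unsafe { slice::from_raw_parts(self.data, self.len) }
      }
    }

    impl DerefMut for RawBuffer {
      fn deref_mut(&mut self) -> &mut [u8] {
        unsafe { slice::from_raw_parts_mut(self.data, self.len) }
      }
    }

    fn main() {
      let mut storage = vec![0u8; 4].into_boxed_slice();
      let mut buf = RawBuffer {
        data: storage.as_mut_ptr(),
        len: storage.len(),
      };
      buf[0] = 42; // indexing goes through DerefMut
      assert_eq!(buf[0], 42);
      assert_eq!(buf.len(), 4);
    }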

@@ -159,6 +180,11 @@ impl Delete for BackingStore {
 }
 
 impl Shared for BackingStore {
+  fn from_unique(unique: UniqueRef<Self>) -> SharedRef<Self> {
+    unsafe {
+      std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(unique)
+    }
+  }
   fn deref(ptr: *const SharedRef<Self>) -> *mut Self {
     unsafe { std__shared_ptr__v8__BackingStore__get(ptr) }
   }

@@ -181,17 +207,17 @@ impl ArrayBuffer {
   ) -> Local<'sc, ArrayBuffer> {
     let isolate = scope.isolate();
     let ptr =
-      unsafe { v8__ArrayBuffer__New__byte_length(isolate, byte_length) };
+      unsafe { v8__ArrayBuffer__New__with_byte_length(isolate, byte_length) };
     unsafe { scope.to_local(ptr) }.unwrap()
   }
 
-  pub fn new_with_backing_store<'sc>(
+  pub fn with_backing_store<'sc>(
     scope: &mut impl ToLocal<'sc>,
     backing_store: &mut SharedRef<BackingStore>,
   ) -> Local<'sc, ArrayBuffer> {
     let isolate = scope.isolate();
     let ptr = unsafe {
-      v8__ArrayBuffer__New__backing_store(isolate, &mut *backing_store)
+      v8__ArrayBuffer__New__with_backing_store(isolate, &mut *backing_store)
     };
     unsafe { scope.to_local(ptr) }.unwrap()
   }

@@ -200,6 +226,11 @@ impl ArrayBuffer {
   pub fn byte_length(&self) -> usize {
     unsafe { v8__ArrayBuffer__ByteLength(self) }
   }
 
+  /// Get a shared pointer to the backing store of this array buffer. This
+  /// pointer coordinates the lifetime management of the internal storage
+  /// with any live ArrayBuffers on the heap, even across isolates. The embedder
+  /// should not attempt to manage lifetime of the storage through other means.
   pub fn get_backing_store(&self) -> SharedRef<BackingStore> {
     unsafe { v8__ArrayBuffer__GetBackingStore(self) }
   }

@@ -216,7 +247,7 @@ impl ArrayBuffer {
     byte_length: usize,
   ) -> UniqueRef<BackingStore> {
     unsafe {
-      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore(
+      UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_byte_length(
         scope.isolate(),
         byte_length,
       ))

@@ -232,13 +263,14 @@ impl ArrayBuffer {
   /// to the buffer must not be passed again to any V8 API function.
   pub unsafe fn new_backing_store_from_boxed_slice(
     data: Box<[u8]>,
-  ) -> SharedRef<BackingStore> {
+  ) -> UniqueRef<BackingStore> {
     let byte_length = data.len();
-    let data_ptr = Box::into_raw(data) as *mut std::ffi::c_void;
-    v8__ArrayBuffer__NewBackingStore_FromRaw(
+    let data_ptr = Box::into_raw(data) as *mut c_void;
+    UniqueRef::from_raw(v8__ArrayBuffer__NewBackingStore__with_data(
      data_ptr,
      byte_length,
      backing_store_deleter_callback,
-    )
+      null_mut(),
+    ))
   }
 }
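
Note: taken together, these changes give the Rust ArrayBuffer the same two
construction paths as the C++ API: from a byte length, or from an explicit
backing store. A usage sketch distilled from the updated tests (isolate and
handle-scope setup are elided; scope is assumed to be a live scope for this
version of the crate, and demo is an illustrative name):

    // Sketch only; signatures follow rusty_v8 as of this commit.
    fn demo<'sc>(scope: &mut impl v8::ToLocal<'sc>) {
      // Hand a Rust allocation to V8; the deleter callback frees it later.
      let data: Box<[u8]> = vec![0, 1, 2, 3].into_boxed_slice();
      let unique_bs =
        unsafe { v8::ArrayBuffer::new_backing_store_from_boxed_slice(data) };

      // UniqueRef<BackingStore> -> SharedRef<BackingStore>, mirroring the
      // unique_ptr -> shared_ptr conversion on the C++ side.
      let mut shared_bs = unique_bs.make_shared();

      // The shared backing store can now back an ArrayBuffer.
      let ab = v8::ArrayBuffer::with_backing_store(scope, &mut shared_bs);
      assert_eq!(4, ab.byte_length());
      assert_eq!(3, ab.get_backing_store()[3]); // via the new Deref impl
    }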

src/binding.cc

@@ -561,19 +561,19 @@ v8::Primitive* v8__PrimitiveArray__Get(v8::PrimitiveArray& self,
   return local_to_ptr(self.Get(isolate, index));
 }
 
-v8::BackingStore* v8__ArrayBuffer__NewBackingStore(v8::Isolate* isolate,
-                                                   size_t length) {
+v8::BackingStore* v8__ArrayBuffer__NewBackingStore__with_byte_length(
+    v8::Isolate* isolate, size_t byte_length) {
   std::unique_ptr<v8::BackingStore> u =
-      v8::ArrayBuffer::NewBackingStore(isolate, length);
+      v8::ArrayBuffer::NewBackingStore(isolate, byte_length);
   return u.release();
 }
 
-two_pointers_t v8__ArrayBuffer__NewBackingStore_FromRaw(
-    void* data, size_t length, v8::BackingStoreDeleterCallback deleter) {
-  std::unique_ptr<v8::BackingStore> u =
-      v8::ArrayBuffer::NewBackingStore(data, length, deleter, nullptr);
-  const std::shared_ptr<v8::BackingStore> bs = std::move(u);
-  return make_pod<two_pointers_t>(bs);
+v8::BackingStore* v8__ArrayBuffer__NewBackingStore__with_data(
+    void* data, size_t byte_length, v8::BackingStoreDeleterCallback deleter,
+    void* deleter_data) {
+  std::unique_ptr<v8::BackingStore> u = v8::ArrayBuffer::NewBackingStore(
+      data, byte_length, deleter, deleter_data);
+  return u.release();
 }
 
 two_pointers_t v8__ArrayBuffer__GetBackingStore(v8::ArrayBuffer& self) {

@@ -582,16 +582,21 @@ two_pointers_t v8__ArrayBuffer__GetBackingStore(v8::ArrayBuffer& self) {
 
 void* v8__BackingStore__Data(v8::BackingStore& self) { return self.Data(); }
 
-size_t v8__BackingStore__ByteLength(v8::BackingStore& self) {
+size_t v8__BackingStore__ByteLength(const v8::BackingStore& self) {
   return self.ByteLength();
 }
 
-bool v8__BackingStore__IsShared(v8::BackingStore& self) {
+bool v8__BackingStore__IsShared(const v8::BackingStore& self) {
   return self.IsShared();
 }
 
 void v8__BackingStore__DELETE(v8::BackingStore& self) { delete &self; }
 
+two_pointers_t std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr(
+    v8::BackingStore* ptr) {
+  return make_pod<two_pointers_t>(std::shared_ptr<v8::BackingStore>(ptr));
+}
+
 v8::BackingStore* std__shared_ptr__v8__BackingStore__get(
     const std::shared_ptr<v8::BackingStore>& ptr) {
   return ptr.get();

@@ -739,12 +744,12 @@ void v8__ArrayBuffer__Allocator__DELETE(v8::ArrayBuffer::Allocator& self) {
   delete &self;
 }
 
-v8::ArrayBuffer* v8__ArrayBuffer__New__byte_length(v8::Isolate* isolate,
+v8::ArrayBuffer* v8__ArrayBuffer__New__with_byte_length(v8::Isolate* isolate,
                                                    size_t byte_length) {
   return local_to_ptr(v8::ArrayBuffer::New(isolate, byte_length));
 }
 
-v8::ArrayBuffer* v8__ArrayBuffer__New__backing_store(
+v8::ArrayBuffer* v8__ArrayBuffer__New__with_backing_store(
     v8::Isolate* isolate, std::shared_ptr<v8::BackingStore>& backing_store) {
   return local_to_ptr(v8::ArrayBuffer::New(isolate, backing_store));
 }

@@ -1072,11 +1077,16 @@ v8::PrimitiveArray* v8__ScriptOrModule__GetHostDefinedOptions(
   return local_to_ptr(self.GetHostDefinedOptions());
 }
 
-v8::SharedArrayBuffer* v8__SharedArrayBuffer__New(v8::Isolate* isolate,
-                                                  size_t byte_length) {
+v8::SharedArrayBuffer* v8__SharedArrayBuffer__New__with_byte_length(
+    v8::Isolate* isolate, size_t byte_length) {
   return local_to_ptr(v8::SharedArrayBuffer::New(isolate, byte_length));
 }
 
+v8::SharedArrayBuffer* v8__SharedArrayBuffer__New__with_backing_store(
+    v8::Isolate* isolate, std::shared_ptr<v8::BackingStore>& backing_store) {
+  return local_to_ptr(v8::SharedArrayBuffer::New(isolate, backing_store));
+}
+
 size_t v8__SharedArrayBuffer__ByteLength(v8::SharedArrayBuffer& self) {
   return self.ByteLength();
 }

@@ -1086,17 +1096,19 @@ two_pointers_t v8__SharedArrayBuffer__GetBackingStore(
   return make_pod<two_pointers_t>(self.GetBackingStore());
 }
 
-two_pointers_t v8__SharedArrayBuffer__NewBackingStore_FromRaw(
-    void* data, size_t length, v8::BackingStoreDeleterCallback deleter) {
+v8::BackingStore* v8__SharedArrayBuffer__NewBackingStore__with_byte_length(
+    v8::Isolate* isolate, size_t byte_length) {
   std::unique_ptr<v8::BackingStore> u =
-      v8::SharedArrayBuffer::NewBackingStore(data, length, deleter, nullptr);
-  const std::shared_ptr<v8::BackingStore> bs = std::move(u);
-  return make_pod<two_pointers_t>(bs);
+      v8::SharedArrayBuffer::NewBackingStore(isolate, byte_length);
+  return u.release();
 }
 
-v8::SharedArrayBuffer* v8__SharedArrayBuffer__New__backing_store(
-    v8::Isolate* isolate, std::shared_ptr<v8::BackingStore>& backing_store) {
-  return local_to_ptr(v8::SharedArrayBuffer::New(isolate, backing_store));
+v8::BackingStore* v8__SharedArrayBuffer__NewBackingStore__with_data(
+    void* data, size_t byte_length, v8::BackingStoreDeleterCallback deleter,
+    void* deleter_data) {
+  std::unique_ptr<v8::BackingStore> u = v8::SharedArrayBuffer::NewBackingStore(
+      data, byte_length, deleter, deleter_data);
+  return u.release();
 }
 
 v8::Value* v8__JSON__Parse(v8::Local<v8::Context> context,
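
Note: every constructor on the C++ side now ends in u.release(): the
std::unique_ptr gives up its raw pointer, ownership crosses the FFI boundary,
and the Rust side immediately re-wraps it with UniqueRef::from_raw. The
deleter callback is how a Rust allocation eventually comes back to be freed.
A pure-Rust miniature of that round trip (the names here are illustrative,
not part of the bindings):

    use std::ffi::c_void;
    use std::ptr::null_mut;

    // Plays the role of BackingStoreDeleterCallback: called with the same
    // pointer and length that Rust originally handed out.
    unsafe extern "C" fn deleter(
      data: *mut c_void,
      byte_length: usize,
      _deleter_data: *mut c_void,
    ) {
      // Reconstitute the original Box<[u8]> so Rust frees the allocation.
      let slice = std::slice::from_raw_parts_mut(data as *mut u8, byte_length);
      drop(Box::from_raw(slice as *mut [u8]));
    }

    fn main() {
      // The "release" half: give up ownership as a raw pointer, as
      // new_backing_store_from_boxed_slice does with Box::into_raw.
      let data: Box<[u8]> = vec![1, 2, 3].into_boxed_slice();
      let byte_length = data.len();
      let data_ptr = Box::into_raw(data) as *mut c_void;

      // In the real bindings the pointer travels through
      // v8__ArrayBuffer__NewBackingStore__with_data and V8 invokes the
      // deleter at GC time; here we complete the round trip directly.
      unsafe { deleter(data_ptr, byte_length, null_mut()) };
    }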

src/shared_array_buffer.rs

@@ -1,21 +1,27 @@
 // Copyright 2019-2020 the Deno authors. All rights reserved. MIT license.
 
+use std::ffi::c_void;
+use std::ptr::null_mut;
+
 use crate::array_buffer::backing_store_deleter_callback;
 use crate::support::SharedRef;
+use crate::support::UniqueRef;
 use crate::BackingStore;
 use crate::BackingStoreDeleterCallback;
+use crate::InIsolate;
 use crate::Isolate;
 use crate::Local;
 use crate::SharedArrayBuffer;
 use crate::ToLocal;
 
 extern "C" {
-  fn v8__SharedArrayBuffer__New(
+  fn v8__SharedArrayBuffer__New__with_byte_length(
     isolate: *mut Isolate,
     byte_length: usize,
   ) -> *mut SharedArrayBuffer;
-  fn v8__SharedArrayBuffer__New__DEPRECATED(
+  fn v8__SharedArrayBuffer__New__with_backing_store(
     isolate: *mut Isolate,
-    data_ptr: *mut std::ffi::c_void,
-    data_length: usize,
+    backing_store: *mut SharedRef<BackingStore>,
   ) -> *mut SharedArrayBuffer;
   fn v8__SharedArrayBuffer__ByteLength(
     self_: *const SharedArrayBuffer,

@@ -23,15 +29,16 @@ extern "C" {
   fn v8__SharedArrayBuffer__GetBackingStore(
     self_: *const SharedArrayBuffer,
   ) -> SharedRef<BackingStore>;
-  fn v8__SharedArrayBuffer__NewBackingStore_FromRaw(
-    data: *mut std::ffi::c_void,
+  fn v8__SharedArrayBuffer__NewBackingStore__with_byte_length(
+    isolate: *mut Isolate,
+    byte_length: usize,
+  ) -> *mut BackingStore;
+  fn v8__SharedArrayBuffer__NewBackingStore__with_data(
+    data: *mut c_void,
     byte_length: usize,
     deleter: BackingStoreDeleterCallback,
-  ) -> SharedRef<BackingStore>;
-  fn v8__SharedArrayBuffer__New__backing_store(
-    isolate: *mut Isolate,
-    backing_store: *mut SharedRef<BackingStore>,
-  ) -> *mut SharedArrayBuffer;
+    deleter_data: *mut c_void,
+  ) -> *mut BackingStore;
 }
 
 impl SharedArrayBuffer {

@@ -44,10 +51,27 @@ impl SharedArrayBuffer {
     byte_length: usize,
   ) -> Option<Local<'sc, SharedArrayBuffer>> {
     unsafe {
-      Local::from_raw(v8__SharedArrayBuffer__New(scope.isolate(), byte_length))
+      Local::from_raw(v8__SharedArrayBuffer__New__with_byte_length(
+        scope.isolate(),
+        byte_length,
+      ))
     }
   }
 
+  pub fn with_backing_store<'sc>(
+    scope: &mut impl ToLocal<'sc>,
+    backing_store: &mut SharedRef<BackingStore>,
+  ) -> Local<'sc, SharedArrayBuffer> {
+    let isolate = scope.isolate();
+    let ptr = unsafe {
+      v8__SharedArrayBuffer__New__with_backing_store(
+        isolate,
+        &mut *backing_store,
+      )
+    };
+    unsafe { scope.to_local(ptr) }.unwrap()
+  }
+
   /// Data length in bytes.
   pub fn byte_length(&self) -> usize {
     unsafe { v8__SharedArrayBuffer__ByteLength(self) }

@@ -61,15 +85,25 @@ impl SharedArrayBuffer {
     unsafe { v8__SharedArrayBuffer__GetBackingStore(self) }
   }
 
-  pub fn new_with_backing_store<'sc>(
-    scope: &mut impl ToLocal<'sc>,
-    backing_store: &mut SharedRef<BackingStore>,
-  ) -> Local<'sc, SharedArrayBuffer> {
-    let isolate = scope.isolate();
-    let ptr = unsafe {
-      v8__SharedArrayBuffer__New__backing_store(isolate, &mut *backing_store)
-    };
-    unsafe { scope.to_local(ptr) }.unwrap()
+  /// Returns a new standalone BackingStore that is allocated using the array
+  /// buffer allocator of the isolate. The result can be later passed to
+  /// ArrayBuffer::New.
+  ///
+  /// If the allocator returns nullptr, then the function may cause GCs in the
+  /// given isolate and re-try the allocation. If GCs do not help, then the
+  /// function will crash with an out-of-memory error.
+  pub fn new_backing_store(
+    scope: &mut impl InIsolate,
+    byte_length: usize,
+  ) -> UniqueRef<BackingStore> {
+    unsafe {
+      UniqueRef::from_raw(
+        v8__SharedArrayBuffer__NewBackingStore__with_byte_length(
+          scope.isolate(),
+          byte_length,
+        ),
+      )
+    }
   }
 
   /// Returns a new standalone BackingStore that takes over the ownership of

@@ -81,13 +115,14 @@ impl SharedArrayBuffer {
   /// to the buffer must not be passed again to any V8 API function.
   pub unsafe fn new_backing_store_from_boxed_slice(
     data: Box<[u8]>,
-  ) -> SharedRef<BackingStore> {
+  ) -> UniqueRef<BackingStore> {
     let byte_length = data.len();
-    let data_ptr = Box::into_raw(data) as *mut std::ffi::c_void;
-    v8__SharedArrayBuffer__NewBackingStore_FromRaw(
+    let data_ptr = Box::into_raw(data) as *mut c_void;
+    UniqueRef::from_raw(v8__SharedArrayBuffer__NewBackingStore__with_data(
       data_ptr,
       byte_length,
       backing_store_deleter_callback,
-    )
+      null_mut(),
+    ))
   }
 }
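
Note: SharedArrayBuffer now mirrors the ArrayBuffer surface. One behavioral
difference visible in the updated tests: a backing store created through
SharedArrayBuffer reports is_shared() == true, and writes made through the
DerefMut index are seen by scripts that hold the same buffer. A sketch under
the same assumptions as the ArrayBuffer example above (demo_shared and its
setup are illustrative):

    // Sketch only; signatures follow rusty_v8 as of this commit.
    fn demo_shared<'sc>(scope: &mut impl v8::ToLocal<'sc>) {
      let data: Box<[u8]> = vec![0u8; 16].into_boxed_slice();
      let bs = unsafe {
        v8::SharedArrayBuffer::new_backing_store_from_boxed_slice(data)
      };
      assert_eq!(true, bs.is_shared());

      let mut bs = bs.make_shared();
      let sab = v8::SharedArrayBuffer::with_backing_store(scope, &mut bs);

      // Writes through the backing store are observable from any script
      // that is handed `sab`.
      let mut store = sab.get_backing_store();
      store[5] = 12;
    }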

src/support.rs

@@ -57,6 +57,15 @@ where
   }
 }
 
+impl<T> From<UniqueRef<T>> for UniquePtr<T>
+where
+  T: Delete,
+{
+  fn from(unique_ref: UniqueRef<T>) -> Self {
+    unsafe { Self::from_raw(unique_ref.into_raw()) }
+  }
+}
+
 impl<T> Deref for UniquePtr<T>
 where
   T: Delete,

@@ -101,6 +110,13 @@ where
     Self(r)
   }
 
+  pub fn make_shared(self) -> SharedRef<T>
+  where
+    T: Shared,
+  {
+    self.into()
+  }
+
   pub unsafe fn from_raw(p: *mut T) -> Self {
     transmute(NonNull::new(p))
   }

@@ -114,9 +130,9 @@ impl<T> Deref for UniqueRef<T>
 where
   T: Delete,
 {
-  type Target = &'static mut T;
+  type Target = T;
   fn deref(&self) -> &Self::Target {
-    &self.0
+    self.0
   }
 }

@@ -125,7 +141,7 @@ where
   T: Delete,
 {
   fn deref_mut(&mut self) -> &mut Self::Target {
-    &mut self.0
+    self.0
   }
 }

@@ -143,8 +159,9 @@ where
 
 pub trait Shared
 where
-  Self: Sized + 'static,
+  Self: Delete + 'static,
 {
+  fn from_unique(unique: UniqueRef<Self>) -> SharedRef<Self>;
   fn deref(shared_ptr: *const SharedRef<Self>) -> *mut Self;
   fn reset(shared_ptr: *mut SharedRef<Self>);
   fn use_count(shared_ptr: *const SharedRef<Self>) -> long;

@@ -157,6 +174,8 @@ pub struct SharedRef<T>([*mut Opaque; 2], PhantomData<T>)
 where
   T: Shared;
 
+unsafe impl<T> Send for SharedRef<T> where T: Shared + Send {}
+
 impl<T> SharedRef<T>
 where
   T: Shared,

@@ -166,7 +185,14 @@ where
   }
 }
 
-unsafe impl<T> Send for SharedRef<T> where T: Shared + Send {}
+impl<T> From<UniqueRef<T>> for SharedRef<T>
+where
+  T: Delete + Shared,
+{
+  fn from(unique: UniqueRef<T>) -> Self {
+    <T as Shared>::from_unique(unique)
+  }
+}
 
 impl<T> Deref for SharedRef<T>
 where
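
Note: the support.rs changes encode C++'s one-way unique_ptr -> shared_ptr
conversion as a From impl, with UniqueRef::make_shared as sugar for .into().
The same shape in safe Rust, with Box and Arc standing in for UniqueRef and
SharedRef (illustrative only; UniqueBox and SharedBox are not real types in
this crate):

    use std::sync::Arc;

    struct UniqueBox<T>(Box<T>); // plays UniqueRef<T>
    struct SharedBox<T>(Arc<T>); // plays SharedRef<T>

    impl<T> From<UniqueBox<T>> for SharedBox<T> {
      // One-way: exclusive ownership becomes shared ownership, like
      // std__shared_ptr__v8__BackingStore__CONVERT__std__unique_ptr.
      fn from(unique: UniqueBox<T>) -> Self {
        SharedBox(Arc::from(unique.0))
      }
    }

    impl<T> UniqueBox<T> {
      // Convenience mirroring UniqueRef::make_shared.
      fn make_shared(self) -> SharedBox<T> {
        self.into()
      }
    }

    fn main() {
      let unique = UniqueBox(Box::new(42));
      let shared = unique.make_shared();
      assert_eq!(*shared.0, 42);
    }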

tests/test_api.rs

@@ -257,16 +257,24 @@ fn array_buffer() {
     assert_eq!(false, bs.is_shared());
 
     let data: Box<[u8]> = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9].into_boxed_slice();
-    let mut bs =
+    let unique_bs =
       unsafe { v8::ArrayBuffer::new_backing_store_from_boxed_slice(data) };
-    assert_eq!(10, bs.byte_length());
-    assert_eq!(false, bs.is_shared());
-    let ab = v8::ArrayBuffer::new_with_backing_store(scope, &mut bs);
-    let mut bs = ab.get_backing_store();
+    assert_eq!(10, unique_bs.byte_length());
+    assert_eq!(false, unique_bs.is_shared());
+    assert_eq!(unique_bs[0], 0);
+    assert_eq!(unique_bs[9], 9);
+
+    let mut shared_bs = unique_bs.make_shared();
+    assert_eq!(10, shared_bs.byte_length());
+    assert_eq!(false, shared_bs.is_shared());
+    assert_eq!(shared_bs[0], 0);
+    assert_eq!(shared_bs[9], 9);
+
+    let ab = v8::ArrayBuffer::with_backing_store(scope, &mut shared_bs);
+    let bs = ab.get_backing_store();
     assert_eq!(10, ab.byte_length());
-    let data = bs.data_bytes();
-    assert_eq!(data[0], 0);
-    assert_eq!(data[9], 9);
+    assert_eq!(bs[0], 0);
+    assert_eq!(bs[9], 9);
   }
 }

@@ -311,7 +319,7 @@ fn array_buffer_with_shared_backing_store() {
     drop(bs1);
     assert_eq!(2, v8::SharedRef::use_count(&bs3));
 
-    let ab2 = v8::ArrayBuffer::new_with_backing_store(scope, &mut bs3);
+    let ab2 = v8::ArrayBuffer::with_backing_store(scope, &mut bs3);
     assert_eq!(ab1.byte_length(), ab2.byte_length());
     assert_eq!(3, v8::SharedRef::use_count(&bs3));

@@ -1847,9 +1855,8 @@ fn shared_array_buffer() {
     assert!(maybe_sab.is_some());
     let sab = maybe_sab.unwrap();
     let mut backing_store = sab.get_backing_store();
-    let shared_buf = backing_store.data_bytes();
-    shared_buf[5] = 12;
-    shared_buf[12] = 52;
+    backing_store[5] = 12;
+    backing_store[12] = 52;
     let global = context.global(scope);
     assert_eq!(
       global.create_data_property(

@@ -1872,21 +1879,25 @@ fn shared_array_buffer() {
     let result: v8::Local<v8::Integer> =
       script.run(scope, context).unwrap().try_into().unwrap();
     assert_eq!(result.value(), 64);
-    assert_eq!(shared_buf[2], 16);
-    assert_eq!(shared_buf[14], 62);
+    assert_eq!(backing_store[2], 16);
+    assert_eq!(backing_store[14], 62);
 
     let data: Box<[u8]> = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9].into_boxed_slice();
-    let mut bs = unsafe {
+    let bs = unsafe {
       v8::SharedArrayBuffer::new_backing_store_from_boxed_slice(data)
     };
     assert_eq!(10, bs.byte_length());
    assert_eq!(true, bs.is_shared());
-    let ab = v8::SharedArrayBuffer::new_with_backing_store(scope, &mut bs);
-    let mut bs = ab.get_backing_store();
+
+    let mut bs = bs.make_shared();
+    assert_eq!(10, bs.byte_length());
+    assert_eq!(true, bs.is_shared());
+
+    let ab = v8::SharedArrayBuffer::with_backing_store(scope, &mut bs);
+    let bs = ab.get_backing_store();
     assert_eq!(10, ab.byte_length());
-    let data = bs.data_bytes();
-    assert_eq!(data[0], 0);
-    assert_eq!(data[9], 9);
+    assert_eq!(bs[0], 0);
+    assert_eq!(bs[9], 9);
   }
 }