
Expose ArrayBuffer::Allocator interface (#578)

Heyang Zhou, 2021-01-15 06:29:06 +08:00 (committed by GitHub)
parent 5cb14847aa · commit 3fd7cae994
3 changed files with 213 additions and 0 deletions
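In short: on the Rust side the commit adds a C-compatible RustAllocatorVtable<T> and an unsafe constructor, new_rust_allocator(); on the C++ side it adds a RustAllocator adapter that forwards v8::ArrayBuffer::Allocator's virtual methods to those function pointers. A minimal consumer sketch follows; the callback names here are illustrative, the stubs panic if V8 ever calls them, and V8 platform setup is elided (the real, working callbacks are in run_with_rust_allocator at the bottom of this page):

use std::os::raw::c_void;
use std::sync::atomic::AtomicUsize;
use std::sync::Arc;

// Stub callbacks: they satisfy the vtable's signatures but panic if V8
// ever invokes them. run_with_rust_allocator (below) has real bodies.
unsafe extern "C" fn alloc(_: &AtomicUsize, _: usize) -> *mut c_void {
  unimplemented!()
}
unsafe extern "C" fn alloc_uninit(_: &AtomicUsize, _: usize) -> *mut c_void {
  unimplemented!()
}
unsafe extern "C" fn free(_: &AtomicUsize, _: *mut c_void, _: usize) {
  unimplemented!()
}
unsafe extern "C" fn realloc(
  _: &AtomicUsize,
  _: *mut c_void,
  _: usize,
  _: usize,
) -> *mut c_void {
  unimplemented!()
}
unsafe extern "C" fn drop_handle(h: *const AtomicUsize) {
  // Reclaim the strong Arc reference donated via Arc::into_raw below.
  drop(Arc::from_raw(h));
}

fn main() {
  // (V8 platform initialization is elided; the tests do it in setup().)
  let state = Arc::new(AtomicUsize::new(0));
  let vtable: &'static v8::RustAllocatorVtable<AtomicUsize> =
    &v8::RustAllocatorVtable {
      allocate: alloc,
      allocate_uninitialized: alloc_uninit,
      free,
      reallocate: realloc,
      drop: drop_handle,
    };
  // SAFETY: Arc::into_raw donates one strong count; drop_handle reclaims
  // it exactly once when V8 destroys the allocator.
  let params = v8::CreateParams::default().array_buffer_allocator(unsafe {
    v8::new_rust_allocator(Arc::into_raw(state.clone()), vtable)
  });
  let _isolate = v8::Isolate::new(params);
}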

src/array_buffer.rs

@@ -20,6 +20,10 @@ use crate::Local;
extern "C" {
fn v8__ArrayBuffer__Allocator__NewDefaultAllocator() -> *mut Allocator;
fn v8__ArrayBuffer__Allocator__NewRustAllocator(
handle: *const c_void,
vtable: *const RustAllocatorVtable<c_void>,
) -> *mut Allocator;
fn v8__ArrayBuffer__Allocator__DELETE(this: *mut Allocator);
fn v8__ArrayBuffer__New__with_byte_length(
isolate: *mut Isolate,
@@ -103,6 +107,22 @@ extern "C" {
#[derive(Debug)]
pub struct Allocator(Opaque);
/// A wrapper around the V8 Allocator class.
#[repr(C)]
pub struct RustAllocatorVtable<T> {
pub allocate: unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
pub allocate_uninitialized:
unsafe extern "C" fn(handle: &T, len: usize) -> *mut c_void,
pub free: unsafe extern "C" fn(handle: &T, data: *mut c_void, len: usize),
pub reallocate: unsafe extern "C" fn(
handle: &T,
data: *mut c_void,
old_length: usize,
new_length: usize,
) -> *mut c_void,
pub drop: unsafe extern "C" fn(handle: *const T),
}
impl Shared for Allocator {
fn clone(ptr: &SharedPtrBase<Self>) -> SharedPtrBase<Self> {
unsafe { std__shared_ptr__v8__ArrayBuffer__Allocator__COPY(ptr) }
@@ -132,6 +152,65 @@ pub fn new_default_allocator() -> UniqueRef<Allocator> {
}
}
/// Creates an allocator managed by Rust code.
///
/// Marked `unsafe` because the caller must ensure that `handle` is valid and matches what `vtable` expects.
pub unsafe fn new_rust_allocator<T: Sized + Send + Sync + 'static>(
handle: *const T,
vtable: &'static RustAllocatorVtable<T>,
) -> UniqueRef<Allocator> {
UniqueRef::from_raw(v8__ArrayBuffer__Allocator__NewRustAllocator(
handle as *const c_void,
vtable as *const RustAllocatorVtable<T>
as *const RustAllocatorVtable<c_void>,
))
}
#[test]
fn test_rust_allocator() {
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
unsafe extern "C" fn allocate(_: &AtomicUsize, _: usize) -> *mut c_void {
unimplemented!()
}
unsafe extern "C" fn allocate_uninitialized(
_: &AtomicUsize,
_: usize,
) -> *mut c_void {
unimplemented!()
}
unsafe extern "C" fn free(_: &AtomicUsize, _: *mut c_void, _: usize) {
unimplemented!()
}
unsafe extern "C" fn reallocate(
_: &AtomicUsize,
_: *mut c_void,
_: usize,
_: usize,
) -> *mut c_void {
unimplemented!()
}
unsafe extern "C" fn drop(x: *const AtomicUsize) {
let arc = Arc::from_raw(x);
arc.store(42, Ordering::SeqCst);
}
let retval = Arc::new(AtomicUsize::new(0));
let vtable: &'static RustAllocatorVtable<AtomicUsize> =
&RustAllocatorVtable {
allocate,
allocate_uninitialized,
free,
reallocate,
drop,
};
unsafe { new_rust_allocator(Arc::into_raw(retval.clone()), vtable) };
assert_eq!(retval.load(Ordering::SeqCst), 42);
assert_eq!(Arc::strong_count(&retval), 1);
}
#[test]
fn test_default_allocator() {
new_default_allocator();
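Note the contract implied by new_rust_allocator's signature: handle is a raw pointer that the Rust side never dereferences itself, and vtable must be &'static because the C++ adapter stores both pointers for the allocator's entire lifetime. Both tests uphold this by donating one strong Arc reference via Arc::into_raw and reclaiming it exactly once in the drop callback, which fires when the allocator itself is destroyed.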

src/binding.cc

@@ -1105,10 +1105,59 @@ size_t v8__ArrayBufferView__CopyContents(const v8::ArrayBufferView& self,
return ptr_to_local(&self)->CopyContents(dest, byte_length);
}
struct RustAllocatorVtable {
void* (*allocate)(void* handle, size_t length);
void* (*allocate_uninitialized)(void* handle, size_t length);
void (*free)(void* handle, void* data, size_t length);
void* (*reallocate)(void* handle, void* data, size_t old_length,
size_t new_length);
void (*drop)(void* handle);
};
class RustAllocator : public v8::ArrayBuffer::Allocator {
private:
void* handle;
const RustAllocatorVtable* vtable;
public:
RustAllocator(void* handle, const RustAllocatorVtable* vtable) {
this->handle = handle;
this->vtable = vtable;
}
RustAllocator(const RustAllocator& that) = delete;
RustAllocator(RustAllocator&& that) = delete;
void operator=(const RustAllocator& that) = delete;
void operator=(RustAllocator&& that) = delete;
virtual ~RustAllocator() { vtable->drop(handle); }
void* Allocate(size_t length) final {
return vtable->allocate(handle, length);
}
void* AllocateUninitialized(size_t length) final {
return vtable->allocate_uninitialized(handle, length);
}
void Free(void* data, size_t length) final {
vtable->free(handle, data, length);
}
void* Reallocate(void* data, size_t old_length, size_t new_length) final {
return vtable->reallocate(handle, data, old_length, new_length);
}
};
v8::ArrayBuffer::Allocator* v8__ArrayBuffer__Allocator__NewDefaultAllocator() {
return v8::ArrayBuffer::Allocator::NewDefaultAllocator();
}
v8::ArrayBuffer::Allocator* v8__ArrayBuffer__Allocator__NewRustAllocator(
void* handle, const RustAllocatorVtable* vtable) {
return new RustAllocator(handle, vtable);
}
void v8__ArrayBuffer__Allocator__DELETE(v8::ArrayBuffer::Allocator* self) {
delete self;
}
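The adapter is deliberately non-copyable and non-movable: the destructor is the only place vtable->drop(handle) fires, so deleting the copy and move operations guarantees the Rust-side handle is released exactly once. v8__ArrayBuffer__Allocator__DELETE is what dropping a UniqueRef<Allocator> on the Rust side ultimately invokes, which is how the immediately-dropped allocator in test_rust_allocator reaches the drop callback and stores 42.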

tests/test_api.rs

@@ -4430,3 +4430,88 @@ fn unbound_script_conversion() {
assert_eq!(result.to_rust_string_lossy(scope), "Hello world");
}
}
#[test]
fn run_with_rust_allocator() {
use std::sync::Arc;
unsafe extern "C" fn allocate(count: &AtomicUsize, n: usize) -> *mut c_void {
count.fetch_add(n, Ordering::SeqCst);
Box::into_raw(vec![0u8; n].into_boxed_slice()) as *mut [u8] as *mut c_void
}
unsafe extern "C" fn allocate_uninitialized(
count: &AtomicUsize,
n: usize,
) -> *mut c_void {
count.fetch_add(n, Ordering::SeqCst);
let mut store = Vec::with_capacity(n);
store.set_len(n);
Box::into_raw(store.into_boxed_slice()) as *mut [u8] as *mut c_void
}
unsafe extern "C" fn free(count: &AtomicUsize, data: *mut c_void, n: usize) {
count.fetch_sub(n, Ordering::SeqCst);
Box::from_raw(std::slice::from_raw_parts_mut(data as *mut u8, n));
}
unsafe extern "C" fn reallocate(
count: &AtomicUsize,
prev: *mut c_void,
oldlen: usize,
newlen: usize,
) -> *mut c_void {
count.fetch_add(newlen.wrapping_sub(oldlen), Ordering::SeqCst);
let old_store =
Box::from_raw(std::slice::from_raw_parts_mut(prev as *mut u8, oldlen));
let mut new_store = Vec::with_capacity(newlen);
let copy_len = oldlen.min(newlen);
new_store.extend_from_slice(&old_store[..copy_len]);
new_store.resize(newlen, 0u8);
Box::into_raw(new_store.into_boxed_slice()) as *mut [u8] as *mut c_void
}
unsafe extern "C" fn drop(count: *const AtomicUsize) {
Arc::from_raw(count);
}
let vtable: &'static v8::RustAllocatorVtable<AtomicUsize> =
&v8::RustAllocatorVtable {
allocate,
allocate_uninitialized,
free,
reallocate,
drop,
};
let count = Arc::new(AtomicUsize::new(0));
let _setup_guard = setup();
let create_params =
v8::CreateParams::default().array_buffer_allocator(unsafe {
v8::new_rust_allocator(Arc::into_raw(count.clone()), vtable)
});
let isolate = &mut v8::Isolate::new(create_params);
{
let scope = &mut v8::HandleScope::new(isolate);
let context = v8::Context::new(scope);
let scope = &mut v8::ContextScope::new(scope, context);
let source = v8::String::new(
scope,
r#"
for(let i = 0; i < 10; i++) new ArrayBuffer(1024 * i);
"OK";
"#,
)
.unwrap();
let script = v8::Script::compile(scope, source, None).unwrap();
let result = script.run(scope).unwrap();
assert_eq!(result.to_rust_string_lossy(scope), "OK");
}
let mut stats = v8::HeapStatistics::default();
isolate.get_heap_statistics(&mut stats);
let count_loaded = count.load(Ordering::SeqCst);
assert!(count_loaded > 0);
assert!(count_loaded <= stats.external_memory());
// Force a GC.
isolate.low_memory_notification();
let count_loaded = count.load(Ordering::SeqCst);
assert_eq!(count_loaded, 0);
}
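The end-to-end test verifies the allocator through V8's own accounting: while the script's buffers are alive, the byte counter must be positive and no larger than stats.external_memory(), and low_memory_notification() forces a full GC so every ArrayBuffer backing store is released through the Rust free callback, bringing the counter back to zero.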