0
0
Fork 0
mirror of https://github.com/denoland/rusty_v8.git synced 2024-12-26 09:13:46 -05:00

Add safe alternative to get_data/set_data (#360)

This commit is contained in:
Bert Belder 2020-04-23 09:34:28 +02:00 committed by GitHub
parent 92389f3321
commit 0d636de447
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 247 additions and 30 deletions

View file

@ -16,8 +16,10 @@ use crate::String;
use crate::Value;
use std::any::Any;
use std::any::TypeId;
use std::cell::{Ref, RefCell, RefMut};
use std::collections::HashMap;
use std::ffi::c_void;
use std::mem::replace;
use std::ops::Deref;
use std::ops::DerefMut;
use std::ptr::null_mut;
@ -143,7 +145,10 @@ impl Isolate {
crate::V8::assert_initialized(); crate::V8::assert_initialized();
let (raw_create_params, create_param_allocations) = params.finalize(); let (raw_create_params, create_param_allocations) = params.finalize();
let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) }; let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
OwnedIsolate::new(cxx_isolate, create_param_allocations) let mut owned_isolate =
OwnedIsolate::new(cxx_isolate, create_param_allocations);
owned_isolate.create_annex();
owned_isolate
} }
/// Initial configuration parameters for a new Isolate. /// Initial configuration parameters for a new Isolate.
@ -155,12 +160,27 @@ impl Isolate {
IsolateHandle::new(self) IsolateHandle::new(self)
} }
unsafe fn set_annex(&mut self, ptr: *mut IsolateAnnex) { fn create_annex(&mut self) {
v8__Isolate__SetData(self, 0, ptr as *mut c_void) let annex_arc = Arc::new(IsolateAnnex::new(self));
let annex_ptr = Arc::into_raw(annex_arc);
unsafe { v8__Isolate__SetData(self, 0, annex_ptr as *mut c_void) }
} }
fn get_annex(&self) -> *mut IsolateAnnex { fn get_annex(&self) -> &IsolateAnnex {
unsafe { v8__Isolate__GetData(self, 0) as *mut _ } unsafe {
&*(v8__Isolate__GetData(self, 0) as *const _ as *const IsolateAnnex)
}
}
fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
unsafe { &mut *(v8__Isolate__GetData(self, 0) as *mut IsolateAnnex) }
}
fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
let annex_ptr = self.get_annex();
let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
Arc::into_raw(annex_arc.clone());
annex_arc
} }
/// Associate embedder-specific data with the isolate. |slot| has to be /// Associate embedder-specific data with the isolate. |slot| has to be
@ -181,6 +201,57 @@ impl Isolate {
unsafe { v8__Isolate__GetNumberOfDataSlots(self) - 1 } unsafe { v8__Isolate__GetNumberOfDataSlots(self) - 1 }
} }
/// Safe alternative to Isolate::get_data
///
/// Warning: will be renamed to get_data_mut() after original unsafe version
/// is removed.
pub fn get_slot_mut<T: 'static>(&self) -> Option<RefMut<T>> {
let cell = self.get_annex().slots.get(&TypeId::of::<T>())?;
let ref_mut = cell.try_borrow_mut().ok()?;
let ref_mut = RefMut::map(ref_mut, |box_any| {
let mut_any = &mut **box_any;
Any::downcast_mut::<T>(mut_any).unwrap()
});
Some(ref_mut)
}
/// Safe alternative to Isolate::get_data
///
/// Warning: will be renamed to get_data() after original unsafe version is
/// removed.
pub fn get_slot<T: 'static>(&self) -> Option<Ref<T>> {
let cell = self.get_annex().slots.get(&TypeId::of::<T>())?;
let r = cell.try_borrow().ok()?;
Some(Ref::map(r, |box_any| {
let a = &**box_any;
Any::downcast_ref::<T>(a).unwrap()
}))
}
/// Safe alternative to Isolate::set_data
///
/// Use with Isolate::get_slot and Isolate::get_slot_mut to associate state
/// with an Isolate.
///
/// This method gives ownership of value to the Isolate. Exactly one object of
/// each type can be associated with an Isolate. If called more than once with
/// an object of the same type, the earlier version will be dropped and
/// replaced.
///
/// Returns true if value was set without replacing an existing value.
///
/// The value will be dropped when the isolate is dropped.
///
/// Warning: will be renamed to set_data() after original unsafe version is
/// removed.
pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
self
.get_annex_mut()
.slots
.insert(Any::type_id(&value), RefCell::new(Box::new(value)))
.is_none()
}
/// Sets this isolate as the entered one for the current thread.
/// Saves the previously entered one (if any), so that it can be
/// restored when exiting. Re-entering an isolate is allowed.
@ -320,6 +391,7 @@ impl Isolate {
pub(crate) struct IsolateAnnex { pub(crate) struct IsolateAnnex {
isolate: *mut Isolate, isolate: *mut Isolate,
mutex: Mutex<()>, mutex: Mutex<()>,
slots: HashMap<TypeId, RefCell<Box<dyn Any>>>,
} }
impl IsolateAnnex { impl IsolateAnnex {
@ -327,6 +399,7 @@ impl IsolateAnnex {
Self { Self {
isolate, isolate,
mutex: Mutex::new(()), mutex: Mutex::new(()),
slots: HashMap::new(),
} }
} }
} }
@ -346,34 +419,17 @@ impl IsolateHandle {
} }
pub(crate) fn new(isolate: &mut Isolate) -> Self { pub(crate) fn new(isolate: &mut Isolate) -> Self {
let annex_ptr = isolate.get_annex(); Self(isolate.get_annex_arc())
if annex_ptr.is_null() {
let annex_arc = Arc::new(IsolateAnnex::new(isolate));
let annex_ptr = Arc::into_raw(annex_arc.clone());
unsafe {
isolate.set_annex(annex_ptr as *mut IsolateAnnex);
}
IsolateHandle(annex_arc)
} else {
let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
Arc::into_raw(annex_arc.clone());
IsolateHandle(annex_arc)
}
} }
fn dispose_isolate(isolate: &mut Isolate) { fn dispose_isolate(isolate: &mut Isolate) {
let annex_ptr = isolate.get_annex(); let annex = isolate.get_annex_mut();
if !annex_ptr.is_null() {
unsafe {
{ {
let _lock = (*annex_ptr).mutex.lock().unwrap(); let _lock = annex.mutex.lock().unwrap();
isolate.set_data(0, null_mut()); annex.isolate = null_mut();
let isolate_ptr = replace(&mut (*annex_ptr).isolate, null_mut());
assert_eq!(isolate as *mut _, isolate_ptr);
}
Arc::from_raw(annex_ptr);
};
} }
unsafe { Arc::from_raw(annex) };
unsafe { isolate.set_data(0, null_mut()) };
} }
/// Forcefully terminate the current thread of JavaScript execution /// Forcefully terminate the current thread of JavaScript execution

161
tests/slots.rs Normal file
View file

@ -0,0 +1,161 @@
// These tests mock out an organizational pattern that we hope to use in Deno.
// There we want to wrap v8::Isolate to provide extra functionality at multiple
// layers: v8::Isolate -> CoreIsolate -> EsIsolate
// This demonstrates how this can be done in a safe way.
use rusty_v8 as v8;
use std::ops::Deref;
use std::ops::DerefMut;
use std::rc::Rc;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::sync::Once;
struct CoreIsolate(v8::OwnedIsolate);
/// State owned by a `CoreIsolate`, kept alive inside an isolate slot.
struct CoreIsolateState {
// Shared counter bumped by the `Drop` impl so tests can observe cleanup.
drop_count: Rc<AtomicUsize>,
// Arbitrary test payload, read/written via `get_i`/`set_i`.
i: usize,
}
/// Records that the state was destroyed, so the tests can assert that
/// dropping the isolate also drops the slot contents exactly once.
impl Drop for CoreIsolateState {
  fn drop(&mut self) {
    let counter = &self.drop_count;
    counter.fetch_add(1, Ordering::SeqCst);
  }
}
impl CoreIsolate {
  /// Creates a fresh isolate, initializing the v8 platform exactly once per
  /// process, and seeds it with a `CoreIsolateState` slot.
  fn new(drop_count: Rc<AtomicUsize>) -> CoreIsolate {
    static START: Once = Once::new();
    START.call_once(|| {
      v8::V8::initialize_platform(v8::new_default_platform().unwrap());
      v8::V8::initialize();
    });
    let mut isolate = v8::Isolate::new(Default::default());
    isolate.set_slot(CoreIsolateState { drop_count, i: 0 });
    CoreIsolate(isolate)
  }

  /// Compiles and runs `code` in a fresh context.
  /// Returns false if there was an error (compile panics; runtime errors
  /// surface as `None` from `run`).
  fn execute(&mut self, code: &str) -> bool {
    let mut handle_scope = v8::HandleScope::new(&mut self.0);
    let scope = handle_scope.enter();
    let context = v8::Context::new(scope);
    let mut context_scope = v8::ContextScope::new(scope, context);
    let scope = context_scope.enter();
    let source = v8::String::new(scope, code).unwrap();
    let mut script = v8::Script::compile(scope, context, source, None).unwrap();
    script.run(scope, context).is_some()
  }

  /// Reads the `i` value stored in this isolate's `CoreIsolateState` slot.
  fn get_i(&self) -> usize {
    self.0.get_slot::<CoreIsolateState>().unwrap().i
  }

  /// Overwrites the `i` value in this isolate's `CoreIsolateState` slot.
  fn set_i(&self, i: usize) {
    self.0.get_slot_mut::<CoreIsolateState>().unwrap().i = i;
  }
}
impl Deref for CoreIsolate {
type Target = v8::Isolate;
fn deref(&self) -> &v8::Isolate {
&self.0
}
}
impl DerefMut for CoreIsolate {
fn deref_mut(&mut self) -> &mut v8::Isolate {
&mut self.0
}
}
struct EsIsolate(CoreIsolate);
/// State owned by an `EsIsolate`; lives in an isolate slot alongside (but
/// independent of) `CoreIsolateState`.
struct EsIsolateState {
// Shared counter bumped by the `Drop` impl so tests can observe cleanup.
drop_count: Rc<AtomicUsize>,
// Arbitrary test payload, read/written via `get_x`/`set_x`.
x: bool,
}
/// Bumps the shared counter on destruction; together with the
/// `CoreIsolateState` drop this should raise the count by two per isolate.
impl Drop for EsIsolateState {
  fn drop(&mut self) {
    let counter = &self.drop_count;
    counter.fetch_add(1, Ordering::SeqCst);
  }
}
impl EsIsolate {
  /// Builds the inner `CoreIsolate` and then registers this layer's own
  /// slot state on the same underlying isolate (via `DerefMut`).
  fn new(drop_count: Rc<AtomicUsize>) -> Self {
    let mut inner = CoreIsolate::new(drop_count.clone());
    inner.set_slot(EsIsolateState {
      drop_count,
      x: false,
    });
    EsIsolate(inner)
  }

  /// Reads the `x` flag stored in this isolate's `EsIsolateState` slot.
  fn get_x(&self) -> bool {
    self.0.get_slot::<EsIsolateState>().unwrap().x
  }

  /// Overwrites the `x` flag in this isolate's `EsIsolateState` slot.
  fn set_x(&self, x: bool) {
    self.0.get_slot_mut::<EsIsolateState>().unwrap().x = x;
  }
}
impl Deref for EsIsolate {
type Target = CoreIsolate;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for EsIsolate {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
#[test]
fn slots_layer1() {
  let drops = Rc::new(AtomicUsize::new(0));
  let mut isolate = CoreIsolate::new(drops.clone());

  // Script execution works, and runtime errors are reported as `false`.
  assert!(isolate.execute("1 + 1"));
  assert!(!isolate.execute("throw 'foo'"));

  // Slot-backed state starts at its seeded value and is writable.
  assert_eq!(0, isolate.get_i());
  isolate.set_i(123);
  assert_eq!(123, isolate.get_i());
  assert_eq!(drops.load(Ordering::SeqCst), 0);

  // Check that we can deref CoreIsolate by running a random v8::Isolate method.
  isolate.run_microtasks();

  // Dropping the wrapper drops exactly one slot state.
  drop(isolate);
  assert_eq!(drops.load(Ordering::SeqCst), 1);
}
#[test]
fn slots_layer2() {
  let drops = Rc::new(AtomicUsize::new(0));
  let mut isolate = EsIsolate::new(drops.clone());

  // We can deref to CoreIsolate and use execute...
  assert!(isolate.execute("1 + 1"));
  assert!(!isolate.execute("throw 'bar'"));

  // ...and use this layer's own get_x/set_x.
  assert!(!isolate.get_x());
  isolate.set_x(true);
  assert!(isolate.get_x());

  // Check that we can deref all the way down to a v8::Isolate method.
  isolate.run_microtasks();

  // When we drop, both CoreIsolateState and EsIsolateState should be dropped.
  assert_eq!(drops.load(Ordering::SeqCst), 0);
  drop(isolate);
  assert_eq!(drops.load(Ordering::SeqCst), 2);
}