Mirror of https://github.com/denoland/rusty_v8.git

Add IsolateHandle (#274)

Co-authored-by: Bert Belder <bertbelder@gmail.com>
Ryan Dahl 2020-02-12 11:33:58 -05:00 committed by GitHub
parent 32abe84dc6
commit 27277ad801
4 changed files with 237 additions and 54 deletions
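The new IsolateHandle is a reference-counted handle obtained from an Isolate via thread_safe_handle(); its methods (terminate_execution, cancel_terminate_execution, is_execution_terminating, request_interrupt) return a bool and report false once the isolate has been disposed. A minimal usage sketch, mirroring the tests added in this commit (it assumes V8 has already been initialized, e.g. by the test suite's setup() helper):

  let mut params = v8::Isolate::create_params();
  params.set_array_buffer_allocator(v8::new_default_allocator());
  let mut isolate = v8::Isolate::new(params);

  // A clonable handle that is Send + Sync and can outlive the isolate.
  let handle = isolate.thread_safe_handle();

  // Once the isolate is dropped (and therefore disposed), every method on
  // the handle reports failure instead of touching freed memory.
  drop(isolate);
  assert_eq!(false, handle.terminate_execution());
  assert_eq!(false, handle.cancel_terminate_execution());
  assert_eq!(false, handle.is_execution_terminating());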


@@ -3,6 +3,7 @@ use std::ptr::NonNull;
 use crate::InIsolate;
 use crate::Isolate;
+use crate::IsolateHandle;
 use crate::Local;
 use crate::ToLocal;
 use crate::Value;
@@ -34,17 +35,15 @@ extern "C" {
 #[repr(C)]
 pub struct Global<T> {
   value: Option<NonNull<T>>,
-  isolate: Option<NonNull<Isolate>>,
+  isolate_handle: Option<IsolateHandle>,
 }

-unsafe impl<T> Send for Global<T> {}
-
 impl<T> Global<T> {
   /// Construct a Global with no storage cell.
   pub fn new() -> Self {
     Self {
       value: None,
-      isolate: None,
+      isolate_handle: None,
     }
   }
@@ -60,7 +59,7 @@ impl<T> Global<T> {
     Self {
       value: other_value
         .map(|v| unsafe { transmute(v8__Global__New(isolate, transmute(v))) }),
-      isolate: other_value.map(|_| isolate.into()),
+      isolate_handle: other_value.map(|_| IsolateHandle::new(isolate)),
     }
   }
@@ -105,7 +104,7 @@ impl<T> Global<T> {
         )
       },
     }
-    self.isolate = other_value.map(|_| isolate.into());
+    self.isolate_handle = other_value.map(|_| IsolateHandle::new(isolate));
   }

   /// If non-empty, destroy the underlying storage cell
@@ -114,10 +113,13 @@ impl<T> Global<T> {
     self.set(scope, None);
   }

-  fn check_isolate(&self, other: &Isolate) {
+  fn check_isolate(&self, isolate: &mut Isolate) {
     match self.value {
-      None => assert_eq!(self.isolate, None),
-      Some(_) => assert_eq!(self.isolate.unwrap(), other.into()),
+      None => assert!(self.isolate_handle.is_none()),
+      Some(_) => assert_eq!(
+        unsafe { self.isolate_handle.as_ref().unwrap().get_isolate_ptr() },
+        isolate as *mut _
+      ),
     }
   }
 }
@@ -130,30 +132,52 @@ impl<T> Default for Global<T> {
 impl<T> Drop for Global<T> {
   fn drop(&mut self) {
-    if !self.is_empty() {
-      panic!("Global handle dropped while holding a value");
+    match &mut self.value {
+      None => {
+        // This global handle is empty.
+        assert!(self.isolate_handle.is_none())
+      }
+      Some(_)
+        if unsafe {
+          self
+            .isolate_handle
+            .as_ref()
+            .unwrap()
+            .get_isolate_ptr()
+            .is_null()
+        } =>
+      {
+        // This global handle is associated with an Isolate that has already
+        // been disposed.
+      }
+      addr @ Some(_) => unsafe {
+        // Destroy the storage cell that contains the contents of this Global.
+        v8__Global__Reset__0(
+          &mut *(addr as *mut Option<NonNull<T>> as *mut *mut Value),
+        )
+      },
     }
   }
 }

 pub trait AnyHandle<T> {
-  fn read(self, isolate: &Isolate) -> Option<NonNull<T>>;
+  fn read(self, isolate: &mut Isolate) -> Option<NonNull<T>>;
 }

 impl<'sc, T> AnyHandle<T> for Local<'sc, T> {
-  fn read(self, _isolate: &Isolate) -> Option<NonNull<T>> {
+  fn read(self, _isolate: &mut Isolate) -> Option<NonNull<T>> {
     Some(self.as_non_null())
   }
 }

 impl<'sc, T> AnyHandle<T> for Option<Local<'sc, T>> {
-  fn read(self, _isolate: &Isolate) -> Option<NonNull<T>> {
+  fn read(self, _isolate: &mut Isolate) -> Option<NonNull<T>> {
     self.map(|local| local.as_non_null())
   }
 }

 impl<'sc, T> AnyHandle<T> for &Global<T> {
-  fn read(self, isolate: &Isolate) -> Option<NonNull<T>> {
+  fn read(self, isolate: &mut Isolate) -> Option<NonNull<T>> {
     self.check_isolate(isolate);
     self.value
   }
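
The practical effect of the reworked Drop impl is that a Global may now outlive its Isolate: if the isolate has already been disposed, the isolate pointer inside the shared annex has been nulled out and dropping the Global becomes a no-op instead of a panic; only a Global holding a value for a live isolate actually resets its storage cell. A short sketch of the difference (same shape as the global_handle_drop test added further down; assumes V8 is already initialized):

  let mut g = v8::Global::<v8::String>::new();
  {
    let mut params = v8::Isolate::create_params();
    params.set_array_buffer_allocator(v8::new_default_allocator());
    let mut isolate = v8::Isolate::new(params);
    let mut hs = v8::HandleScope::new(&mut isolate);
    let scope = hs.enter();
    g.set(scope, v8::String::new(scope, "foo").unwrap());
  } // The isolate is dropped and disposed here.

  // Before this commit, dropping a non-empty Global panicked; now the nulled
  // isolate pointer in the annex makes this drop a silent no-op.
  drop(g);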


@@ -18,10 +18,15 @@ use crate::ScriptOrModule;
 use crate::StartupData;
 use crate::String;
 use crate::Value;
 use std::ffi::c_void;
+use std::mem::replace;
 use std::ops::Deref;
 use std::ops::DerefMut;
+use std::ptr::null_mut;
 use std::ptr::NonNull;
+use std::sync::Arc;
+use std::sync::Mutex;

 pub type MessageCallback = extern "C" fn(Local<Message>, Local<Value>);
@@ -161,22 +166,26 @@ impl Isolate {
     CreateParams::new()
   }

+  pub fn thread_safe_handle(&mut self) -> IsolateHandle {
+    IsolateHandle::new(self)
+  }
+
   /// Associate embedder-specific data with the isolate. |slot| has to be
   /// between 0 and GetNumberOfDataSlots() - 1.
   pub unsafe fn set_data(&mut self, slot: u32, ptr: *mut c_void) {
-    v8__Isolate__SetData(self, slot, ptr)
+    v8__Isolate__SetData(self, slot + 1, ptr)
   }

   /// Retrieve embedder-specific data from the isolate.
   /// Returns NULL if SetData has never been called for the given |slot|.
   pub fn get_data(&self, slot: u32) -> *mut c_void {
-    unsafe { v8__Isolate__GetData(self, slot) }
+    unsafe { v8__Isolate__GetData(self, slot + 1) }
   }

   /// Returns the maximum number of available embedder data slots. Valid slots
   /// are in the range of 0 - GetNumberOfDataSlots() - 1.
   pub fn get_number_of_data_slots(&self) -> u32 {
-    unsafe { v8__Isolate__GetNumberOfDataSlots(self) }
+    unsafe { v8__Isolate__GetNumberOfDataSlots(self) - 1 }
   }

   /// Sets this isolate as the entered one for the current thread.
@@ -283,23 +292,97 @@ impl Isolate {
     }
   }

+  /// Runs the default MicrotaskQueue until it gets empty.
+  /// Any exceptions thrown by microtask callbacks are swallowed.
+  pub fn run_microtasks(&mut self) {
+    unsafe { v8__Isolate__RunMicrotasks(self) }
+  }
+
+  /// Enqueues the callback to the default MicrotaskQueue
+  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
+    unsafe { v8__Isolate__EnqueueMicrotask(self, microtask) }
+  }
+
+  /// Disposes the isolate. The isolate must not be entered by any
+  /// thread to be disposable.
+  unsafe fn dispose(&mut self) {
+    IsolateHandle::dispose(self);
+    v8__Isolate__Dispose(self)
+  }
+}
+
+pub(crate) struct IsolateAnnex {
+  isolate: *mut Isolate,
+  mutex: Mutex<()>,
+}
+
+impl IsolateAnnex {
+  fn new(isolate: &mut Isolate) -> Self {
+    Self {
+      isolate,
+      mutex: Mutex::new(()),
+    }
+  }
+}
+
+#[derive(Clone)]
+pub struct IsolateHandle(Arc<IsolateAnnex>);
+
+unsafe impl Send for IsolateHandle {}
+unsafe impl Sync for IsolateHandle {}
+
+impl IsolateHandle {
+  // This function is marked unsafe because it must be called only with either
+  // IsolateAnnex::mutex locked, or from the main thread associated with the V8
+  // isolate.
+  pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut Isolate {
+    self.0.isolate
+  }
+
+  fn dispose(isolate: &mut Isolate) {
+    let annex_ptr = isolate.get_data(0) as *mut IsolateAnnex;
+    if !annex_ptr.is_null() {
+      unsafe {
+        isolate.set_data(0, null_mut());
+        let _lock = (*annex_ptr).mutex.lock().unwrap();
+        let isolate_ptr = replace(&mut (*annex_ptr).isolate, null_mut());
+        assert_eq!(isolate as *mut _, isolate_ptr);
+        Arc::from_raw(annex_ptr);
+      };
+    }
+  }
+
+  pub(crate) fn new(isolate: &mut Isolate) -> Self {
+    let annex_ptr = isolate.get_data(0) as *mut IsolateAnnex;
+    if annex_ptr.is_null() {
+      let annex_arc = Arc::new(IsolateAnnex::new(isolate));
+      let annex_ptr = Arc::into_raw(annex_arc.clone());
+      unsafe {
+        isolate.set_data(0, annex_ptr as *mut c_void);
+      }
+      IsolateHandle(annex_arc)
+    } else {
+      let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
+      Arc::into_raw(annex_arc.clone());
+      IsolateHandle(annex_arc)
+    }
+  }
+
   /// Forcefully terminate the current thread of JavaScript execution
   /// in the given isolate.
   ///
   /// This method can be used by any thread even if that thread has not
   /// acquired the V8 lock with a Locker object.
-  pub fn terminate_execution(&self) {
-    unsafe { v8__Isolate__TerminateExecution(self) }
-  }
-
-  /// Is V8 terminating JavaScript execution.
   ///
-  /// Returns true if JavaScript execution is currently terminating
-  /// because of a call to TerminateExecution. In that case there are
-  /// still JavaScript frames on the stack and the termination
-  /// exception is still active.
-  pub fn is_execution_terminating(&self) -> bool {
-    unsafe { v8__Isolate__IsExecutionTerminating(self) }
+  /// Returns false if Isolate was already destroyed.
+  pub fn terminate_execution(&self) -> bool {
+    let _lock = self.0.mutex.lock().unwrap();
+    if self.0.isolate.is_null() {
+      false
+    } else {
+      unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
+      true
+    }
   }

   /// Resume execution capability in the given isolate, whose execution
@@ -314,19 +397,33 @@ impl Isolate {
   ///
   /// This method can be used by any thread even if that thread has not
   /// acquired the V8 lock with a Locker object.
-  pub fn cancel_terminate_execution(&self) {
-    unsafe { v8__Isolate__CancelTerminateExecution(self) }
+  ///
+  /// Returns false if Isolate was already destroyed.
+  pub fn cancel_terminate_execution(&self) -> bool {
+    let _lock = self.0.mutex.lock().unwrap();
+    if self.0.isolate.is_null() {
+      false
+    } else {
+      unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
+      true
+    }
   }

-  /// Runs the default MicrotaskQueue until it gets empty.
-  /// Any exceptions thrown by microtask callbacks are swallowed.
-  pub fn run_microtasks(&mut self) {
-    unsafe { v8__Isolate__RunMicrotasks(self) }
-  }
-
-  /// Enqueues the callback to the default MicrotaskQueue
-  pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
-    unsafe { v8__Isolate__EnqueueMicrotask(self, microtask) }
+  /// Is V8 terminating JavaScript execution.
+  ///
+  /// Returns true if JavaScript execution is currently terminating
+  /// because of a call to TerminateExecution. In that case there are
+  /// still JavaScript frames on the stack and the termination
+  /// exception is still active.
+  ///
+  /// Returns false if Isolate was already destroyed.
+  pub fn is_execution_terminating(&self) -> bool {
+    let _lock = self.0.mutex.lock().unwrap();
+    if self.0.isolate.is_null() {
+      false
+    } else {
+      unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
+    }
   }

   /// Request V8 to interrupt long running JavaScript code and invoke
@@ -335,6 +432,8 @@ impl Isolate {
   /// There may be a number of interrupt requests in flight.
   /// Can be called from another thread without acquiring a |Locker|.
   /// Registered |callback| must not reenter interrupted Isolate.
+  ///
+  /// Returns false if Isolate was already destroyed.
   // Clippy warns that this method is dereferencing a raw pointer, but it is
   // not: https://github.com/rust-lang/rust-clippy/issues/3045
   #[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -342,19 +441,21 @@ impl Isolate {
     &self,
     callback: InterruptCallback,
     data: *mut c_void,
-  ) {
-    unsafe { v8__Isolate__RequestInterrupt(self, callback, data) }
-  }
-
-  /// Disposes the isolate. The isolate must not be entered by any
-  /// thread to be disposable.
-  pub unsafe fn dispose(&mut self) {
-    v8__Isolate__Dispose(self)
+  ) -> bool {
+    let _lock = self.0.mutex.lock().unwrap();
+    if self.0.isolate.is_null() {
+      false
+    } else {
+      unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
+      true
+    }
   }
 }

 /// Internal method for constructing an OwnedIsolate.
-pub unsafe fn new_owned_isolate(isolate_ptr: *mut Isolate) -> OwnedIsolate {
+pub(crate) unsafe fn new_owned_isolate(
+  isolate_ptr: *mut Isolate,
+) -> OwnedIsolate {
   OwnedIsolate(NonNull::new(isolate_ptr).unwrap())
 }
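
Since the handle only stores the Arc'd annex and takes the annex mutex before dereferencing the raw isolate pointer, it can be cloned and moved to other threads (the doc comments above note that no Locker is required). A hedged sketch of cross-thread termination follows; the spawned watchdog thread and the long-running script are illustrative only and not part of this commit (the TODO in the tests notes that threaded coverage is still missing):

  let mut params = v8::Isolate::create_params();
  params.set_array_buffer_allocator(v8::new_default_allocator());
  let mut isolate = v8::Isolate::new(params);
  let handle = isolate.thread_safe_handle();

  // IsolateHandle is Clone + Send + Sync, so it can cross a thread boundary.
  let watchdog = std::thread::spawn(move || {
    std::thread::sleep(std::time::Duration::from_secs(1));
    // Returns false if the isolate has already been disposed by then.
    handle.terminate_execution()
  });

  // ... run scripts on this thread; an endless loop such as "while(true){}"
  // would be cut short by the termination request ...

  let _did_terminate = watchdog.join().unwrap();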


@@ -132,6 +132,7 @@ pub use isolate::CreateParams;
 pub use isolate::HostImportModuleDynamicallyCallback;
 pub use isolate::HostInitializeImportMetaObjectCallback;
 pub use isolate::Isolate;
+pub use isolate::IsolateHandle;
 pub use isolate::MessageCallback;
 pub use isolate::OwnedIsolate;
 pub use isolate::PromiseRejectCallback;


@@ -126,6 +126,28 @@ fn global_handles() {
   assert!(g5.is_empty());
 }

+#[test]
+fn global_handle_drop() {
+  let _setup_guard = setup();
+
+  // Global 'g1' will be dropped _after_ the Isolate has been disposed.
+  let mut g1 = v8::Global::<v8::String>::new();
+
+  let mut params = v8::Isolate::create_params();
+  params.set_array_buffer_allocator(v8::new_default_allocator());
+  let mut isolate = v8::Isolate::new(params);
+
+  let mut hs = v8::HandleScope::new(&mut isolate);
+  let scope = hs.enter();
+
+  let l1 = v8::String::new(scope, "foo").unwrap();
+  g1.set(scope, l1);
+
+  // Global 'g2' will be dropped _before_ the Isolate has been disposed.
+  let l2 = v8::String::new(scope, "bar").unwrap();
+  let _g2 = v8::Global::new_from(scope, l2);
+}
+
 #[test]
 fn test_string() {
   let _setup_guard = setup();
@@ -451,12 +473,47 @@ fn throw_exception() {
   }
 }

+#[test]
+fn thread_safe_handle_drop_after_isolate() {
+  let _setup_guard = setup();
+  let mut params = v8::Isolate::create_params();
+  params.set_array_buffer_allocator(v8::new_default_allocator());
+  let mut isolate = v8::Isolate::new(params);
+  let handle = isolate.thread_safe_handle();
+  // We can call it twice.
+  let handle_ = isolate.thread_safe_handle();
+  // Check that handle is Send and Sync.
+  fn f<S: Send + Sync>(_: S) {}
+  f(handle_);
+  // All methods on IsolateHandle should return false after the isolate is
+  // dropped.
+  drop(isolate);
+  assert_eq!(false, handle.terminate_execution());
+  assert_eq!(false, handle.cancel_terminate_execution());
+  assert_eq!(false, handle.is_execution_terminating());
+  static CALL_COUNT: AtomicUsize = AtomicUsize::new(0);
+  extern "C" fn callback(
+    _isolate: &mut v8::Isolate,
+    data: *mut std::ffi::c_void,
+  ) {
+    assert_eq!(data, std::ptr::null_mut());
+    CALL_COUNT.fetch_add(1, Ordering::SeqCst);
+  }
+  assert_eq!(
+    false,
+    handle.request_interrupt(callback, std::ptr::null_mut())
+  );
+  assert_eq!(CALL_COUNT.load(Ordering::SeqCst), 0);
+}
+
+// TODO(ry) This test should use threads
 #[test]
 fn terminate_execution() {
   let _setup_guard = setup();
   let mut params = v8::Isolate::create_params();
   params.set_array_buffer_allocator(v8::new_default_allocator());
   let mut isolate = v8::Isolate::new(params);
+  let handle = isolate.thread_safe_handle();
   // Originally run fine.
   {
     let mut hs = v8::HandleScope::new(&mut isolate);
@@ -469,7 +526,7 @@ fn terminate_execution() {
     assert!(result.same_value(true_val));
   }
   // Terminate.
-  isolate.terminate_execution();
+  handle.terminate_execution();
   // Below run should fail with terminated knowledge.
   {
     let mut hs = v8::HandleScope::new(&mut isolate);
@@ -484,7 +541,7 @@ fn terminate_execution() {
     assert!(tc.has_terminated());
   }
   // Cancel termination.
-  isolate.cancel_terminate_execution();
+  handle.cancel_terminate_execution();
   // Works again.
   {
     let mut hs = v8::HandleScope::new(&mut isolate);
@@ -498,12 +555,14 @@ fn terminate_execution() {
   }
 }

+// TODO(ry) This test should use threads
 #[test]
 fn request_interrupt_small_scripts() {
   let _setup_guard = setup();
   let mut params = v8::Isolate::create_params();
   params.set_array_buffer_allocator(v8::new_default_allocator());
   let mut isolate = v8::Isolate::new(params);
+  let handle = isolate.thread_safe_handle();
   {
     let mut hs = v8::HandleScope::new(&mut isolate);
     let scope = hs.enter();
@@ -519,9 +578,7 @@ fn request_interrupt_small_scripts() {
     assert_eq!(data, std::ptr::null_mut());
     CALL_COUNT.fetch_add(1, Ordering::SeqCst);
   }
-  scope
-    .isolate()
-    .request_interrupt(callback, std::ptr::null_mut());
+  handle.request_interrupt(callback, std::ptr::null_mut());
   eval(scope, context, "(function(x){return x;})(1);");
   assert_eq!(CALL_COUNT.load(Ordering::SeqCst), 1);
 }