// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

use std::ops::Deref;
use std::ops::DerefMut;
use std::ops::Range;
use std::rc::Rc;

use super::rawbytes;
use super::transl8::FromV8;

/// A V8Slice encapsulates a slice that's been borrowed from a JavaScript
/// ArrayBuffer object. JavaScript objects can normally be garbage collected,
/// but the existence of a V8Slice inhibits this until it is dropped. It
/// behaves much like an `Arc<[u8]>`.
///
/// # Cloning
/// Cloning a V8Slice does not clone the contents of the buffer;
/// it creates a new reference to that buffer.
///
/// To actually clone the contents of the buffer, do
/// `let copy = Vec::from(&*zero_copy_buf);`
#[derive(Clone)]
pub struct V8Slice {
  pub(crate) store: v8::SharedRef<v8::BackingStore>,
  pub(crate) range: Range<usize>,
}
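
// A note on the fields: `store` keeps the JS ArrayBuffer's backing memory
// alive (and fixed in place) for as long as this slice exists, while `range`
// selects the byte window it exposes; `to_ranged_buffer` below computes that
// window.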

// SAFETY: unsafe trait must have unsafe implementation
unsafe impl Send for V8Slice {}

impl V8Slice {
  pub fn from_buffer(
    buffer: v8::Local<v8::ArrayBuffer>,
    range: Range<usize>,
  ) -> Result<Self, v8::DataError> {
    let store = buffer.get_backing_store();
    if store.is_shared() {
      return Err(v8::DataError::BadType {
        actual: "shared ArrayBufferView",
        expected: "non-shared ArrayBufferView",
      });
    }
    Ok(Self { store, range })
  }

  fn as_slice(&self) -> &[u8] {
    // SAFETY: v8::SharedRef<v8::BackingStore> is similar to Arc<[u8]>:
    // it points to a fixed contiguous slice of bytes on the heap.
    // We assume it is initialized and thus safe to read (though it may not
    // contain meaningful data).
    unsafe { &*(&self.store[self.range.clone()] as *const _ as *const [u8]) }
  }

  fn as_slice_mut(&mut self) -> &mut [u8] {
    #[allow(clippy::cast_ref_to_mut)]
    // SAFETY: v8::SharedRef<v8::BackingStore> is similar to Arc<[u8]>:
    // it points to a fixed contiguous slice of bytes on the heap.
    // It is safe-ish to mutate concurrently because the allocation cannot be
    // shrunk, grown, moved, or reallocated, which avoids dangling references
    // (unlike a Vec). Concurrent writes cannot cause meaningful structural
    // invalidation since we treat these as opaque buffers ("bags of bytes");
    // concurrent mutation is simply an accepted fact of life.
    // In practice, V8Slices also do not have overlapping read/write phases.
    // TL;DR: permissive interior mutability on slices of bytes is "fine".
    unsafe {
      &mut *(&self.store[self.range.clone()] as *const _ as *mut [u8])
    }
  }
}

pub(crate) fn to_ranged_buffer<'s>(
  scope: &mut v8::HandleScope<'s>,
  value: v8::Local<v8::Value>,
) -> Result<(v8::Local<'s, v8::ArrayBuffer>, Range<usize>), v8::DataError> {
  if let Ok(view) = v8::Local::<v8::ArrayBufferView>::try_from(value) {
    let (offset, len) = (view.byte_offset(), view.byte_length());
    let buffer = view.buffer(scope).ok_or(v8::DataError::NoData {
      expected: "view to have a buffer",
    })?;
    let buffer = v8::Local::new(scope, buffer); // recreate handle to avoid lifetime issues
    return Ok((buffer, offset..offset + len));
  }
  let b: v8::Local<v8::ArrayBuffer> = value.try_into()?;
  let b = v8::Local::new(scope, b); // recreate handle to avoid lifetime issues
  Ok((b, 0..b.byte_length()))
}
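
// For example: a typed-array view created in JS as `new Uint8Array(buf, 8, 16)`
// takes the first branch and yields `buf` with the range 8..24 (byte_offset 8
// plus byte_length 16), while a bare N-byte ArrayBuffer takes the fallback
// branch and yields the full range 0..N.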

impl FromV8 for V8Slice {
  fn from_v8(
    scope: &mut v8::HandleScope,
    value: v8::Local<v8::Value>,
  ) -> Result<Self, crate::Error> {
    to_ranged_buffer(scope, value)
      .and_then(|(b, r)| Self::from_buffer(b, r))
      .map_err(|_| crate::Error::ExpectedBuffer)
  }
}

impl Deref for V8Slice {
  type Target = [u8];
  fn deref(&self) -> &[u8] {
    self.as_slice()
  }
}

impl DerefMut for V8Slice {
  fn deref_mut(&mut self) -> &mut [u8] {
    self.as_slice_mut()
  }
}

impl AsRef<[u8]> for V8Slice {
  fn as_ref(&self) -> &[u8] {
    self.as_slice()
  }
}

impl AsMut<[u8]> for V8Slice {
  fn as_mut(&mut self) -> &mut [u8] {
    self.as_slice_mut()
  }
}

// Implement V8Slice -> bytes::Bytes
impl V8Slice {
  /// Decomposes an `Rc<V8Slice>` into the raw parts the `bytes::Bytes`
  /// vtable needs: the data pointer and length of the viewed slice, plus the
  /// raw `Rc` pointer that keeps the backing store alive.
  fn rc_into_byte_parts(self: Rc<Self>) -> (*const u8, usize, *mut V8Slice) {
    let (ptr, len) = {
      let slice = self.as_ref();
      (slice.as_ptr(), slice.len())
    };
    let rc_raw = Rc::into_raw(self);
    let data = rc_raw as *mut V8Slice;
    (ptr, len, data)
  }
}

impl From<V8Slice> for bytes::Bytes {
  fn from(v8slice: V8Slice) -> Self {
    let (ptr, len, data) = Rc::new(v8slice).rc_into_byte_parts();
    rawbytes::RawBytes::new_raw(ptr, len, data.cast(), &V8SLICE_VTABLE)
  }
}
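
// Usage sketch (illustrative, not from the original file):
//
//   let bytes: bytes::Bytes = v8slice.into(); // zero-copy handoff
//   let copy = bytes.clone();                 // routed through v8slice_clone
//
// The resulting `Bytes` shares the V8 buffer rather than copying it; dropping
// the last reference releases the Rc and, with it, the backing store.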

// NOTE: in the limit we could avoid the extra indirection and use the C++
// shared_ptr directly, but we can't store both the underlying data ptr and
// the ctrl ptr, so instead we use a shared Rust ptr (Rc/Arc) that itself
// controls the C++ shared_ptr.
const V8SLICE_VTABLE: rawbytes::Vtable = rawbytes::Vtable {
  clone: v8slice_clone,
  drop: v8slice_drop,
  to_vec: v8slice_to_vec,
};
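
// Refcount bookkeeping implemented by the functions below: `v8slice_clone`
// leaves the Rc count one higher (the new Bytes holds a reference), while
// `v8slice_to_vec` and `v8slice_drop` release the calling Bytes' reference.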

unsafe fn v8slice_clone(
  data: &rawbytes::AtomicPtr<()>,
  ptr: *const u8,
  len: usize,
) -> bytes::Bytes {
  // Reconstruct the Rc, mint one extra reference for the new Bytes, then
  // forget the original so the refcount ends up one higher than before.
  let rc = Rc::from_raw(*data as *const V8Slice);
  let (_, _, data) = rc.clone().rc_into_byte_parts();
  std::mem::forget(rc);
  // NOTE: `bytes::Bytes` does bounds checking so we trust its ptr, len inputs
  // and must use them to allow cloning Bytes it has sliced
  rawbytes::RawBytes::new_raw(ptr, len, data.cast(), &V8SLICE_VTABLE)
}

unsafe fn v8slice_to_vec(
  data: &rawbytes::AtomicPtr<()>,
  ptr: *const u8,
  len: usize,
) -> Vec<u8> {
  // We cannot hand ownership of V8's allocation to a Vec (it was not
  // allocated by Rust), so copy the referenced bytes out instead.
  // `bytes::Bytes` does bounds checking so we trust its ptr, len inputs,
  // which may refer to a sub-slice of the original buffer.
  let vec = std::slice::from_raw_parts(ptr, len).to_vec();
  // `to_vec` consumes the calling Bytes, so release its Rc reference.
  drop(Rc::from_raw(*data as *const V8Slice));
  vec
}

unsafe fn v8slice_drop(
  data: &mut rawbytes::AtomicPtr<()>,
  _: *const u8,
  _: usize,
) {
  drop(Rc::from_raw(*data as *const V8Slice))
}
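
// A minimal smoke test, added as a sketch: it assumes this crate depends on
// `bytes` (as the conversion above does) and that nothing else in the test
// process has initialized V8 yet; the setup below is the usual rusty_v8
// boilerplate and can only run once per process.
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn v8slice_to_bytes_shares_buffer() {
    let platform = v8::new_default_platform(0, false).make_shared();
    v8::V8::initialize_platform(platform);
    v8::V8::initialize();

    let isolate = &mut v8::Isolate::new(Default::default());
    let scope = &mut v8::HandleScope::new(isolate);
    let context = v8::Context::new(scope);
    let scope = &mut v8::ContextScope::new(scope, context);

    // Wrap a fresh 4-byte ArrayBuffer in a V8Slice covering all of it.
    let buffer = v8::ArrayBuffer::new(scope, 4);
    let mut slice = V8Slice::from_buffer(buffer, 0..4).unwrap();
    slice.as_mut().copy_from_slice(&[1, 2, 3, 4]);

    // Converting into `bytes::Bytes` preserves the contents without copying;
    // cloning the Bytes is routed through `v8slice_clone`.
    let bytes: bytes::Bytes = slice.into();
    assert_eq!(&bytes[..], &[1, 2, 3, 4]);
    let copy = bytes.clone();
    assert_eq!(copy, bytes);
  }
}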