
cleanup(serde_v8): simpler ZeroCopyBuf (#14095)

Aaron O'Mullan 2022-04-02 00:10:42 +02:00 committed by David Sherret
parent 3cc14196e2
commit d3aa444755
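
At a glance, the cleanup collapses ZeroCopyBuf's (backing_store, byte_offset, byte_length) triple into a store plus a single Range<usize>, so the span can be sliced directly with store[range] instead of recomputing offset + length at every use site. Below is a minimal, self-contained sketch of why the two shapes describe the same bytes; the Rc<Vec<u8>> stand-in for v8::SharedRef<v8::BackingStore> is an assumption made purely so the sketch runs without a V8 isolate.

use std::ops::Range;
use std::rc::Rc;

// Hypothetical stand-in for v8::SharedRef<v8::BackingStore>.
type Store = Rc<Vec<u8>>;

// Old shape: the span is spread across two integer fields.
struct OldZeroCopyBuf {
  backing_store: Store,
  byte_offset: usize,
  byte_length: usize,
}

// New shape: the span is a single Range<usize>.
struct NewZeroCopyBuf {
  store: Store,
  range: Range<usize>,
}

fn main() {
  let data: Store = Rc::new(vec![10, 20, 30, 40]);
  let old = OldZeroCopyBuf {
    backing_store: data.clone(),
    byte_offset: 1,
    byte_length: 2,
  };
  let new = NewZeroCopyBuf {
    store: data,
    range: 1..3,
  };
  // Both describe the same two bytes; the Range form indexes directly.
  assert_eq!(
    &old.backing_store[old.byte_offset..old.byte_offset + old.byte_length],
    &new.store[new.range.clone()]
  );
}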


@@ -1,8 +1,8 @@
 // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
-use std::cell::Cell;
 use std::ops::Deref;
 use std::ops::DerefMut;
+use std::ops::Range;
 
 use super::transl8::FromV8;
@@ -19,9 +19,8 @@ use super::transl8::FromV8;
 /// `let copy = Vec::from(&*zero_copy_buf);`
 #[derive(Clone)]
 pub struct ZeroCopyBuf {
-  backing_store: v8::SharedRef<v8::BackingStore>,
-  byte_offset: usize,
-  byte_length: usize,
+  store: v8::SharedRef<v8::BackingStore>,
+  range: Range<usize>,
 }
 
 unsafe impl Send for ZeroCopyBuf {}
@@ -29,21 +28,16 @@ unsafe impl Send for ZeroCopyBuf {}
 impl ZeroCopyBuf {
   pub fn from_buffer(
     buffer: v8::Local<v8::ArrayBuffer>,
-    byte_offset: usize,
-    byte_length: usize,
+    range: Range<usize>,
   ) -> Result<Self, v8::DataError> {
-    let backing_store = buffer.get_backing_store();
-    match backing_store.is_shared() {
-      true => Err(v8::DataError::BadType {
+    let store = buffer.get_backing_store();
+    if store.is_shared() {
+      return Err(v8::DataError::BadType {
         actual: "shared ArrayBufferView",
         expected: "non-shared ArrayBufferView",
-      }),
-      false => Ok(Self {
-        backing_store,
-        byte_offset,
-        byte_length,
-      }),
+      });
     }
+    Ok(Self { store, range })
   }
 
   pub fn from_view(
@@ -53,7 +47,17 @@ impl ZeroCopyBuf {
     let buffer = view.buffer(scope).ok_or(v8::DataError::NoData {
       expected: "view to have a buffer",
     })?;
-    Self::from_buffer(buffer, view.byte_offset(), view.byte_length())
+    let (offset, len) = (view.byte_offset(), view.byte_length());
+    Self::from_buffer(buffer, offset..offset + len)
+  }
+
+  fn as_slice(&self) -> &[u8] {
+    unsafe { &*(&self.store[self.range.clone()] as *const _ as *const [u8]) }
+  }
+
+  #[allow(clippy::cast_ref_to_mut)]
+  fn as_slice_mut(&mut self) -> &mut [u8] {
+    unsafe { &mut *(&self.store[self.range.clone()] as *const _ as *mut [u8]) }
   }
 }
 
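
The new as_slice / as_slice_mut helpers rely on the fact that indexing the backing store yields [Cell<u8>] (the removed get_backing_store_slice helpers further down show the same thing), and that Cell<u8> is layout-compatible with u8. Below is a standalone sketch of that cast under the same aliasing caveat the unsafe blocks carry; the view_bytes name and the Vec<Cell<u8>> storage are assumptions made only to keep the sketch runnable.

use std::cell::Cell;
use std::ops::Range;

// Re-view a range of [Cell<u8>] storage as a plain byte slice, mirroring the
// pointer cast in as_slice(). Cell<T> is #[repr(transparent)], so the layouts
// match; the unsafety is about aliasing, not layout.
fn view_bytes(cells: &[Cell<u8>], range: Range<usize>) -> &[u8] {
  // SAFETY: sound only while no other alias mutates these bytes during the
  // lifetime of the returned slice.
  unsafe { &*(&cells[range] as *const [Cell<u8>] as *const [u8]) }
}

fn main() {
  let storage: Vec<Cell<u8>> =
    vec![1u8, 2, 3, 4].into_iter().map(Cell::new).collect();
  assert_eq!(view_bytes(&storage, 1..3), &[2u8, 3]);
}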
@@ -65,7 +69,7 @@ impl FromV8 for ZeroCopyBuf {
     if value.is_array_buffer() {
       value
         .try_into()
-        .and_then(|b| Self::from_buffer(b, 0, b.byte_length()))
+        .and_then(|b| Self::from_buffer(b, 0..b.byte_length()))
     } else {
       value
         .try_into()
@@ -78,59 +82,24 @@ impl FromV8 for ZeroCopyBuf {
 impl Deref for ZeroCopyBuf {
   type Target = [u8];
   fn deref(&self) -> &[u8] {
-    unsafe {
-      get_backing_store_slice(
-        &self.backing_store,
-        self.byte_offset,
-        self.byte_length,
-      )
-    }
+    self.as_slice()
   }
 }
 
 impl DerefMut for ZeroCopyBuf {
   fn deref_mut(&mut self) -> &mut [u8] {
-    unsafe {
-      get_backing_store_slice_mut(
-        &self.backing_store,
-        self.byte_offset,
-        self.byte_length,
-      )
-    }
+    self.as_slice_mut()
   }
 }
 
 impl AsRef<[u8]> for ZeroCopyBuf {
   fn as_ref(&self) -> &[u8] {
-    &*self
+    self.as_slice()
   }
 }
 
 impl AsMut<[u8]> for ZeroCopyBuf {
   fn as_mut(&mut self) -> &mut [u8] {
-    &mut *self
+    self.as_slice_mut()
   }
 }
-
-unsafe fn get_backing_store_slice(
-  backing_store: &v8::SharedRef<v8::BackingStore>,
-  byte_offset: usize,
-  byte_length: usize,
-) -> &[u8] {
-  let cells: *const [Cell<u8>] =
-    &backing_store[byte_offset..byte_offset + byte_length];
-  let bytes = cells as *const [u8];
-  &*bytes
-}
-
-#[allow(clippy::mut_from_ref)]
-unsafe fn get_backing_store_slice_mut(
-  backing_store: &v8::SharedRef<v8::BackingStore>,
-  byte_offset: usize,
-  byte_length: usize,
-) -> &mut [u8] {
-  let cells: *const [Cell<u8>] =
-    &backing_store[byte_offset..byte_offset + byte_length];
-  let bytes = cells as *const _ as *mut [u8];
-  &mut *bytes
-}
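
With the free helper functions gone, every byte-level view of a ZeroCopyBuf funnels through the two private accessors, and the `let copy = Vec::from(&*zero_copy_buf);` pattern quoted in the doc comment keeps working unchanged through Deref. Below is a toy, Vec-backed stand-in (an assumption made only so the sketch compiles on its own) showing that funneling.

use std::ops::{Deref, DerefMut};

// Hypothetical Vec-backed stand-in for ZeroCopyBuf; the point is only that
// Deref/DerefMut/AsRef/AsMut all delegate to one accessor pair.
struct Buf {
  bytes: Vec<u8>,
}

impl Buf {
  fn as_slice(&self) -> &[u8] {
    &self.bytes
  }
  fn as_slice_mut(&mut self) -> &mut [u8] {
    &mut self.bytes
  }
}

impl Deref for Buf {
  type Target = [u8];
  fn deref(&self) -> &[u8] {
    self.as_slice()
  }
}

impl DerefMut for Buf {
  fn deref_mut(&mut self) -> &mut [u8] {
    self.as_slice_mut()
  }
}

impl AsRef<[u8]> for Buf {
  fn as_ref(&self) -> &[u8] {
    self.as_slice()
  }
}

impl AsMut<[u8]> for Buf {
  fn as_mut(&mut self) -> &mut [u8] {
    self.as_slice_mut()
  }
}

fn main() {
  let mut buf = Buf { bytes: vec![1, 2, 3] };
  buf.as_mut()[0] = 9;          // mutate through AsMut -> as_slice_mut
  let copy = Vec::from(&*buf);  // the doc comment's `&*zero_copy_buf` pattern
  assert_eq!(copy, vec![9u8, 2, 3]);
}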