use std::any::Any; use std::borrow::Borrow; use std::borrow::BorrowMut; use std::convert::identity; use std::convert::AsMut; use std::convert::AsRef; use std::marker::PhantomData; use std::mem::align_of; use std::mem::forget; use std::mem::needs_drop; use std::mem::size_of; use std::mem::take; use std::mem::transmute_copy; use std::ops::Deref; use std::ops::DerefMut; use std::ptr::drop_in_place; use std::ptr::null_mut; use std::ptr::NonNull; use std::rc::Rc; use std::sync::Arc; // TODO use libc::intptr_t when stable. // https://doc.rust-lang.org/1.7.0/libc/type.intptr_t.html #[allow(non_camel_case_types)] pub type intptr_t = isize; pub use std::os::raw::c_char as char; pub use std::os::raw::c_int as int; pub use std::os::raw::c_long as long; pub type Opaque = [u8; 0]; /// Pointer to object allocated on the C++ heap. The pointer may be null. #[repr(transparent)] pub struct UniquePtr(Option>); impl UniquePtr { pub fn is_null(&self) -> bool { self.0.is_none() } pub fn as_ref(&self) -> Option<&UniqueRef> { self.0.as_ref() } pub fn as_mut(&mut self) -> Option<&mut UniqueRef> { self.0.as_mut() } pub fn take(&mut self) -> Option> { take(&mut self.0) } pub fn unwrap(self) -> UniqueRef { self.0.unwrap() } } impl UniquePtr { pub unsafe fn from_raw(ptr: *mut T) -> Self { assert_unique_ptr_layout_compatible::(); Self(UniqueRef::try_from_raw(ptr)) } pub fn into_raw(self) -> *mut T { self .0 .map(|unique_ref| unique_ref.into_raw()) .unwrap_or_else(null_mut) } } impl UniquePtr { pub fn make_shared(self) -> SharedPtr { self.into() } } impl Default for UniquePtr { fn default() -> Self { assert_unique_ptr_layout_compatible::(); Self(None) } } impl From> for UniquePtr { fn from(unique_ref: UniqueRef) -> Self { assert_unique_ptr_layout_compatible::(); Self(Some(unique_ref)) } } /// Pointer to object allocated on the C++ heap. The pointer may not be null. 
#[repr(transparent)] pub struct UniqueRef(NonNull); impl UniqueRef { unsafe fn try_from_raw(ptr: *mut T) -> Option { assert_unique_ptr_layout_compatible::(); NonNull::new(ptr).map(Self) } pub unsafe fn from_raw(ptr: *mut T) -> Self { assert_unique_ptr_layout_compatible::(); Self::try_from_raw(ptr).unwrap() } pub fn into_raw(self) -> *mut T { let ptr = self.0.as_ptr(); forget(self); ptr } } impl UniqueRef { pub fn make_shared(self) -> SharedRef { self.into() } } impl Drop for UniqueRef { fn drop(&mut self) { unsafe { drop_in_place(self.0.as_ptr()) } } } impl Deref for UniqueRef { type Target = T; fn deref(&self) -> &Self::Target { unsafe { self.0.as_ref() } } } impl DerefMut for UniqueRef { fn deref_mut(&mut self) -> &mut Self::Target { unsafe { self.0.as_mut() } } } impl AsRef for UniqueRef { fn as_ref(&self) -> &T { &**self } } impl AsMut for UniqueRef { fn as_mut(&mut self) -> &mut T { &mut **self } } impl Borrow for UniqueRef { fn borrow(&self) -> &T { &**self } } impl BorrowMut for UniqueRef { fn borrow_mut(&mut self) -> &mut T { &mut **self } } fn assert_unique_ptr_layout_compatible() { // Assert that `U` (a `UniqueRef` or `UniquePtr`) has the same memory layout // as a raw C pointer. assert_eq!(size_of::(), size_of::<*mut T>()); assert_eq!(align_of::(), align_of::<*mut T>()); // Assert that `T` (probably) implements `Drop`. If it doesn't, a regular // reference should be used instead of UniquePtr/UniqueRef. assert!(needs_drop::()); } pub trait Shared where Self: Sized, { fn clone(shared_ptr: &SharedPtrBase) -> SharedPtrBase; fn from_unique_ptr(shared_ptr: UniquePtr) -> SharedPtrBase; fn get(shared_ptr: &SharedPtrBase) -> *mut Self; fn reset(shared_ptr: &mut SharedPtrBase); fn use_count(shared_ptr: &SharedPtrBase) -> long; } /// Private base type which is shared by the `SharedPtr` and `SharedRef` /// implementations. 
#[repr(C)]
pub struct SharedPtrBase<T: Shared>([usize; 2], PhantomData<T>);

unsafe impl<T: Shared + Send + Sync> Send for SharedPtrBase<T> {}
unsafe impl<T: Shared + Send + Sync> Sync for SharedPtrBase<T> {}

impl<T: Shared> Default for SharedPtrBase<T> {
  fn default() -> Self {
    // Two zeroed words represent an empty (null) shared_ptr.
    Self([0usize; 2], PhantomData)
  }
}

impl<T: Shared> Drop for SharedPtrBase<T> {
  fn drop(&mut self) {
    <T as Shared>::reset(self);
  }
}

/// Wrapper around a C++ shared_ptr. A shared_ptr may be null.
#[repr(C)]
#[derive(Default)]
pub struct SharedPtr<T: Shared>(SharedPtrBase<T>);

impl<T: Shared> SharedPtr<T> {
  pub fn is_null(&self) -> bool {
    <T as Shared>::get(&self.0).is_null()
  }

  pub fn use_count(&self) -> long {
    <T as Shared>::use_count(&self.0)
  }

  /// Moves the contents into a `SharedRef`, leaving a null `SharedPtr`
  /// behind; returns `None` if this pointer is null.
  pub fn take(&mut self) -> Option<SharedRef<T>> {
    if self.is_null() {
      None
    } else {
      let base = take(&mut self.0);
      Some(SharedRef(base))
    }
  }

  /// Converts to a `SharedRef<T>`; panics if the pointer is null.
  pub fn unwrap(self) -> SharedRef<T> {
    assert!(!self.is_null());
    SharedRef(self.0)
  }
}

impl<T: Shared> Clone for SharedPtr<T> {
  fn clone(&self) -> Self {
    Self(<T as Shared>::clone(&self.0))
  }
}

impl<T, U> From<U> for SharedPtr<T>
where
  T: Shared,
  U: Into<UniquePtr<T>>,
{
  fn from(unique_ptr: U) -> Self {
    let unique_ptr = unique_ptr.into();
    Self(<T as Shared>::from_unique_ptr(unique_ptr))
  }
}

impl<T: Shared> From<SharedRef<T>> for SharedPtr<T> {
  fn from(mut shared_ref: SharedRef<T>) -> Self {
    Self(take(&mut shared_ref.0))
  }
}

/// Wrapper around a C++ shared_ptr. The shared_ptr is assumed to contain a
/// value and may not be null.
#[repr(C)]
pub struct SharedRef<T: Shared>(SharedPtrBase<T>);

impl<T: Shared> SharedRef<T> {
  pub fn use_count(&self) -> long {
    <T as Shared>::use_count(&self.0)
  }
}

impl<T: Shared> Clone for SharedRef<T> {
  fn clone(&self) -> Self {
    Self(<T as Shared>::clone(&self.0))
  }
}

impl<T: Shared> From<UniqueRef<T>> for SharedRef<T> {
  fn from(unique_ref: UniqueRef<T>) -> Self {
    SharedPtr::from(unique_ref).unwrap()
  }
}

impl<T: Shared> Deref for SharedRef<T> {
  type Target = T;
  fn deref(&self) -> &Self::Target {
    unsafe { &*(<T as Shared>::get(&self.0)) }
  }
}

impl<T: Shared> AsRef<T> for SharedRef<T> {
  fn as_ref(&self) -> &T {
    &**self
  }
}

impl<T: Shared> Borrow<T> for SharedRef<T> {
  fn borrow(&self) -> &T {
    &**self
  }
}

/// A trait for values with static lifetimes that are allocated at a fixed
/// address in memory.
Practically speaking, that means they're either a /// `&'static` reference, or they're heap-allocated in a `Arc`, `Box`, `Rc`, /// `UniqueRef`, `SharedRef` or `Vec`. pub trait Allocated: Deref + Borrow + 'static { } impl Allocated for A where A: Deref + Borrow + 'static { } pub(crate) enum Allocation { Static(&'static T), Arc(Arc), Box(Box), Rc(Rc), UniqueRef(UniqueRef), Other(Box + 'static>), // Note: it would be nice to add `SharedRef` to this list, but it requires the // `T: Shared` bound, and it's unfortunately not possible to set bounds on // individual enum variants. } impl Allocation { unsafe fn transmute_wrap( value: Abstract, wrap: fn(Concrete) -> Self, ) -> Self { assert_eq!(size_of::(), size_of::()); let wrapped = wrap(transmute_copy(&value)); forget(value); wrapped } fn try_wrap( value: Abstract, wrap: fn(Concrete) -> Self, ) -> Result { if Any::is::(&value) { Ok(unsafe { Self::transmute_wrap(value, wrap) }) } else { Err(value) } } pub fn of + Borrow + 'static>( a: Abstract, ) -> Self { Self::try_wrap(a, identity) .or_else(|a| Self::try_wrap(a, Self::Static)) .or_else(|a| Self::try_wrap(a, Self::Arc)) .or_else(|a| Self::try_wrap(a, Self::Box)) .or_else(|a| Self::try_wrap(a, Self::Rc)) .or_else(|a| Self::try_wrap(a, Self::UniqueRef)) .unwrap_or_else(|a| Self::Other(Box::from(a))) } } impl Deref for Allocation { type Target = T; fn deref(&self) -> &Self::Target { match self { Self::Static(v) => v.borrow(), Self::Arc(v) => v.borrow(), Self::Box(v) => v.borrow(), Self::Rc(v) => v.borrow(), Self::UniqueRef(v) => v.borrow(), Self::Other(v) => (&**v).borrow(), } } } impl AsRef for Allocation { fn as_ref(&self) -> &T { &**self } } impl Borrow for Allocation { fn borrow(&self) -> &T { &**self } } #[repr(C)] #[derive(Debug, PartialEq)] pub(crate) enum MaybeBool { JustFalse = 0, JustTrue = 1, Nothing = 2, } impl Into> for MaybeBool { fn into(self) -> Option { match self { MaybeBool::JustFalse => Some(false), MaybeBool::JustTrue => Some(true), MaybeBool::Nothing => 
None, } } } #[derive(Copy, Clone)] #[repr(transparent)] pub struct CxxVTable(pub *const Opaque); #[derive(Copy, Clone)] pub struct RustVTable(pub *const Opaque, pub PhantomData); pub struct FieldOffset(usize, PhantomData); unsafe impl Send for FieldOffset where F: Send {} unsafe impl Sync for FieldOffset where F: Sync {} impl Copy for FieldOffset {} impl Clone for FieldOffset { fn clone(&self) -> Self { Self(self.0, self.1) } } impl FieldOffset { pub fn from_ptrs(embedder_ptr: *const E, field_ptr: *const F) -> Self { let embedder_addr = embedder_ptr as usize; let field_addr = field_ptr as usize; assert!(field_addr >= embedder_addr); assert!((field_addr + size_of::()) <= (embedder_addr + size_of::())); Self(field_addr - embedder_addr, PhantomData) } pub unsafe fn to_embedder(self, field: &F) -> &E { (((field as *const _ as usize) - self.0) as *const E) .as_ref() .unwrap() } pub unsafe fn to_embedder_mut(self, field: &mut F) -> &mut E { (((field as *mut _ as usize) - self.0) as *mut E) .as_mut() .unwrap() } } #[repr(C)] #[derive(Default)] pub struct Maybe { has_value: bool, value: T, } impl Into> for Maybe { fn into(self) -> Option { if self.has_value { Some(self.value) } else { None } } } pub trait UnitType where Self: Copy + Sized, { #[inline(always)] fn get() -> Self { UnitValue::::get() } } impl UnitType for T where T: Copy + Sized {} #[derive(Copy, Clone)] struct UnitValue(PhantomData) where Self: Sized; impl UnitValue where Self: Copy + Sized, { const SELF: Self = Self::new_checked(); const fn new_checked() -> Self { // Statically assert that T is indeed a unit type. let size_must_be_0 = size_of::(); let s = Self(PhantomData::); [s][size_must_be_0] } #[inline(always)] fn get_checked(self) -> T { // This run-time check serves just as a backup for the compile-time // check when Self::SELF is initialized. 
assert_eq!(size_of::(), 0); unsafe { std::mem::MaybeUninit::::zeroed().assume_init() } } #[inline(always)] pub fn get() -> T { // Accessing the Self::SELF is necessary to make the compile-time type check // work. Self::SELF.get_checked() } } pub struct DefaultTag; pub struct IdenticalConversionTag; pub trait MapFnFrom where F: UnitType, Self: Sized, { fn mapping() -> Self; #[inline(always)] fn map_fn_from(_: F) -> Self { Self::mapping() } } impl MapFnFrom for F where Self: UnitType, { #[inline(always)] fn mapping() -> Self { Self::get() } } pub trait MapFnTo where Self: UnitType, T: Sized, { fn mapping() -> T; #[inline(always)] fn map_fn_to(self) -> T { Self::mapping() } } impl MapFnTo for F where Self: UnitType, T: MapFnFrom, { #[inline(always)] fn mapping() -> T { T::map_fn_from(F::get()) } } pub trait CFnFrom where Self: Sized, F: UnitType, { fn mapping() -> Self; #[inline(always)] fn c_fn_from(_: F) -> Self { Self::mapping() } } macro_rules! impl_c_fn_from { ($($arg:ident: $ty:ident),*) => { impl CFnFrom for extern "C" fn($($ty),*) -> R where F: UnitType + Fn($($ty),*) -> R, { #[inline(always)] fn mapping() -> Self { extern "C" fn c_fn($($arg: $ty),*) -> R where F: UnitType + Fn($($ty),*) -> R, { (F::get())($($arg),*) }; c_fn:: } } }; } impl_c_fn_from!(); impl_c_fn_from!(a0: A0); impl_c_fn_from!(a0: A0, a1: A1); impl_c_fn_from!(a0: A0, a1: A1, a2: A2); impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3); impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4); impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4, a5: A5); impl_c_fn_from!(a0: A0, a1: A1, a2: A2, a3: A3, a4: A4, a5: A5, a6: A6); pub trait ToCFn where Self: UnitType, T: Sized, { fn mapping() -> T; #[inline(always)] fn to_c_fn(self) -> T { Self::mapping() } } impl ToCFn for F where Self: UnitType, T: CFnFrom, { #[inline(always)] fn mapping() -> T { T::c_fn_from(F::get()) } } #[cfg(test)] mod tests { use super::*; use std::sync::atomic::AtomicBool; use std::sync::atomic::Ordering; static 
TEST_OBJ_DROPPED: AtomicBool = AtomicBool::new(false); struct TestObj { pub id: u32, } impl Drop for TestObj { fn drop(&mut self) { assert!(!TEST_OBJ_DROPPED.swap(true, Ordering::SeqCst)); } } struct TestObjRef(TestObj); impl Deref for TestObjRef { type Target = TestObj; fn deref(&self) -> &TestObj { &self.0 } } impl Borrow for TestObjRef { fn borrow(&self) -> &TestObj { &**self } } #[test] fn allocation() { // Static. static STATIC_OBJ: TestObj = TestObj { id: 1 }; let owner = Allocation::of(&STATIC_OBJ); match owner { Allocation::Static(_) => assert_eq!(owner.id, 1), _ => panic!(), } drop(owner); assert!(!TEST_OBJ_DROPPED.load(Ordering::SeqCst)); // Arc. let owner = Allocation::of(Arc::new(TestObj { id: 2 })); match owner { Allocation::Arc(_) => assert_eq!(owner.id, 2), _ => panic!(), } drop(owner); assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst)); // Box. let owner = Allocation::of(Box::new(TestObj { id: 3 })); match owner { Allocation::Box(_) => assert_eq!(owner.id, 3), _ => panic!(), } drop(owner); assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst)); // Rc. let owner = Allocation::of(Rc::new(TestObj { id: 4 })); match owner { Allocation::Rc(_) => assert_eq!(owner.id, 4), _ => panic!(), } drop(owner); assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst)); // Other. let owner = Allocation::of(TestObjRef(TestObj { id: 5 })); match owner { Allocation::Other(_) => assert_eq!(owner.id, 5), _ => panic!(), } drop(owner); assert!(TEST_OBJ_DROPPED.swap(false, Ordering::SeqCst)); // Contents of Vec should not be moved. let vec = vec![1u8, 2, 3, 5, 8, 13, 21]; let vec_element_ptrs = vec.iter().map(|i| i as *const u8).collect::>(); let owner = Allocation::of(vec); match owner { Allocation::Other(_) => {} _ => panic!(), } owner .iter() .map(|i| i as *const u8) .zip(vec_element_ptrs) .for_each(|(p1, p2)| assert_eq!(p1, p2)); } }