diff --git a/godot-cell/src/blocking_cell.rs b/godot-cell/src/blocking_cell.rs index 3c643298d..af80d4971 100644 --- a/godot-cell/src/blocking_cell.rs +++ b/godot-cell/src/blocking_cell.rs @@ -11,13 +11,13 @@ use std::pin::Pin; use std::sync::{Arc, Condvar, Mutex, MutexGuard}; use std::thread; -use crate::blocking_guards::{MutGuardBlocking, RefGuardBlocking}; +use crate::blocking_guards::{InaccessibleGuardBlocking, MutGuardBlocking, RefGuardBlocking}; use crate::cell::GdCellInner; -use crate::guards::InaccessibleGuard; /// Blocking version of [`panicking::GdCell`](crate::panicking::GdCell) for multithreaded usage. /// /// This version of GdCell blocks the current thread if it does not yet hold references to the cell. +/// Since `GdCellInner` isn't thread-safe by itself, any access to `inner` must be guarded by locking the `thread_tracker`. /// /// For more details on when threads are being blocked see [`Self::borrow`] and [`Self::borrow_mut`]. /// @@ -107,6 +107,7 @@ impl GdCellBlocking { inner_guard, self.mut_condition.clone(), self.immut_condition.clone(), + self.thread_tracker.clone(), )) } @@ -119,11 +120,14 @@ impl GdCellBlocking { pub fn make_inaccessible<'cell, 'val>( &'cell self, current_ref: &'val mut T, - ) -> Result, Box> + ) -> Result, Box> where 'cell: 'val, { - self.inner.as_ref().make_inaccessible(current_ref) + let _tracker_guard = self.thread_tracker.lock().unwrap(); + let inner = self.inner.as_ref().make_inaccessible(current_ref)?; + let inaccessible = InaccessibleGuardBlocking::new(inner, self.thread_tracker.clone()); + Ok(inaccessible) } /// Returns `true` if there are any mutable or shared references, regardless of whether the mutable @@ -164,6 +168,12 @@ impl GdCellBlocking { } } +// SAFETY: +// - `T` must not be `Sync`, and the only way to access the underlying `T` is via `GdCellBlocking`. +// - It must be ensured that `GdCellInner`, which holds `T`, cannot be accessed from multiple threads simultaneously while handing out guards. 
+// The current implementation ensures this by locking the `thread_tracker`. +unsafe impl Sync for GdCellBlocking {} + /// Holds the reference count and the currently mutable thread. #[derive(Debug)] pub(crate) struct ThreadTracker { diff --git a/godot-cell/src/blocking_guards.rs b/godot-cell/src/blocking_guards.rs index f5ec418d4..032327532 100644 --- a/godot-cell/src/blocking_guards.rs +++ b/godot-cell/src/blocking_guards.rs @@ -11,6 +11,7 @@ use std::sync::{Arc, Condvar, Mutex}; use crate::blocking_cell::ThreadTracker; use crate::guards::{MutGuard, RefGuard}; +use crate::panicking::InaccessibleGuard; /// Extended version of [`panicking::RefGuard`](crate::panicking::RefGuard) that tracks which thread a reference belongs to and when it's dropped. /// @@ -50,7 +51,7 @@ impl Drop for RefGuardBlocking<'_, T> { state_lock.decrement_current_thread_shared_count(); - // SAFETY: guard is dropped exactly once, here. + // SAFETY: guard is dropped exactly once, here, while state is guarded by `_tracker_guard` preventing access from any other thread. 
unsafe { ManuallyDrop::drop(&mut self.inner) }; self.mut_condition.notify_one(); @@ -66,6 +67,7 @@ pub struct MutGuardBlocking<'a, T> { inner: ManuallyDrop>, mut_condition: Arc, immut_condition: Arc, + state: Arc>, } impl<'a, T> MutGuardBlocking<'a, T> { @@ -73,11 +75,13 @@ impl<'a, T> MutGuardBlocking<'a, T> { inner: MutGuard<'a, T>, mut_condition: Arc, immut_condition: Arc, + state: Arc>, ) -> Self { Self { inner: ManuallyDrop::new(inner), immut_condition, mut_condition, + state, } } } @@ -86,22 +90,71 @@ impl<'a, T> Deref for MutGuardBlocking<'a, T> { type Target = as Deref>::Target; fn deref(&self) -> &Self::Target { + let _tracker_guard = self.state.lock().unwrap(); self.inner.deref().deref() } } impl DerefMut for MutGuardBlocking<'_, T> { fn deref_mut(&mut self) -> &mut Self::Target { + let _tracker_guard = self.state.lock().unwrap(); self.inner.deref_mut().deref_mut() } } impl Drop for MutGuardBlocking<'_, T> { fn drop(&mut self) { - // SAFETY: guard is dropped exactly once, here. + let _tracker_guard = self.state.lock().unwrap(); + + // SAFETY: guard is dropped exactly once, here, while state is guarded by `_tracker_guard` preventing access from any other thread. unsafe { ManuallyDrop::drop(&mut self.inner) }; self.mut_condition.notify_one(); self.immut_condition.notify_all(); } } + +/// Extended version of [`panicking::InaccessibleGuard`](crate::panicking::InaccessibleGuard) that blocks thread upon dropping. +/// +/// See [`panicking::InaccessibleGuard`](crate::panicking::InaccessibleGuard) for more details. +#[derive(Debug)] +pub struct InaccessibleGuardBlocking<'a, T> { + inner: ManuallyDrop>, + state: Arc>, +} + +impl<'a, T> InaccessibleGuardBlocking<'a, T> { + pub(crate) fn new(inner: InaccessibleGuard<'a, T>, state: Arc>) -> Self { + Self { + inner: ManuallyDrop::new(inner), + state, + } + } + + /// Drop self if possible, otherwise returns self again. + /// + /// Used currently in the mock-tests, as we need a thread safe way to drop self. 
Using the normal drop + /// logic may poison state, however it should not cause any UB either way. + /// + /// See [`panicking::InaccessibleGuard::try_drop`](crate::panicking::InaccessibleGuard::try_drop) for more details. + pub fn try_drop(self) -> Result<(), Self> { + let can_drop = { + let _tracker_guard = self.state.lock(); + self.inner.can_drop() + }; + + if !can_drop { + Err(self) + } else { + Ok(()) + } + } +} + +impl Drop for InaccessibleGuardBlocking<'_, T> { + fn drop(&mut self) { + let _tracker_guard = self.state.lock().unwrap(); + unsafe { ManuallyDrop::drop(&mut self.inner) }; + drop(_tracker_guard) + } +} diff --git a/godot-cell/src/cell.rs b/godot-cell/src/cell.rs index 3885751ba..185215140 100644 --- a/godot-cell/src/cell.rs +++ b/godot-cell/src/cell.rs @@ -8,9 +8,9 @@ use std::cell::UnsafeCell; use std::error::Error; use std::marker::PhantomPinned; +use std::mem::MaybeUninit; use std::pin::Pin; use std::ptr::NonNull; -use std::sync::Mutex; use crate::borrow_state::BorrowState; use crate::guards::{InaccessibleGuard, MutGuard, RefGuard}; @@ -73,11 +73,12 @@ impl GdCell { /// /// This cell must be pinned to be usable, as it stores self-referential pointers. The [`GdCell`] type abstracts this detail away from /// the public type. -// TODO: consider not using `Mutex` +/// +/// The cell is **not** thread-safe by itself. #[derive(Debug)] pub(crate) struct GdCellInner { /// The mutable state of this cell. - pub(crate) state: Mutex>, + pub(crate) state: UnsafeCell>, /// The actual value we're handing out references to, uses `UnsafeCell` as we're passing out `&mut` /// references to its contents even when we only have a `&` reference to the cell. value: UnsafeCell, @@ -88,34 +89,56 @@ pub(crate) struct GdCellInner { impl GdCellInner { /// Creates a new cell storing `value`. 
pub fn new(value: T) -> Pin> { - let cell = Box::pin(Self { - state: Mutex::new(CellState::new()), - value: UnsafeCell::new(value), - _pin: PhantomPinned, - }); - - cell.state.lock().unwrap().initialize_ptr(&cell.value); + let mut uninitialized_cell: Box> = Box::new_uninit(); + let uninitialized_cell_ptr = uninitialized_cell.as_mut_ptr(); + + // SAFETY: pointer to `value` is properly aligned. + let value_ptr = unsafe { + let value_ptr = &raw mut (*uninitialized_cell_ptr).value; + value_ptr.write(UnsafeCell::new(value)); + value_ptr + }; + + // SAFETY + // `value_ptr` is properly aligned and points to initialized data. + // Additionally, since Box::pin(...) is equivalent to Box::into_pin(Box::...) `value_ref` + // will remain valid and refer to the same underlying value after pinning. + let value_ref = unsafe { value_ptr.as_ref().unwrap() }; + + // SAFETY: pointer to `state` is properly aligned. + let state_ptr = unsafe { &raw mut (*uninitialized_cell_ptr).state }; + + // SAFETY: See above. + unsafe { + state_ptr.write(UnsafeCell::new(CellState::new(value_ref))); + } - cell + Box::into_pin( + // SAFETY: All `GdCellInner` fields are valid. + unsafe { uninitialized_cell.assume_init() }, + ) } /// Returns a new shared reference to the contents of the cell. /// /// Fails if an accessible mutable reference exists. pub fn borrow(self: Pin<&Self>) -> Result, Box> { - let mut state = self.state.lock().unwrap(); + // SAFETY: This is the only active reference to the state. + let state = unsafe { self.cell_state_mut() }; state.borrow_state.increment_shared()?; + let value = state.get_ptr(); // SAFETY: `increment_shared` succeeded, therefore there cannot currently be any accessible mutable // references. - unsafe { Ok(RefGuard::new(&self.get_ref().state, state.get_ptr())) } + unsafe { Ok(RefGuard::new(&self.get_ref().state, value)) } } /// Returns a new mutable reference to the contents of the cell. 
/// /// Fails if an accessible mutable reference exists, or a shared reference exists. pub fn borrow_mut(self: Pin<&Self>) -> Result, Box> { - let mut state = self.state.lock().unwrap(); + // SAFETY: This is the only active reference to the state. + let state = unsafe { self.cell_state_mut() }; state.borrow_state.increment_mut()?; let count = state.borrow_state.mut_count(); let value = state.get_ptr(); @@ -147,6 +170,25 @@ impl GdCellInner { InaccessibleGuard::new(&self.get_ref().state, current_ref) } + /// Returns a reference to the CellState. + /// + /// # Safety + /// - The caller must ensure that there are no active exclusive references to the given state. + unsafe fn cell_state(&self) -> &CellState { + // SAFETY: the underlying `CellState` will not be deallocated as long as Cell itself is alive. + unsafe { &*self.state.get() } + } + + /// Returns the exclusive reference to the CellState. + /// + /// # Safety + /// - The caller must ensure that there are no active references to the given state. + #[allow(clippy::mut_from_ref)] + unsafe fn cell_state_mut(&self) -> &mut CellState { + // SAFETY: the underlying `CellState` will not be deallocated as long as Cell itself is alive. + unsafe { &mut *self.state.get() } + } + /// Returns `true` if there are any mutable or shared references, regardless of whether the mutable /// references are accessible or not. /// @@ -157,26 +199,19 @@ impl GdCellInner { /// cell hands out a new borrow before it is destroyed. So we still need to ensure that this cannot /// happen at the same time. pub fn is_currently_bound(self: Pin<&Self>) -> bool { - let state = self.state.lock().unwrap(); - + // SAFETY: this is the only reference to the `cell_state` in given context. + let state = unsafe { self.cell_state() }; state.borrow_state.shared_count() > 0 || state.borrow_state.mut_count() > 0 } /// Similar to [`Self::is_currently_bound`] but only counts mutable references and ignores shared references. 
pub(crate) fn is_currently_mutably_bound(self: Pin<&Self>) -> bool { - let state = self.state.lock().unwrap(); - - state.borrow_state.mut_count() > 0 + // SAFETY: this is the only reference to the `cell_state` in given context. + unsafe { self.cell_state() }.borrow_state.mut_count() > 0 } } -// SAFETY: `T` is Sync, so we can return references to it on different threads. -// It is also Send, so we can return mutable references to it on different threads. -// Additionally, all internal state is synchronized via a mutex, so we won't have race conditions when trying to use it from multiple threads. -unsafe impl Sync for GdCellInner {} - -/// Mutable state of the `GdCell`, bundled together to make it easier to avoid deadlocks when locking the -/// mutex. +/// Mutable state of the `GdCell`. #[derive(Debug)] pub(crate) struct CellState { /// Tracking the borrows this cell has. This ensures relevant invariants are upheld. @@ -191,8 +226,7 @@ pub(crate) struct CellState { /// /// We always generate new pointer based off of the pointer currently in this field, to ensure any new /// references are derived from the most recent `&mut` reference. - // TODO: Consider using `NonNull` instead. - ptr: *mut T, + ptr: NonNull, /// How many pointers have been handed out. /// @@ -201,44 +235,43 @@ pub(crate) struct CellState { } impl CellState { - /// Create a new uninitialized state. Use [`initialize_ptr()`](CellState::initialize_ptr()) to initialize - /// it. - fn new() -> Self { + fn new(value: &UnsafeCell) -> Self { Self { borrow_state: BorrowState::new(), - ptr: std::ptr::null_mut(), + ptr: NonNull::new(value.get()).unwrap(), stack_depth: 0, } } - /// Initialize the pointer if it is `None`. - fn initialize_ptr(&mut self, value: &UnsafeCell) { - if self.ptr.is_null() { - self.ptr = value.get(); - assert!(!self.ptr.is_null()); - } else { - panic!("Cannot initialize pointer as it is already initialized.") - } - } - /// Returns the current pointer. Panics if uninitialized. 
pub(crate) fn get_ptr(&self) -> NonNull { - NonNull::new(self.ptr).unwrap() + self.ptr } /// Push a pointer to this state. pub(crate) fn push_ptr(&mut self, new_ptr: NonNull) -> usize { - self.ptr = new_ptr.as_ptr(); + self.ptr = new_ptr; self.stack_depth += 1; self.stack_depth } /// Pop a pointer to this state, resetting it to the given old pointer. pub(crate) fn pop_ptr(&mut self, old_ptr: NonNull) -> usize { - self.ptr = old_ptr.as_ptr(); + self.ptr = old_ptr; self.stack_depth -= 1; self.stack_depth } + + /// Returns underlying [`BorrowState`]. + /// + /// # Safety + /// + /// - `cell_state` must point to a valid reference. + /// - There can't be any active reference to `CellState`. + #[allow(clippy::mut_from_ref)] + pub(crate) unsafe fn borrow_state(cell_state: &UnsafeCell) -> &mut BorrowState { + &mut cell_state.get().as_mut().unwrap().borrow_state + } } #[cfg(test)] diff --git a/godot-cell/src/guards.rs b/godot-cell/src/guards.rs index c38661c70..dc2c55b0e 100644 --- a/godot-cell/src/guards.rs +++ b/godot-cell/src/guards.rs @@ -4,10 +4,9 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ - +use std::cell::UnsafeCell; use std::ops::{Deref, DerefMut}; use std::ptr::NonNull; -use std::sync::{Mutex, MutexGuard}; use crate::cell::CellState; @@ -19,7 +18,7 @@ use crate::cell::CellState; #[derive(Debug)] pub struct RefGuard<'a, T> { /// The current state of borrows to the borrowed value. - state: &'a Mutex>, + state: &'a UnsafeCell>, /// A pointer to the borrowed value. value: NonNull, @@ -40,7 +39,7 @@ impl<'a, T> RefGuard<'a, T> { /// /// These conditions ensure that it is safe to call [`as_ref()`](NonNull::as_ref) on `value` for as long /// as the returned guard exists. 
- pub(crate) unsafe fn new(state: &'a Mutex>, value: NonNull) -> Self { + pub(crate) unsafe fn new(state: &'a UnsafeCell>, value: NonNull) -> Self { Self { state, value } } } @@ -56,10 +55,8 @@ impl Deref for RefGuard<'_, T> { impl Drop for RefGuard<'_, T> { fn drop(&mut self) { - self.state - .lock() - .unwrap() - .borrow_state + // SAFETY: There is no other active reference to the state, and it is ensured that RefGuard is alive at least as long as the reference to the state. + unsafe { CellState::borrow_state(self.state) } .decrement_shared() .unwrap(); } @@ -74,7 +71,7 @@ impl Drop for RefGuard<'_, T> { /// reference handed out by this guard. #[derive(Debug)] pub struct MutGuard<'a, T> { - state: &'a Mutex>, + state: &'a UnsafeCell>, count: usize, value: NonNull, } @@ -110,7 +107,7 @@ impl<'a, T> MutGuard<'a, T> { /// - When it is made inaccessible, [`GdCell`](super::GdCell) will also ensure that any new references /// are derived from this guard's `value` pointer, thus preventing `value` from being invalidated. pub(crate) unsafe fn new( - state: &'a Mutex>, + state: &'a UnsafeCell>, count: usize, value: NonNull, ) -> Self { @@ -126,7 +123,8 @@ impl Deref for MutGuard<'_, T> { type Target = T; fn deref(&self) -> &Self::Target { - let count = self.state.lock().unwrap().borrow_state.mut_count(); + // SAFETY: There can't be any other active reference to CellState. + let count = unsafe { CellState::borrow_state(self.state) }.mut_count(); // This is just a best-effort error check. It should never be triggered. assert_eq!( self.count, @@ -152,7 +150,8 @@ impl Deref for MutGuard<'_, T> { impl DerefMut for MutGuard<'_, T> { fn deref_mut(&mut self) -> &mut Self::Target { - let count = self.state.lock().unwrap().borrow_state.mut_count(); + // SAFETY: There can't be any other active reference to CellState. + let count = unsafe { CellState::borrow_state(self.state) }.mut_count(); // This is just a best-effort error check. It should never be triggered. 
assert_eq!( self.count, @@ -179,10 +178,8 @@ impl DerefMut for MutGuard<'_, T> { impl Drop for MutGuard<'_, T> { fn drop(&mut self) { - self.state - .lock() - .unwrap() - .borrow_state + // SAFETY: It is ensured that MutGuard is exclusive and alive at least as long as the reference to the state. + unsafe { CellState::borrow_state(self.state) } .decrement_mut() .unwrap(); } @@ -199,7 +196,7 @@ impl Drop for MutGuard<'_, T> { /// is dropped, it resets the state to what it was before, as if this guard never existed. #[derive(Debug)] pub struct InaccessibleGuard<'a, T> { - state: &'a Mutex>, + state: &'a UnsafeCell>, stack_depth: usize, prev_ptr: NonNull, } @@ -216,15 +213,15 @@ impl<'a, T> InaccessibleGuard<'a, T> { /// - There are any shared references. /// - `new_ref` is not equal to the pointer in `state`. pub(crate) fn new<'b>( - state: &'a Mutex>, + state: &'a UnsafeCell>, new_ref: &'b mut T, ) -> Result> where 'a: 'b, { - let mut guard = state.lock().unwrap(); - - let current_ptr = guard.get_ptr(); + // SAFETY: There can be only one active reference to the cell state at a given time. + let cell_state = unsafe { state.get().as_mut() }.unwrap(); + let current_ptr = cell_state.get_ptr(); let new_ptr = NonNull::from(new_ref); if current_ptr != new_ptr { @@ -232,9 +229,9 @@ impl<'a, T> InaccessibleGuard<'a, T> { return Err("wrong reference passed in".into()); } - guard.borrow_state.set_inaccessible()?; - let prev_ptr = guard.get_ptr(); - let stack_depth = guard.push_ptr(new_ptr); + cell_state.borrow_state.set_inaccessible()?; + let prev_ptr = cell_state.get_ptr(); + let stack_depth = cell_state.push_ptr(new_ptr); Ok(Self { state, @@ -244,11 +241,8 @@ impl<'a, T> InaccessibleGuard<'a, T> { } /// Single implementation of drop-logic for use in both drop implementations. 
- fn perform_drop( - mut state: MutexGuard<'_, CellState>, - prev_ptr: NonNull, - stack_depth: usize, - ) { + fn perform_drop(state: &'a UnsafeCell>, prev_ptr: NonNull, stack_depth: usize) { + let state = unsafe { state.get().as_mut() }.unwrap(); if state.stack_depth != stack_depth { state .borrow_state @@ -259,18 +253,29 @@ impl<'a, T> InaccessibleGuard<'a, T> { state.pop_ptr(prev_ptr); } + /// Returns `true` if guard can be safely dropped, i.e.: + /// + /// - Guard is being released in correct order. + /// - There is no accessible mutable reference to underlying value. + /// - There are no shared references to underlying value. + #[doc(hidden)] + pub fn can_drop(&self) -> bool { + let state = unsafe { self.state.get().as_mut() }.unwrap(); + state.borrow_state.may_unset_inaccessible() && state.stack_depth == self.stack_depth + } + /// Drop self if possible, otherwise returns self again. /// /// Used currently in the mock-tests, as we need a thread safe way to drop self. Using the normal drop /// logic may poison state, however it should not cause any UB either way. #[doc(hidden)] - pub fn try_drop(self) -> Result<(), std::mem::ManuallyDrop> { - let manual = std::mem::ManuallyDrop::new(self); - let state = manual.state.lock().unwrap(); - if !state.borrow_state.may_unset_inaccessible() || state.stack_depth != manual.stack_depth { - return Err(manual); + pub fn try_drop(self) -> Result<(), Self> { + if !self.can_drop() { + return Err(self); } - Self::perform_drop(state, manual.prev_ptr, manual.stack_depth); + + let manual = std::mem::ManuallyDrop::new(self); + Self::perform_drop(manual.state, manual.prev_ptr, manual.stack_depth); Ok(()) } @@ -280,7 +285,6 @@ impl Drop for InaccessibleGuard<'_, T> { fn drop(&mut self) { // Default behavior of drop-logic simply panics and poisons the cell on failure. This is appropriate // for single-threaded code where no errors should happen here. 
- let state = self.state.lock().unwrap(); - Self::perform_drop(state, self.prev_ptr, self.stack_depth); + Self::perform_drop(self.state, self.prev_ptr, self.stack_depth); } } diff --git a/godot-cell/src/lib.rs b/godot-cell/src/lib.rs index a29e3b578..e94a565c2 100644 --- a/godot-cell/src/lib.rs +++ b/godot-cell/src/lib.rs @@ -43,6 +43,8 @@ pub mod panicking { pub mod blocking { pub use crate::blocking_cell::GdCellBlocking as GdCell; - pub use crate::blocking_guards::{MutGuardBlocking as MutGuard, RefGuardBlocking as RefGuard}; - pub use crate::guards::InaccessibleGuard; + pub use crate::blocking_guards::{ + InaccessibleGuardBlocking as InaccessibleGuard, MutGuardBlocking as MutGuard, + RefGuardBlocking as RefGuard, + }; } diff --git a/godot-cell/tests/mock/main.rs b/godot-cell/tests/mock/main.rs index 3fc8dc257..4c0c8e04c 100644 --- a/godot-cell/tests/mock/main.rs +++ b/godot-cell/tests/mock/main.rs @@ -135,7 +135,7 @@ macro_rules! setup_mock { while let Some(guard) = guard_opt.take() { if let Err(new_guard) = std::mem::ManuallyDrop::into_inner(guard).try_drop() { - guard_opt = Some(new_guard); + guard_opt = Some(std::mem::ManuallyDrop::new(new_guard)); std::hint::spin_loop() } } diff --git a/godot-cell/tests/mock/panicking.rs b/godot-cell/tests/mock/panicking.rs index 7d279b80c..f2058fb81 100644 --- a/godot-cell/tests/mock/panicking.rs +++ b/godot-cell/tests/mock/panicking.rs @@ -8,6 +8,9 @@ //! A mock implementation of our instance-binding pattern in pure rust. //! //! Used so we can run miri on this, which we cannot when we are running in itest against Godot. +//! +//! Currently, the panicking `GdCell` is suitable only for single-threaded use. Without `experimental-threads` enabled, +//! godot-rust will block access to bindings from any thread other than the main one. use std::collections::HashMap; use std::error::Error; @@ -55,131 +58,3 @@ fn all_calls_work() { } } } - -/// Run each method both from the main thread and a newly created thread. 
-#[test] -fn calls_different_thread() { - use std::thread; - - let instance_id = MyClass::init(); - - // We're not running in parallel, so it will never fail to increment completely. - for (f, _, expected_increment) in CALLS { - let start = unsafe { get_int(instance_id) }; - unsafe { - f(instance_id).unwrap(); - - assert_id_is(instance_id, start + expected_increment); - } - let start = start + expected_increment; - thread::spawn(move || unsafe { f(instance_id).unwrap() }) - .join() - .unwrap(); - unsafe { - assert_id_is(instance_id, start + expected_increment); - } - } -} - -/// Call each method from different threads, allowing them to run in parallel. -/// -/// This may cause borrow failures, we do a best-effort attempt at estimating the value then. We can detect -/// if the first call failed, so then we know the integer was incremented by 0. Otherwise, we at least know -/// the range of values that it can be incremented by. -#[test] -fn calls_parallel() { - use std::thread; - - let instance_id = MyClass::init(); - let mut handles = Vec::new(); - - for (f, min_increment, max_increment) in CALLS { - let handle = thread::spawn(move || unsafe { - f(instance_id).map_or((0, 0), |_| (*min_increment, *max_increment)) - }); - handles.push(handle); - } - - let (min_expected, max_expected) = handles - .into_iter() - .map(|handle| handle.join().unwrap()) - .reduce(|(curr_min, curr_max), (min, max)| (curr_min + min, curr_max + max)) - .unwrap(); - - unsafe { - assert!(get_int(instance_id) >= min_expected); - assert!(get_int(instance_id) <= max_expected); - } -} - -/// Call each method from different threads, allowing them to run in parallel. -/// -/// This may cause borrow failures, we do a best-effort attempt at estimating the value then. We can detect -/// if the first call failed, so then we know the integer was incremented by 0. Otherwise, we at least know -/// the range of values that it can be incremented by. -/// -/// Runs each method several times in a row. 
This should reduce the non-determinism that comes from -/// scheduling of threads. -#[test] -fn calls_parallel_many_serial() { - use std::thread; - - let instance_id = MyClass::init(); - let mut handles = Vec::new(); - - for (f, min_increment, max_increment) in CALLS { - for _ in 0..10 { - let handle = thread::spawn(move || unsafe { - f(instance_id).map_or((0, 0), |_| (*min_increment, *max_increment)) - }); - handles.push(handle); - } - } - - let (min_expected, max_expected) = handles - .into_iter() - .map(|handle| handle.join().unwrap()) - .reduce(|(curr_min, curr_max), (min, max)| (curr_min + min, curr_max + max)) - .unwrap(); - - unsafe { - assert!(get_int(instance_id) >= min_expected); - assert!(get_int(instance_id) <= max_expected); - } -} - -/// Call each method from different threads, allowing them to run in parallel. -/// -/// This may cause borrow failures, we do a best-effort attempt at estimating the value then. We can detect -/// if the first call failed, so then we know the integer was incremented by 0. Otherwise, we at least know -/// the range of values that it can be incremented by. -/// -/// Runs all the tests several times. This is different from [`calls_parallel_many_serial`] as that calls the -/// methods like AAA...BBB...CCC..., whereas this interleaves the methods like ABC...ABC...ABC... 
-#[test] -fn calls_parallel_many_parallel() { - use std::thread; - - let instance_id = MyClass::init(); - let mut handles = Vec::new(); - - for _ in 0..10 { - for (f, min_increment, max_increment) in CALLS { - let handle = thread::spawn(move || unsafe { - f(instance_id).map_or((0, 0), |_| (*min_increment, *max_increment)) - }); - handles.push(handle); - } - } - - let (min_expected, max_expected) = handles - .into_iter() - .map(|handle| handle.join().unwrap()) - .reduce(|(curr_min, curr_max), (min, max)| (curr_min + min, curr_max + max)) - .unwrap(); - - unsafe { - assert!(get_int(instance_id) >= min_expected); - assert!(get_int(instance_id) <= max_expected); - } -}