refact: use exclusive ref for load_unsynced
pedromfedricci committed Nov 12, 2024
1 parent 3136df4 commit 257ee03
Showing 2 changed files with 7 additions and 20 deletions.
22 changes: 6 additions & 16 deletions src/cfg.rs
@@ -11,36 +11,26 @@ pub mod atomic {
         type Target = T;
 
         #[cfg(not(all(loom, test)))]
-        unsafe fn load_unsynced(&self) -> *mut Self::Target {
-            // SAFETY: Caller guaranteed that the atomic value is not currently
-            // visible by any other thread.
-            unsafe { *self.as_ptr() }
+        fn load_unsynced(&mut self) -> *mut Self::Target {
+            *self.get_mut()
         }
 
         #[cfg(all(loom, test))]
         #[cfg(not(tarpaulin_include))]
-        unsafe fn load_unsynced(&self) -> *mut Self::Target {
-            // SAFETY: Caller guaranteed that the atomic value is not currently
-            // visible by any other thread.
-            unsafe { self.unsync_load() }
+        fn load_unsynced(&mut self) -> *mut Self::Target {
+            self.with_mut(|ptr| *ptr)
         }
     }
 
     mod sealed {
         /// A trait that extends [`AtomicPtr`] so that it will allow loading the
-        /// value without any synchronization.
-        ///
-        /// # Safety
-        ///
-        /// Caller must guarantee that the atomic value is not currently visible
-        /// by any other thread, as this is equivalent to a non-atomic load over
-        /// the value.
+        /// the raw pointer without any synchronization.
         pub trait UnsyncLoad {
             /// The type of the pointed to value.
             type Target;
 
             /// Load the value without any synchronization.
-            unsafe fn load_unsynced(&self) -> *mut Self::Target;
+            fn load_unsynced(&mut self) -> *mut Self::Target;
         }
     }
 }
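Why dropping `unsafe` works here, as a minimal sketch rather than the crate's actual code: an exclusive `&mut self` borrow statically rules out any concurrent access to the atomic, so std's `AtomicPtr::get_mut` (and loom's `with_mut` in the `cfg(loom)` branch) hands back the plain pointer with no caller-side safety contract. The `Node` type below is hypothetical.

```rust
use std::sync::atomic::{AtomicPtr, Ordering};

// Hypothetical demo type; not part of the crate.
struct Node(AtomicPtr<u8>);

impl Node {
    // With `&self`, only an atomic (or unsafe) load is possible.
    fn load_synced(&self) -> *mut u8 {
        self.0.load(Ordering::Relaxed)
    }

    // With `&mut self`, the borrow checker proves exclusive access, so
    // `AtomicPtr::get_mut` yields `&mut *mut u8` without any `unsafe`.
    fn load_unsynced(&mut self) -> *mut u8 {
        *self.0.get_mut()
    }
}

fn main() {
    let mut node = Node(AtomicPtr::new(std::ptr::null_mut()));
    assert!(node.load_synced().is_null());
    assert!(node.load_unsynced().is_null());
}
```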
5 changes: 1 addition & 4 deletions src/inner/raw/mod.rs
@@ -200,10 +200,7 @@ impl<T: ?Sized, L: Lock, W: Wait> Mutex<T, L, W> {
 
 impl<T: ?Sized, L, W> Drop for Mutex<T, L, W> {
     fn drop(&mut self) {
-        // SAFETY: A mutex is only ever dropped once all locking threads have
-        // finished their critical sections, so we have exclusive access over
-        // the queue's tail.
-        let tail = unsafe { self.tail.load_unsynced() };
+        let tail = self.tail.load_unsynced();
         // SAFETY: The memory was allocated through the Box API, therefore it
         // fulfills the layout requirements. The pointer is guaranteed to not
         // be null, since the tail is initialized with a valid allocation, and
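On the caller side, `Drop::drop` already receives `&mut self`, so the tail field is exclusively borrowed and the unsynchronized load no longer needs a SAFETY justification. A rough sketch of that shape, with hypothetical `Queue`/`Node` types standing in for the crate's internals:

```rust
use std::sync::atomic::AtomicPtr;

// Hypothetical stand-ins for the crate's queue node and mutex.
struct Node(u32);

struct Queue {
    // Tail always points at a live, Box-allocated node.
    tail: AtomicPtr<Node>,
}

impl Queue {
    fn new() -> Self {
        let node = Box::into_raw(Box::new(Node(0)));
        Self { tail: AtomicPtr::new(node) }
    }
}

impl Drop for Queue {
    // `drop` takes `&mut self`, so `self.tail` is exclusively borrowed and
    // `get_mut` reads the pointer without atomics or `unsafe`.
    fn drop(&mut self) {
        let tail = *self.tail.get_mut();
        // SAFETY: `tail` came from `Box::into_raw` and is only freed here.
        drop(unsafe { Box::from_raw(tail) });
    }
}

fn main() {
    let queue = Queue::new();
    drop(queue);
}
```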
