diff --git a/crossbeam-epoch/src/atomic.rs b/crossbeam-epoch/src/atomic.rs
index 2ccbe7a66..706ba86a4 100644
--- a/crossbeam-epoch/src/atomic.rs
+++ b/crossbeam-epoch/src/atomic.rs
@@ -6,14 +6,13 @@ use core::mem::{self, MaybeUninit};
 use core::ops::{Deref, DerefMut};
 use core::ptr;
 use core::slice;
-use core::sync::atomic::Ordering;
 
 use crate::alloc::alloc;
 use crate::alloc::boxed::Box;
 use crate::guard::Guard;
-use crate::primitive::sync::atomic::AtomicPtr;
 #[cfg(not(miri))]
 use crate::primitive::sync::atomic::AtomicUsize;
+use crate::primitive::sync::atomic::{AtomicPtr, Ordering};
 use crossbeam_utils::atomic::AtomicConsume;
 
 /// Given ordering for the success case in a compare-exchange operation, returns the strongest
diff --git a/crossbeam-epoch/src/epoch.rs b/crossbeam-epoch/src/epoch.rs
index 663508bd7..18d7418a1 100644
--- a/crossbeam-epoch/src/epoch.rs
+++ b/crossbeam-epoch/src/epoch.rs
@@ -7,8 +7,7 @@
 //! If an object became garbage in some epoch, then we can be sure that after two advancements no
 //! participant will hold a reference to it. That is the crux of safe memory reclamation.
 
-use crate::primitive::sync::atomic::AtomicUsize;
-use core::sync::atomic::Ordering;
+use crate::primitive::sync::atomic::{AtomicUsize, Ordering};
 
 /// An epoch that can be marked as pinned or unpinned.
 ///
diff --git a/crossbeam-epoch/src/internal.rs b/crossbeam-epoch/src/internal.rs
index 79fbb9709..74d64808a 100644
--- a/crossbeam-epoch/src/internal.rs
+++ b/crossbeam-epoch/src/internal.rs
@@ -36,11 +36,10 @@
 //! destroyed as soon as the data structure gets dropped.
 
 use crate::primitive::cell::UnsafeCell;
-use crate::primitive::sync::atomic;
+use crate::primitive::sync::atomic::{self, Ordering};
 use core::cell::Cell;
 use core::mem::{self, ManuallyDrop};
 use core::num::Wrapping;
-use core::sync::atomic::Ordering;
 use core::{fmt, ptr};
 
 use crossbeam_utils::CachePadded;
diff --git a/crossbeam-epoch/src/lib.rs b/crossbeam-epoch/src/lib.rs
index 36d47878d..dff22f133 100644
--- a/crossbeam-epoch/src/lib.rs
+++ b/crossbeam-epoch/src/lib.rs
@@ -76,7 +76,7 @@ mod primitive {
     }
     pub(crate) mod sync {
         pub(crate) mod atomic {
-            pub(crate) use loom::sync::atomic::{fence, AtomicPtr, AtomicUsize};
+            pub(crate) use loom::sync::atomic::{fence, AtomicPtr, AtomicUsize, Ordering};
 
             // FIXME: loom does not support compiler_fence at the moment.
             // https://github.com/tokio-rs/loom/issues/117
@@ -122,7 +122,9 @@ mod primitive {
     }
     pub(crate) mod sync {
         pub(crate) mod atomic {
-            pub(crate) use core::sync::atomic::{compiler_fence, fence, AtomicPtr, AtomicUsize};
+            pub(crate) use core::sync::atomic::{
+                compiler_fence, fence, AtomicPtr, AtomicUsize, Ordering,
+            };
         }
         #[cfg(feature = "alloc")]
         pub(crate) use alloc::sync::Arc;
diff --git a/crossbeam-utils/src/atomic/atomic_cell.rs b/crossbeam-utils/src/atomic/atomic_cell.rs
index 2905d6bcc..a31d6277a 100644
--- a/crossbeam-utils/src/atomic/atomic_cell.rs
+++ b/crossbeam-utils/src/atomic/atomic_cell.rs
@@ -1,12 +1,11 @@
 // Necessary for implementing atomic methods for `AtomicUnit`
 #![allow(clippy::unit_arg)]
 
-use crate::primitive::sync::atomic::{self, AtomicBool};
+use crate::primitive::sync::atomic::{self, AtomicBool, Ordering};
 use core::cell::UnsafeCell;
 use core::cmp;
 use core::fmt;
 use core::mem::{self, ManuallyDrop, MaybeUninit};
-use core::sync::atomic::Ordering;
 
 use core::ptr;
 
@@ -940,16 +939,7 @@ macro_rules! atomic {
 
 /// Returns `true` if operations on `AtomicCell<T>` are lock-free.
 const fn atomic_is_lock_free<T>() -> bool {
-    // HACK(taiki-e): This is equivalent to `atomic! { T, _a, true, false }`, but can be used in const fn even in our MSRV (Rust 1.61).
-    let is_lock_free = can_transmute::<T, AtomicUnit>()
-        | can_transmute::<T, atomic::AtomicU8>()
-        | can_transmute::<T, atomic::AtomicU16>()
-        | can_transmute::<T, atomic::AtomicU32>();
-    #[cfg(target_has_atomic = "64")]
-    let is_lock_free = is_lock_free | can_transmute::<T, atomic::AtomicU64>();
-    // TODO: AtomicU128 is unstable
-    // let is_lock_free = is_lock_free | can_transmute::<T, atomic::AtomicU128>();
-    is_lock_free
+    atomic! { T, _a, true, false }
 }
 
 /// Atomically reads data from `src`.
diff --git a/crossbeam-utils/src/backoff.rs b/crossbeam-utils/src/backoff.rs
index 9e256aaf2..7a505ed61 100644
--- a/crossbeam-utils/src/backoff.rs
+++ b/crossbeam-utils/src/backoff.rs
@@ -1,4 +1,4 @@
-use crate::primitive::sync::atomic;
+use crate::primitive::hint;
 use core::cell::Cell;
 use core::fmt;
 
@@ -145,10 +145,7 @@ impl Backoff {
     #[inline]
     pub fn spin(&self) {
         for _ in 0..1 << self.step.get().min(SPIN_LIMIT) {
-            // TODO(taiki-e): once we bump the minimum required Rust version to 1.49+,
-            // use [`core::hint::spin_loop`] instead.
-            #[allow(deprecated)]
-            atomic::spin_loop_hint();
+            hint::spin_loop();
         }
 
         if self.step.get() <= SPIN_LIMIT {
@@ -209,18 +206,12 @@ impl Backoff {
     pub fn snooze(&self) {
         if self.step.get() <= SPIN_LIMIT {
             for _ in 0..1 << self.step.get() {
-                // TODO(taiki-e): once we bump the minimum required Rust version to 1.49+,
-                // use [`core::hint::spin_loop`] instead.
-                #[allow(deprecated)]
-                atomic::spin_loop_hint();
+                hint::spin_loop();
             }
         } else {
             #[cfg(not(feature = "std"))]
             for _ in 0..1 << self.step.get() {
-                // TODO(taiki-e): once we bump the minimum required Rust version to 1.49+,
-                // use [`core::hint::spin_loop`] instead.
-                #[allow(deprecated)]
-                atomic::spin_loop_hint();
+                hint::spin_loop();
             }
 
             #[cfg(feature = "std")]
diff --git a/crossbeam-utils/src/lib.rs b/crossbeam-utils/src/lib.rs
index 7384d1155..6ab748f34 100644
--- a/crossbeam-utils/src/lib.rs
+++ b/crossbeam-utils/src/lib.rs
@@ -42,12 +42,14 @@
 #[cfg(crossbeam_loom)]
 #[allow(unused_imports)]
 mod primitive {
+    pub(crate) mod hint {
+        pub(crate) use loom::hint::spin_loop;
+    }
     pub(crate) mod sync {
         pub(crate) mod atomic {
-            pub(crate) use loom::sync::atomic::spin_loop_hint;
             pub(crate) use loom::sync::atomic::{
                 AtomicBool, AtomicI16, AtomicI32, AtomicI64, AtomicI8, AtomicIsize, AtomicU16,
-                AtomicU32, AtomicU64, AtomicU8, AtomicUsize,
+                AtomicU32, AtomicU64, AtomicU8, AtomicUsize, Ordering,
             };
 
             // FIXME: loom does not support compiler_fence at the moment.
@@ -63,13 +65,12 @@ mod primitive {
 #[cfg(not(crossbeam_loom))]
 #[allow(unused_imports)]
 mod primitive {
+    pub(crate) mod hint {
+        pub(crate) use core::hint::spin_loop;
+    }
     pub(crate) mod sync {
         pub(crate) mod atomic {
-            pub(crate) use core::sync::atomic::compiler_fence;
-            // TODO(taiki-e): once we bump the minimum required Rust version to 1.49+,
-            // use [`core::hint::spin_loop`] instead.
-            #[allow(deprecated)]
-            pub(crate) use core::sync::atomic::spin_loop_hint;
+            pub(crate) use core::sync::atomic::{compiler_fence, Ordering};
             #[cfg(not(crossbeam_no_atomic))]
             pub(crate) use core::sync::atomic::{
                 AtomicBool, AtomicI16, AtomicI8, AtomicIsize, AtomicU16, AtomicU8, AtomicUsize,
diff --git a/crossbeam-utils/src/sync/once_lock.rs b/crossbeam-utils/src/sync/once_lock.rs
index c1fefc96c..761851b01 100644
--- a/crossbeam-utils/src/sync/once_lock.rs
+++ b/crossbeam-utils/src/sync/once_lock.rs
@@ -4,13 +4,10 @@
 
 use core::cell::UnsafeCell;
 use core::mem::MaybeUninit;
-use core::sync::atomic::{AtomicBool, Ordering};
 use std::sync::Once;
 
 pub(crate) struct OnceLock<T> {
     once: Once,
-    // Once::is_completed requires Rust 1.43, so use this to track of whether they have been initialized.
-    is_initialized: AtomicBool,
     value: UnsafeCell<MaybeUninit<T>>,
     // Unlike std::sync::OnceLock, we don't need PhantomData here because
     // we don't use #[may_dangle].
@@ -25,7 +22,6 @@ impl<T> OnceLock<T> {
     pub(crate) const fn new() -> Self {
         Self {
             once: Once::new(),
-            is_initialized: AtomicBool::new(false),
             value: UnsafeCell::new(MaybeUninit::uninit()),
         }
     }
@@ -50,37 +46,28 @@ impl<T> OnceLock<T> {
     where
         F: FnOnce() -> T,
     {
         // Fast path check
-        if self.is_initialized() {
+        if self.once.is_completed() {
             // SAFETY: The inner value has been initialized
             return unsafe { self.get_unchecked() };
         }
         self.initialize(f);
 
-        debug_assert!(self.is_initialized());
-
         // SAFETY: The inner value has been initialized
         unsafe { self.get_unchecked() }
     }
 
-    #[inline]
-    fn is_initialized(&self) -> bool {
-        self.is_initialized.load(Ordering::Acquire)
-    }
-
     #[cold]
     fn initialize<F>(&self, f: F)
     where
         F: FnOnce() -> T,
     {
         let slot = self.value.get().cast::<T>();
-        let is_initialized = &self.is_initialized;
         self.once.call_once(|| {
             let value = f();
             unsafe {
                 slot.write(value);
             }
-            is_initialized.store(true, Ordering::Release);
         });
     }
 
@@ -88,14 +75,14 @@ impl<T> OnceLock<T> {
     ///
     /// The value must be initialized
     unsafe fn get_unchecked(&self) -> &T {
-        debug_assert!(self.is_initialized());
+        debug_assert!(self.once.is_completed());
         &*self.value.get().cast::<T>()
     }
 }
 
 impl<T> Drop for OnceLock<T> {
     fn drop(&mut self) {
-        if self.is_initialized() {
+        if self.once.is_completed() {
             // SAFETY: The inner value has been initialized
             unsafe { self.value.get().cast::<T>().drop_in_place() };
         }
diff --git a/crossbeam-utils/src/sync/parker.rs b/crossbeam-utils/src/sync/parker.rs
index df60783c8..57d591560 100644
--- a/crossbeam-utils/src/sync/parker.rs
+++ b/crossbeam-utils/src/sync/parker.rs
@@ -1,6 +1,5 @@
-use crate::primitive::sync::atomic::AtomicUsize;
+use crate::primitive::sync::atomic::{AtomicUsize, Ordering::SeqCst};
 use crate::primitive::sync::{Arc, Condvar, Mutex};
-use core::sync::atomic::Ordering::SeqCst;
 use std::fmt;
 use std::marker::PhantomData;
 use std::time::{Duration, Instant};