auto merge of #19765 : luqmana/rust/nonzero-lang-item, r=nikomatsakis
This extends the nullable enum optimization to traverse beyond just the first level when looking for a field to use as the discriminant, so it now works through structs, tuples, and fixed-size arrays. It also introduces a new lang item, `NonZero`, which can wrap raw pointers or integral types to tell rustc that the underlying value is known never to be 0/NULL. `Vec`, `Rc`, and `Arc` now use it so that they benefit from the nullable enum optimization as well.

As per rust-lang/rfcs#499, `NonZero` is not exposed via the `libstd` facade.

```
x86_64 Linux:
                        T       Option<T> (Before)      Option<T> (After)
----------------------------------------------------------------------------------
Vec<int>                24          32                      24
String                  24          32                      24
Rc<int>                 8           16                      8
Arc<int>                8           16                      8
[Box<int>, ..2]         16          24                      16
(String, uint)          32          40                      32
```
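
The table is straightforward to reproduce. Below is a small sketch for a modern toolchain (not part of this patch): `int`/`uint` have since become `isize`/`usize`, and the non-null guarantee that `NonZero` provides here is expressed today by `std::ptr::NonNull` and the `NonZero*` integer types. The assertions mirror the table row for row.

```rust
use std::mem::size_of;
use std::rc::Rc;
use std::sync::Arc;

fn main() {
    // With the niche optimization, `None` is encoded as the all-zero bit pattern,
    // so `Option<T>` costs no extra space for these types.
    assert_eq!(size_of::<Option<Vec<isize>>>(), size_of::<Vec<isize>>());
    assert_eq!(size_of::<Option<String>>(), size_of::<String>());
    assert_eq!(size_of::<Option<Rc<isize>>>(), size_of::<Rc<isize>>());
    assert_eq!(size_of::<Option<Arc<isize>>>(), size_of::<Arc<isize>>());
    // The optimization also looks through aggregates: arrays and tuples of
    // niche-carrying types still get a free discriminant.
    assert_eq!(size_of::<Option<[Box<isize>; 2]>>(), size_of::<[Box<isize>; 2]>());
    assert_eq!(size_of::<Option<(String, usize)>>(), size_of::<(String, usize)>());
    println!("all Option<T> sizes match the table above");
}
```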

Fixes #19419.
Fixes #13194.
Fixes #9378.
Fixes #7576.
bors committed Dec 29, 2014
2 parents 03a1188 + 766a719 commit 25fb12b
Showing 13 changed files with 370 additions and 155 deletions (only the two `src/liballoc` files are reproduced below).
src/liballoc/arc.rs (15 additions, 12 deletions)
@@ -76,11 +76,11 @@ use core::default::Default;
use core::kinds::{Sync, Send};
use core::mem::{min_align_of, size_of, drop};
use core::mem;
+ use core::nonzero::NonZero;
use core::ops::{Drop, Deref};
use core::option::Option;
use core::option::Option::{Some, None};
- use core::ptr::RawPtr;
- use core::ptr;
+ use core::ptr::{mod, RawPtr};
use heap::deallocate;

/// An atomically reference counted wrapper for shared state.
@@ -114,7 +114,7 @@ use heap::deallocate;
pub struct Arc<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
- _ptr: *mut ArcInner<T>,
+ _ptr: NonZero<*mut ArcInner<T>>,
}

unsafe impl<T: Sync + Send> Send for Arc<T> { }
@@ -130,7 +130,7 @@ unsafe impl<T: Sync + Send> Sync for Arc<T> { }
pub struct Weak<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
- _ptr: *mut ArcInner<T>,
+ _ptr: NonZero<*mut ArcInner<T>>,
}

unsafe impl<T: Sync + Send> Send for Weak<T> { }
@@ -165,7 +165,7 @@ impl<T> Arc<T> {
weak: atomic::AtomicUint::new(1),
data: data,
};
- Arc { _ptr: unsafe { mem::transmute(x) } }
+ Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
}

/// Downgrades the `Arc<T>` to a `Weak<T>` reference.
@@ -194,7 +194,7 @@ impl<T> Arc<T> {
// pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync`
// because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer
// to these contents.
- unsafe { &*self._ptr }
+ unsafe { &**self._ptr }
}
}

@@ -281,7 +281,7 @@ impl<T: Send + Sync + Clone> Arc<T> {
// pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
// this point, and we required the Arc itself to be `mut`, so we're returning the only
// possible reference to the inner data.
- let inner = unsafe { &mut *self._ptr };
+ let inner = unsafe { &mut **self._ptr };
&mut inner.data
}
}
@@ -316,7 +316,8 @@ impl<T: Sync + Send> Drop for Arc<T> {
fn drop(&mut self) {
// This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
// it is guaranteed to be zeroed after the first if it's run more than once)
- if self._ptr.is_null() { return }
+ let ptr = *self._ptr;
+ if ptr.is_null() { return }

// Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
// unless we are going to delete the object. This same logic applies to the below
@@ -346,7 +347,7 @@ impl<T: Sync + Send> Drop for Arc<T> {

if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
atomic::fence(atomic::Acquire);
- unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
+ unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
@@ -386,7 +387,7 @@ impl<T: Sync + Send> Weak<T> {
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
- unsafe { &*self._ptr }
+ unsafe { &**self._ptr }
}
}

@@ -442,14 +443,16 @@ impl<T: Sync + Send> Drop for Weak<T> {
/// } // implicit drop
/// ```
fn drop(&mut self) {
+ let ptr = *self._ptr;
+
// see comments above for why this check is here
- if self._ptr.is_null() { return }
+ if ptr.is_null() { return }

// If we find out that we were the last weak pointer, then its time to deallocate the data
// entirely. See the discussion in Arc::drop() about the memory orderings
if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
atomic::fence(atomic::Acquire);
- unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
+ unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
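
Two patterns recur in the arc.rs hunks above. First, every read of `_ptr` gains an extra `*`: the field is now a wrapper around the raw pointer, so one dereference peels the wrapper and a second follows the pointer. Second, the `Drop` impls copy the pointer out first (`let ptr = *self._ptr;`) and keep the null check, because with `#[unsafe_no_drop_flag]` the drop glue can run again on a zeroed struct, as the comment in the diff notes. The sketch below illustrates the first pattern with a minimal stand-in for the wrapper; it is not the real `core::nonzero` API, and `Inner`/`Handle` are hypothetical types used only for illustration.

```rust
use std::ops::Deref;

/// Minimal stand-in for the `NonZero` wrapper: it stores a value that the caller
/// promises is never zero/null (the real lang item also tells rustc about this
/// invariant so `Option` can reuse the zero bit pattern as `None`).
struct NonZero<T>(T);

impl<T> NonZero<T> {
    /// Caller must guarantee `value` is non-zero / non-null.
    unsafe fn new(value: T) -> NonZero<T> {
        NonZero(value)
    }
}

impl<T> Deref for NonZero<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

struct Inner {
    data: u32,
}

/// Hypothetical owner type mirroring the `_ptr: NonZero<*mut ArcInner<T>>` field above.
struct Handle {
    _ptr: NonZero<*mut Inner>,
}

impl Handle {
    fn inner(&self) -> &Inner {
        // The first `*` goes through `Deref` on the wrapper; the second follows the raw pointer.
        unsafe { &**self._ptr }
    }
}

fn main() {
    let raw = Box::into_raw(Box::new(Inner { data: 7 }));
    let handle = Handle { _ptr: unsafe { NonZero::new(raw) } };
    assert_eq!(handle.inner().data, 7);
    // Reclaim the allocation; `Handle` has no destructor in this sketch.
    unsafe { drop(Box::from_raw(raw)) };
}
```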
src/liballoc/rc.rs (17 additions, 15 deletions)
@@ -150,11 +150,11 @@ use core::fmt;
use core::hash::{mod, Hash};
use core::kinds::marker;
use core::mem::{transmute, min_align_of, size_of, forget};
+ use core::nonzero::NonZero;
use core::ops::{Deref, Drop};
use core::option::Option;
use core::option::Option::{Some, None};
- use core::ptr;
- use core::ptr::RawPtr;
+ use core::ptr::{mod, RawPtr};
use core::result::Result;
use core::result::Result::{Ok, Err};

@@ -174,7 +174,7 @@ struct RcBox<T> {
pub struct Rc<T> {
// FIXME #12808: strange names to try to avoid interfering with field accesses of the contained
// type via Deref
- _ptr: *mut RcBox<T>,
+ _ptr: NonZero<*mut RcBox<T>>,
_nosend: marker::NoSend,
_noshare: marker::NoSync
}
@@ -196,11 +196,11 @@ impl<T> Rc<T> {
// there is an implicit weak pointer owned by all the strong pointers, which
// ensures that the weak destructor never frees the allocation while the strong
// destructor is running, even if the weak pointer is stored inside the strong one.
- _ptr: transmute(box RcBox {
+ _ptr: NonZero::new(transmute(box RcBox {
value: value,
strong: Cell::new(1),
weak: Cell::new(1)
- }),
+ })),
_nosend: marker::NoSend,
_noshare: marker::NoSync
}
@@ -281,7 +281,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
let val = ptr::read(&*rc); // copy the contained object
// destruct the box and skip our Drop
// we can ignore the refcounts because we know we're unique
- deallocate(rc._ptr as *mut u8, size_of::<RcBox<T>>(),
+ deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>());
forget(rc);
Ok(val)
@@ -311,7 +311,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
#[experimental]
pub fn get_mut<'a, T>(rc: &'a mut Rc<T>) -> Option<&'a mut T> {
if is_unique(rc) {
- let inner = unsafe { &mut *rc._ptr };
+ let inner = unsafe { &mut **rc._ptr };
Some(&mut inner.value)
} else {
None
@@ -343,7 +343,7 @@ impl<T: Clone> Rc<T> {
// pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
// this point, and we required the `Rc<T>` itself to be `mut`, so we're returning the only
// possible reference to the inner value.
- let inner = unsafe { &mut *self._ptr };
+ let inner = unsafe { &mut **self._ptr };
&mut inner.value
}
}
@@ -391,7 +391,8 @@ impl<T> Drop for Rc<T> {
/// ```
fn drop(&mut self) {
unsafe {
- if !self._ptr.is_null() {
+ let ptr = *self._ptr;
+ if !ptr.is_null() {
self.dec_strong();
if self.strong() == 0 {
ptr::read(&**self); // destroy the contained object
Expand All @@ -401,7 +402,7 @@ impl<T> Drop for Rc<T> {
self.dec_weak();

if self.weak() == 0 {
- deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
+ deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
}
}
@@ -618,7 +619,7 @@ impl<T: fmt::Show> fmt::Show for Rc<T> {
pub struct Weak<T> {
// FIXME #12808: strange names to try to avoid interfering with
// field accesses of the contained type via Deref
- _ptr: *mut RcBox<T>,
+ _ptr: NonZero<*mut RcBox<T>>,
_nosend: marker::NoSend,
_noshare: marker::NoSync
}
@@ -682,12 +683,13 @@ impl<T> Drop for Weak<T> {
/// ```
fn drop(&mut self) {
unsafe {
- if !self._ptr.is_null() {
+ let ptr = *self._ptr;
+ if !ptr.is_null() {
self.dec_weak();
// the weak count starts at 1, and will only go to zero if all the strong pointers
// have disappeared.
if self.weak() == 0 {
- deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
+ deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
}
}
@@ -742,12 +744,12 @@ trait RcBoxPtr<T> {

impl<T> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
- fn inner(&self) -> &RcBox<T> { unsafe { &(*self._ptr) } }
+ fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
}

impl<T> RcBoxPtr<T> for Weak<T> {
#[inline(always)]
- fn inner(&self) -> &RcBox<T> { unsafe { &(*self._ptr) } }
+ fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
}

#[cfg(test)]