diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 6a77bf64baee5..5a4930bfd9e19 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -991,11 +991,13 @@ impl<T> Weak<T> {
     pub fn new() -> Weak<T> {
         unsafe {
             Weak {
-                ptr: Box::into_raw_non_null(box ArcInner {
+                // Note that `box` isn't used specifically due to how it handles
+                // uninhabited types, see #48493 for more info.
+                ptr: Box::into_raw_non_null(Box::new(ArcInner {
                     strong: atomic::AtomicUsize::new(0),
                     weak: atomic::AtomicUsize::new(1),
                     data: uninitialized(),
-                }),
+                })),
             }
         }
     }
@@ -1032,7 +1034,7 @@ impl<T: ?Sized> Weak<T> {
     pub fn upgrade(&self) -> Option<Arc<T>> {
         // We use a CAS loop to increment the strong count instead of a
         // fetch_add because once the count hits 0 it must never be above 0.
-        let inner = self.inner();
+        let inner = self.inner()?;

         // Relaxed load because any write of 0 that we can observe
         // leaves the field in a permanently zero state (so a
@@ -1061,9 +1063,36 @@ impl<T: ?Sized> Weak<T> {
     }

     #[inline]
-    fn inner(&self) -> &ArcInner<T> {
+    fn inner(&self) -> Option<&ArcInner<T>> {
         // See comments above for why this is "safe"
-        unsafe { self.ptr.as_ref() }
+        //
+        // Note that the `Option` being returned here is pretty tricky, but is
+        // in relation to #48493. You can create an instance of `Weak<!>` which
+        // internally contains `NonNull<ArcInner<!>>`. Now the layout of
+        // `ArcInner<!>` directly embeds `!` so rustc correctly deduces that the
+        // size of `ArcInner<!>` is zero. This means that `Box<ArcInner<!>>`
+        // which we create in `Weak::new()` is actually the pointer 1 (pointers
+        // to ZST types are currently `1 as *mut _`).
+        //
+        // As a result we may not actually have an `&ArcInner<T>` to hand out
+        // here. That type can't actually exist for all `T`, such as `!`, so we
+        // can only hand out a safe reference if we've actually got one to hand
+        // out, aka if the size of the value behind the pointer is nonzero.
+        //
+        // Note that this adds an extra branch on methods like `clone` with
+        // trait objects like `Weak<Trait>`, but we should be able to recover the
+        // original performance as soon as we can change `ArcInner<T>` to store a
+        // `MaybeInitialized<T>` internally instead of a `!` directly. That way
+        // our allocation will *always* be allocated and we won't need this
+        // branch.
+        unsafe {
+            let ptr = self.ptr.as_ref();
+            if mem::size_of_val(ptr) == 0 {
+                None
+            } else {
+                Some(ptr)
+            }
+        }
     }
 }
@@ -1082,11 +1111,19 @@ impl<T: ?Sized> Clone for Weak<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T> {
+        let inner = match self.inner() {
+            Some(i) => i,
+            // If we're something like `Weak<!>` then our destructor doesn't do
+            // anything anyway so return the same pointer without doing any
+            // work.
+            None => return Weak { ptr: self.ptr }
+        };
+
         // See comments in Arc::clone() for why this is relaxed. This can use a
         // fetch_add (ignoring the lock) because the weak count is only locked
         // where are *no other* weak pointers in existence. (So we can't be
         // running this code in that case).
-        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = inner.weak.fetch_add(1, Relaxed);

         // See comments in Arc::clone() for why we do this (for mem::forget).
         if old_size > MAX_REFCOUNT {
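
As a standalone illustration of the ZST behavior the `inner` comment above relies on (this sketch is not part of the patch; it uses only stable `Box` APIs):

    // Boxing a zero-sized value allocates nothing: the returned pointer is
    // just the type's alignment used as an address, so two independent
    // boxes of `()` compare equal, and "freeing" them is a no-op.
    fn main() {
        let a: *mut () = Box::into_raw(Box::new(()));
        let b: *mut () = Box::into_raw(Box::new(()));
        assert_eq!(a, b);
        assert_eq!(a as usize, std::mem::align_of::<()>()); // address 1
        unsafe {
            // Reconstituting and dropping performs no real deallocation
            // for ZSTs, which is why this isn't a double free.
            drop(Box::from_raw(a));
            drop(Box::from_raw(b));
        }
    }

This mirrors why `Weak::new()` over a zero-sized `ArcInner` hands back "the pointer 1", and why `inner()` must refuse to turn it into a reference.
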
@@ -1148,6 +1185,10 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         let ptr = self.ptr.as_ptr();
+        let inner = match self.inner() {
+            Some(inner) => inner,
+            None => return, // nothing to change, skip everything
+        };

         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
@@ -1157,7 +1198,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         // weak count can only be locked if there was precisely one weak ref,
         // meaning that drop could only subsequently run ON that remaining weak
         // ref, which can only happen after the lock is released.
-        if self.inner().weak.fetch_sub(1, Release) == 1 {
+        if inner.weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
                 Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 1fa5d34cb5787..4d8355a4f9d8d 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -352,7 +352,7 @@ impl<T> Rc<T> {
                 // the strong count, and then remove the implicit "strong weak"
                 // pointer while also handling drop logic by just crafting a
                 // fake Weak.
-                this.dec_strong();
+                this.inner().dec_strong();
                 let _weak = Weak { ptr: this.ptr };
                 forget(this);
                 Ok(val)
@@ -448,7 +448,7 @@ impl<T: ?Sized> Rc<T> {
     /// ```
     #[stable(feature = "rc_weak", since = "1.4.0")]
     pub fn downgrade(this: &Self) -> Weak<T> {
-        this.inc_weak();
+        this.inner().inc_weak();
         Weak { ptr: this.ptr }
     }
@@ -469,7 +469,7 @@ impl<T: ?Sized> Rc<T> {
     #[inline]
     #[stable(feature = "rc_counts", since = "1.15.0")]
     pub fn weak_count(this: &Self) -> usize {
-        this.weak() - 1
+        this.inner().weak.get() - 1
     }

     /// Gets the number of strong (`Rc`) pointers to this value.
@@ -487,7 +487,7 @@ impl<T: ?Sized> Rc<T> {
     #[inline]
     #[stable(feature = "rc_counts", since = "1.15.0")]
     pub fn strong_count(this: &Self) -> usize {
-        this.strong()
+        this.inner().strong.get()
     }

     /// Returns true if there are no other `Rc` or [`Weak`][weak] pointers to
@@ -557,6 +557,12 @@ impl<T: ?Sized> Rc<T> {
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
         this.ptr.as_ptr() == other.ptr.as_ptr()
     }
+
+    fn inner(&self) -> &RcBox<T> {
+        unsafe {
+            self.ptr.as_ref()
+        }
+    }
 }

 impl<T: Clone> Rc<T> {
@@ -600,10 +606,10 @@ impl<T: Clone> Rc<T> {
             unsafe {
                 let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
                 mem::swap(this, &mut swap);
-                swap.dec_strong();
+                swap.inner().dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
                 // Weak here -- we know other Weaks can clean up for us)
-                swap.dec_weak();
+                swap.inner().dec_weak();
                 forget(swap);
             }
         }
@@ -836,16 +842,16 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
         unsafe {
             let ptr = self.ptr.as_ptr();

-            self.dec_strong();
-            if self.strong() == 0 {
+            self.inner().dec_strong();
+            if self.inner().strong.get() == 0 {
                 // destroy the contained object
                 ptr::drop_in_place(self.ptr.as_mut());

                 // remove the implicit "strong weak" pointer now that we've
                 // destroyed the contents.
-                self.dec_weak();
+                self.inner().dec_weak();

-                if self.weak() == 0 {
+                if self.inner().weak.get() == 0 {
                     Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
                 }
             }
@@ -871,7 +877,7 @@ impl<T: ?Sized> Clone for Rc<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Rc<T> {
-        self.inc_strong();
+        self.inner().inc_strong();
         Rc { ptr: self.ptr, phantom: PhantomData }
     }
 }
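
The `weak_count`/`strong_count` rewrites above are pure refactors through the new `inner()` accessor; the observable semantics, including subtracting out the implicit "strong weak" pointer, are unchanged. A small self-contained check of those semantics (not from the patch):

    use std::rc::Rc;

    fn main() {
        let rc = Rc::new(5);
        assert_eq!(Rc::strong_count(&rc), 1);
        // The implicit "strong weak" pointer is subtracted out, so with no
        // explicit `Weak`s around, the reported weak count is zero.
        assert_eq!(Rc::weak_count(&rc), 0);

        let w = Rc::downgrade(&rc);
        assert_eq!(Rc::weak_count(&rc), 1);

        drop(w);
        assert_eq!(Rc::weak_count(&rc), 0);
    }
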
+                ptr: Box::into_raw_non_null(Box::new(RcBox {
                     strong: Cell::new(0),
                     weak: Cell::new(1),
                     value: uninitialized(),
-                }),
+                })),
             }
         }
     }
@@ -1229,13 +1237,27 @@ impl<T: ?Sized> Weak<T> {
     /// ```
     #[stable(feature = "rc_weak", since = "1.4.0")]
     pub fn upgrade(&self) -> Option<Rc<T>> {
-        if self.strong() == 0 {
+        let inner = self.inner()?;
+        if inner.strong.get() == 0 {
             None
         } else {
-            self.inc_strong();
+            inner.inc_strong();
             Some(Rc { ptr: self.ptr, phantom: PhantomData })
         }
     }
+
+    fn inner(&self) -> Option<&RcBox<T>> {
+        // see the comment in `arc.rs` for why this returns an `Option` rather
+        // than a `&RcBox<T>`
+        unsafe {
+            let ptr = self.ptr.as_ref();
+            if mem::size_of_val(ptr) == 0 {
+                None
+            } else {
+                Some(ptr)
+            }
+        }
+    }
 }

 #[stable(feature = "rc_weak", since = "1.4.0")]
@@ -1267,11 +1289,15 @@ impl<T: ?Sized> Drop for Weak<T> {
     fn drop(&mut self) {
         unsafe {
             let ptr = self.ptr.as_ptr();
+            let inner = match self.inner() {
+                Some(inner) => inner,
+                None => return,
+            };

-            self.dec_weak();
+            inner.dec_weak();
             // the weak count starts at 1, and will only go to zero if all
             // the strong pointers have disappeared.
-            if self.weak() == 0 {
+            if inner.weak.get() == 0 {
                 Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
             }
         }
@@ -1293,7 +1319,9 @@ impl<T: ?Sized> Clone for Weak<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T> {
-        self.inc_weak();
+        if let Some(inner) = self.inner() {
+            inner.inc_weak();
+        }
         Weak { ptr: self.ptr }
     }
 }
@@ -1335,56 +1363,21 @@ impl<T> Default for Weak<T> {
 // This should have negligible overhead since you don't actually need to
 // clone these much in Rust thanks to ownership and move-semantics.

-#[doc(hidden)]
-trait RcBoxPtr<T: ?Sized> {
-    fn inner(&self) -> &RcBox<T>;
-
-    #[inline]
-    fn strong(&self) -> usize {
-        self.inner().strong.get()
-    }
-
-    #[inline]
+impl<T: ?Sized> RcBox<T> {
     fn inc_strong(&self) {
-        self.inner().strong.set(self.strong().checked_add(1).unwrap_or_else(|| unsafe { abort() }));
+        self.strong.set(self.strong.get().checked_add(1).unwrap_or_else(|| unsafe { abort() }));
     }

-    #[inline]
     fn dec_strong(&self) {
-        self.inner().strong.set(self.strong() - 1);
-    }
-
-    #[inline]
-    fn weak(&self) -> usize {
-        self.inner().weak.get()
+        self.strong.set(self.strong.get() - 1);
     }

-    #[inline]
     fn inc_weak(&self) {
-        self.inner().weak.set(self.weak().checked_add(1).unwrap_or_else(|| unsafe { abort() }));
+        self.weak.set(self.weak.get().checked_add(1).unwrap_or_else(|| unsafe { abort() }));
     }

-    #[inline]
     fn dec_weak(&self) {
-        self.inner().weak.set(self.weak() - 1);
-    }
-}
-
-impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
-    #[inline(always)]
-    fn inner(&self) -> &RcBox<T> {
-        unsafe {
-            self.ptr.as_ref()
-        }
-    }
-}
-
-impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
-    #[inline(always)]
-    fn inner(&self) -> &RcBox<T> {
-        unsafe {
-            self.ptr.as_ref()
-        }
+        self.weak.set(self.weak.get() - 1);
     }
 }
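
The `Rc` side of the fix preserves the usual `Weak::new` behavior while handling the uninhabited case; a standalone sanity check (not from the patch; `Void` is an illustrative stand-in for `!`):

    use std::rc::Weak;

    enum Void {}

    fn main() {
        // A fresh `Weak` has no strong owners, so `upgrade` returns `None`.
        let w: Weak<u32> = Weak::new();
        assert!(w.upgrade().is_none());

        // With the fix, the uninhabited case reaches the same answer
        // without ever turning the dangling pointer into a reference.
        let v: Weak<Void> = Weak::new();
        assert!(v.upgrade().is_none());
        let _v2 = v.clone(); // takes the no-op branch added above
    }
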
diff --git a/src/test/run-pass/void-collections.rs b/src/test/run-pass/void-collections.rs
new file mode 100644
index 0000000000000..b4fbb8a44f0f5
--- /dev/null
+++ b/src/test/run-pass/void-collections.rs
@@ -0,0 +1,47 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::collections::HashMap;
+use std::collections::BTreeMap;
+
+#[derive(Eq, PartialEq, Hash, PartialOrd, Ord)]
+enum Void {}
+
+trait Foo {}
+
+impl<T> Foo for T {}
+
+fn main() {
+    std::rc::Weak::<Void>::new();
+    std::rc::Weak::<Void>::new().clone();
+    (std::rc::Weak::<Void>::new() as std::rc::Weak<Foo>);
+    (std::rc::Weak::<Void>::new() as std::rc::Weak<Foo>).clone();
+    std::sync::Weak::<Void>::new();
+    (std::sync::Weak::<Void>::new() as std::sync::Weak<Foo>);
+    (std::sync::Weak::<Void>::new() as std::sync::Weak<Foo>).clone();
+
+    let mut h: HashMap<Void, Void> = HashMap::new();
+    assert_eq!(h.len(), 0);
+    assert_eq!(h.iter().count(), 0);
+    assert_eq!(h.iter_mut().count(), 0);
+    assert_eq!(h.into_iter().count(), 0);
+
+    let mut h: BTreeMap<Void, Void> = BTreeMap::new();
+    assert_eq!(h.len(), 0);
+    assert_eq!(h.iter().count(), 0);
+    assert_eq!(h.iter_mut().count(), 0);
+    assert_eq!(h.into_iter().count(), 0);
+
+    let mut h: Vec<Void> = Vec::new();
+    assert_eq!(h.len(), 0);
+    assert_eq!(h.iter().count(), 0);
+    assert_eq!(h.iter_mut().count(), 0);
+    assert_eq!(h.into_iter().count(), 0);
+}
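
The `as std::rc::Weak<Foo>` casts in the test are the interesting part: they unsize a dangling `Weak<Void>` into a trait-object `Weak`, which is what forces `inner()` to compute `size_of_val` through a fat pointer. A trimmed-down version of that path (not from the patch; written with the later `dyn` syntax):

    use std::rc::Weak;

    enum Void {}
    trait Foo {}
    impl<T> Foo for T {}

    fn main() {
        // The unsizing cast must not read through the dangling pointer;
        // `upgrade` and `clone` both have to notice the zero-sized
        // referent instead of touching the (nonexistent) allocation.
        let w = Weak::<Void>::new() as Weak<dyn Foo>;
        assert!(w.upgrade().is_none());
        let _w2 = w.clone();
    }
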