diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh index c470ae7eb3030..3465e386cd925 100755 --- a/src/ci/docker/run.sh +++ b/src/ci/docker/run.sh @@ -36,8 +36,10 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then s3url="s3://$SCCACHE_BUCKET/docker/$cksum" url="https://s3-us-west-1.amazonaws.com/$SCCACHE_BUCKET/docker/$cksum" echo "Attempting to download $s3url" + rm -f /tmp/rustci_docker_cache set +e - loaded_images=$(curl $url | docker load | sed 's/.* sha/sha/') + retry curl -f -L -C - -o /tmp/rustci_docker_cache "$url" + loaded_images=$(docker load -i /tmp/rustci_docker_cache | sed 's/.* sha/sha/') set -e echo "Downloaded containers:\n$loaded_images" fi diff --git a/src/ci/shared.sh b/src/ci/shared.sh index 4a08683e3ee86..bb6945f0fd6bb 100644 --- a/src/ci/shared.sh +++ b/src/ci/shared.sh @@ -21,11 +21,12 @@ function retry { while true; do "$@" && break || { if [[ $n -lt $max ]]; then + sleep $n # don't retry immediately ((n++)) echo "Command failed. Attempt $n/$max:" else echo "The command has failed after $n attempts." - exit 1 + return 1 fi } done diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 4d73d3aa07e66..5c6f6b22aae06 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -385,26 +385,7 @@ impl RawVec { } } - /// Ensures that the buffer contains at least enough space to hold - /// `used_cap + needed_extra_cap` elements. If it doesn't already, - /// will reallocate the minimum possible amount of memory necessary. - /// Generally this will be exactly the amount of memory necessary, - /// but in principle the allocator is free to give back more than - /// we asked for. - /// - /// If `used_cap` exceeds `self.cap()`, this may fail to actually allocate - /// the requested space. This is not really unsafe, but the unsafe - /// code *you* write that relies on the behavior of this function may break. - /// - /// # Panics - /// - /// * Panics if the requested capacity exceeds `usize::MAX` bytes. - /// * Panics on 32-bit platforms if the requested capacity exceeds - /// `isize::MAX` bytes. - /// - /// # Aborts - /// - /// Aborts on OOM + /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) -> Result<(), CollectionAllocErr> { @@ -441,6 +422,26 @@ impl RawVec { } } + /// Ensures that the buffer contains at least enough space to hold + /// `used_cap + needed_extra_cap` elements. If it doesn't already, + /// will reallocate the minimum possible amount of memory necessary. + /// Generally this will be exactly the amount of memory necessary, + /// but in principle the allocator is free to give back more than + /// we asked for. + /// + /// If `used_cap` exceeds `self.cap()`, this may fail to actually allocate + /// the requested space. This is not really unsafe, but the unsafe + /// code *you* write that relies on the behavior of this function may break. + /// + /// # Panics + /// + /// * Panics if the requested capacity exceeds `usize::MAX` bytes. + /// * Panics on 32-bit platforms if the requested capacity exceeds + /// `isize::MAX` bytes. + /// + /// # Aborts + /// + /// Aborts on OOM pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) { match self.try_reserve_exact(used_cap, needed_extra_cap) { Err(CapacityOverflow) => capacity_overflow(), @@ -463,6 +464,42 @@ impl RawVec { Ok(cmp::max(double_cap, required_cap)) } + /// The same as `reserve`, but returns on errors instead of panicking or aborting. 
+ pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize) + -> Result<(), CollectionAllocErr> { + unsafe { + // NOTE: we don't early branch on ZSTs here because we want this + // to actually catch "asking for more than usize::MAX" in that case. + // If we make it past the first branch then we are guaranteed to + // panic. + + // Don't actually need any more capacity. + // Wrapping in case they give a bad `used_cap` + if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { + return Ok(()); + } + + let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)?; + let new_layout = Layout::array::(new_cap).map_err(|_| CapacityOverflow)?; + + // FIXME: may crash and burn on over-reserve + alloc_guard(new_layout.size())?; + + let res = match self.current_layout() { + Some(layout) => { + debug_assert!(new_layout.align() == layout.align()); + self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size()) + } + None => self.a.alloc(new_layout), + }; + + self.ptr = res?.cast().into(); + self.cap = new_cap; + + Ok(()) + } + } + /// Ensures that the buffer contains at least enough space to hold /// `used_cap + needed_extra_cap` elements. If it doesn't already have /// enough capacity, will reallocate enough space plus comfortable slack @@ -515,42 +552,6 @@ impl RawVec { /// # vector.push_all(&[1, 3, 5, 7, 9]); /// # } /// ``` - pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize) - -> Result<(), CollectionAllocErr> { - unsafe { - // NOTE: we don't early branch on ZSTs here because we want this - // to actually catch "asking for more than usize::MAX" in that case. - // If we make it past the first branch then we are guaranteed to - // panic. - - // Don't actually need any more capacity. - // Wrapping in case they give a bad `used_cap` - if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { - return Ok(()); - } - - let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)?; - let new_layout = Layout::array::(new_cap).map_err(|_| CapacityOverflow)?; - - // FIXME: may crash and burn on over-reserve - alloc_guard(new_layout.size())?; - - let res = match self.current_layout() { - Some(layout) => { - debug_assert!(new_layout.align() == layout.align()); - self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size()) - } - None => self.a.alloc(new_layout), - }; - - self.ptr = res?.cast().into(); - self.cap = new_cap; - - Ok(()) - } - } - - /// The same as try_reserve, but errors are lowered to a call to oom(). pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) { match self.try_reserve(used_cap, needed_extra_cap) { Err(CapacityOverflow) => capacity_overflow(), diff --git a/src/liballoc/slice.rs b/src/liballoc/slice.rs index d50a3458f20d1..6caf12aa7eb81 100644 --- a/src/liballoc/slice.rs +++ b/src/liballoc/slice.rs @@ -10,6 +10,8 @@ //! A dynamically-sized view into a contiguous sequence, `[T]`. //! +//! *[See also the slice primitive type](../../std/primitive.slice.html).* +//! //! Slices are a view into a block of memory represented as a pointer and a //! length. //! @@ -78,8 +80,6 @@ //! * Further methods that return iterators are [`.split`], [`.splitn`], //! [`.chunks`], [`.windows`] and more. //! -//! *[See also the slice primitive type](../../std/primitive.slice.html).* -//! //! [`Clone`]: ../../std/clone/trait.Clone.html //! [`Eq`]: ../../std/cmp/trait.Eq.html //! 
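// Illustrative aside, not part of the patch above: the point of the new
// fallible `try_reserve`/`try_reserve_exact` methods is to let callers turn
// allocation failure into a recoverable `Err` instead of a panic or abort.
// A minimal sketch of that calling pattern, written against the
// `Vec::try_reserve_exact` wrapper that current std exposes on top of this
// machinery (assumed available for the example):
use std::collections::TryReserveError;

fn allocate_buffer(len: usize) -> Result<Vec<u8>, TryReserveError> {
    let mut buf = Vec::new();
    // A huge or unsatisfiable `len` becomes an `Err` the caller can handle,
    // rather than a capacity-overflow panic or an abort on OOM.
    buf.try_reserve_exact(len)?;
    buf.resize(len, 0);
    Ok(buf)
}

fn main() {
    assert!(allocate_buffer(16).is_ok());
    // `usize::MAX` bytes can never be reserved, so this reports an error.
    assert!(allocate_buffer(usize::MAX).is_err());
}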
[`Ord`]: ../../std/cmp/trait.Ord.html diff --git a/src/liballoc/str.rs b/src/liballoc/str.rs index 9e693c89be90d..42efdea74b1ab 100644 --- a/src/liballoc/str.rs +++ b/src/liballoc/str.rs @@ -10,6 +10,8 @@ //! Unicode string slices. //! +//! *[See also the `str` primitive type](../../std/primitive.str.html).* +//! //! The `&str` type is one of the two main string types, the other being `String`. //! Unlike its `String` counterpart, its contents are borrowed. //! @@ -29,8 +31,6 @@ //! ``` //! let hello_world: &'static str = "Hello, world!"; //! ``` -//! -//! *[See also the `str` primitive type](../../std/primitive.str.html).* #![stable(feature = "rust1", since = "1.0.0")] diff --git a/src/liballoc/tests/str.rs b/src/liballoc/tests/str.rs index a03b61ec97e51..1a47e5433ea90 100644 --- a/src/liballoc/tests/str.rs +++ b/src/liballoc/tests/str.rs @@ -291,113 +291,379 @@ fn test_replace_pattern() { assert_eq!(data.replace(|c| c == 'γ', "😺😺😺"), "abcdαβ😺😺😺δabcdαβ😺😺😺δ"); } -#[test] -fn test_slice() { - assert_eq!("ab", &"abc"[0..2]); - assert_eq!("bc", &"abc"[1..3]); - assert_eq!("", &"abc"[1..1]); - assert_eq!("\u{65e5}", &"\u{65e5}\u{672c}"[0..3]); +// The current implementation of SliceIndex fails to handle methods +// orthogonally from range types; therefore, it is worth testing +// all of the indexing operations on each input. +mod slice_index { + // Test a slicing operation **that should succeed,** + // testing it on all of the indexing methods. + // + // This is not suitable for testing failure on invalid inputs. + macro_rules! assert_range_eq { + ($s:expr, $range:expr, $expected:expr) + => { + let mut s: String = $s.to_owned(); + let mut expected: String = $expected.to_owned(); + { + let s: &str = &s; + let expected: &str = &expected; + + assert_eq!(&s[$range], expected, "(in assertion for: index)"); + assert_eq!(s.get($range), Some(expected), "(in assertion for: get)"); + unsafe { + assert_eq!( + s.get_unchecked($range), expected, + "(in assertion for: get_unchecked)", + ); + } + } + { + let s: &mut str = &mut s; + let expected: &mut str = &mut expected; + + assert_eq!( + &mut s[$range], expected, + "(in assertion for: index_mut)", + ); + assert_eq!( + s.get_mut($range), Some(&mut expected[..]), + "(in assertion for: get_mut)", + ); + unsafe { + assert_eq!( + s.get_unchecked_mut($range), expected, + "(in assertion for: get_unchecked_mut)", + ); + } + } + } + } - let data = "ประเทศไทย中华"; - assert_eq!("ป", &data[0..3]); - assert_eq!("ร", &data[3..6]); - assert_eq!("", &data[3..3]); - assert_eq!("华", &data[30..33]); + // Make sure the macro can actually detect bugs, + // because if it can't, then what are we even doing here? + // + // (Be aware this only demonstrates the ability to detect bugs + // in the FIRST method that panics, as the macro is not designed + // to be used in `should_panic`) + #[test] + #[should_panic(expected = "out of bounds")] + fn assert_range_eq_can_fail_by_panic() { + assert_range_eq!("abc", 0..5, "abc"); + } - fn a_million_letter_x() -> String { - let mut i = 0; - let mut rs = String::new(); - while i < 100000 { - rs.push_str("华华华华华华华华华华"); - i += 1; + // (Be aware this only demonstrates the ability to detect bugs + // in the FIRST method it calls, as the macro is not designed + // to be used in `should_panic`) + #[test] + #[should_panic(expected = "==")] + fn assert_range_eq_can_fail_by_inequality() { + assert_range_eq!("abc", 0..2, "abc"); + } + + // Generates test cases for bad index operations. 
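// Illustrative aside, not part of the patch: the behaviors the rewritten
// `slice_index` test module pins down for `str`. Plain indexing panics on a
// non-char-boundary or out-of-range index, `get` returns `None` for the same
// inputs, and an inclusive bound of `usize::MAX` is rejected rather than
// overflowed. A minimal sketch:
fn main() {
    let s = "abcαβγ";
    // Byte 3 starts 'α' (a two-byte char), so 3..5 is a valid slice...
    assert_eq!(&s[3..5], "α");
    assert_eq!(s.get(3..5), Some("α"));
    // ...but byte 4 falls inside 'α': `get` reports that as `None`, while
    // `&s[4..]` would panic with the "not a char boundary" message.
    assert!(!s.is_char_boundary(4));
    assert_eq!(s.get(4..), None);
    // An inclusive end of usize::MAX cannot be turned into an exclusive
    // bound without overflow, so `get` returns `None` instead of panicking.
    assert_eq!(s.get(..=usize::MAX), None);
}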
+ // + // This generates `should_panic` test cases for Index/IndexMut + // and `None` test cases for get/get_mut. + macro_rules! panic_cases { + ($( + in mod $case_name:ident { + data: $data:expr; + + // optional: + // + // a similar input for which DATA[input] succeeds, and the corresponding + // output str. This helps validate "critical points" where an input range + // straddles the boundary between valid and invalid. + // (such as the input `len..len`, which is just barely valid) + $( + good: data[$good:expr] == $output:expr; + )* + + bad: data[$bad:expr]; + message: $expect_msg:expr; // must be a literal + } + )*) => {$( + mod $case_name { + #[test] + fn pass() { + let mut v: String = $data.into(); + + $( assert_range_eq!(v, $good, $output); )* + + { + let v: &str = &v; + assert_eq!(v.get($bad), None, "(in None assertion for get)"); + } + + { + let v: &mut str = &mut v; + assert_eq!(v.get_mut($bad), None, "(in None assertion for get_mut)"); + } + } + + #[test] + #[should_panic(expected = $expect_msg)] + fn index_fail() { + let v: String = $data.into(); + let v: &str = &v; + let _v = &v[$bad]; + } + + #[test] + #[should_panic(expected = $expect_msg)] + fn index_mut_fail() { + let mut v: String = $data.into(); + let v: &mut str = &mut v; + let _v = &mut v[$bad]; + } + } + )*}; + } + + #[test] + fn simple_ascii() { + assert_range_eq!("abc", .., "abc"); + + assert_range_eq!("abc", 0..2, "ab"); + assert_range_eq!("abc", 0..=1, "ab"); + assert_range_eq!("abc", ..2, "ab"); + assert_range_eq!("abc", ..=1, "ab"); + + assert_range_eq!("abc", 1..3, "bc"); + assert_range_eq!("abc", 1..=2, "bc"); + assert_range_eq!("abc", 1..1, ""); + assert_range_eq!("abc", 1..=0, ""); + } + + #[test] + fn simple_unicode() { + // 日本 + assert_range_eq!("\u{65e5}\u{672c}", .., "\u{65e5}\u{672c}"); + + assert_range_eq!("\u{65e5}\u{672c}", 0..3, "\u{65e5}"); + assert_range_eq!("\u{65e5}\u{672c}", 0..=2, "\u{65e5}"); + assert_range_eq!("\u{65e5}\u{672c}", ..3, "\u{65e5}"); + assert_range_eq!("\u{65e5}\u{672c}", ..=2, "\u{65e5}"); + + assert_range_eq!("\u{65e5}\u{672c}", 3..6, "\u{672c}"); + assert_range_eq!("\u{65e5}\u{672c}", 3..=5, "\u{672c}"); + assert_range_eq!("\u{65e5}\u{672c}", 3.., "\u{672c}"); + + let data = "ประเทศไทย中华"; + assert_range_eq!(data, 0..3, "ป"); + assert_range_eq!(data, 3..6, "ร"); + assert_range_eq!(data, 3..3, ""); + assert_range_eq!(data, 30..33, "华"); + + /*0: 中 + 3: 华 + 6: V + 7: i + 8: ệ + 11: t + 12: + 13: N + 14: a + 15: m */ + let ss = "中华Việt Nam"; + assert_range_eq!(ss, 3..6, "华"); + assert_range_eq!(ss, 6..16, "Việt Nam"); + assert_range_eq!(ss, 6..=15, "Việt Nam"); + assert_range_eq!(ss, 6.., "Việt Nam"); + + assert_range_eq!(ss, 0..3, "中"); + assert_range_eq!(ss, 3..7, "华V"); + assert_range_eq!(ss, 3..=6, "华V"); + assert_range_eq!(ss, 3..3, ""); + assert_range_eq!(ss, 3..=2, ""); + } + + #[test] + #[cfg(not(target_arch = "asmjs"))] // hits an OOM + fn simple_big() { + fn a_million_letter_x() -> String { + let mut i = 0; + let mut rs = String::new(); + while i < 100000 { + rs.push_str("华华华华华华华华华华"); + i += 1; + } + rs } - rs + fn half_a_million_letter_x() -> String { + let mut i = 0; + let mut rs = String::new(); + while i < 100000 { + rs.push_str("华华华华华"); + i += 1; + } + rs + } + let letters = a_million_letter_x(); + assert_range_eq!(letters, 0..3 * 500000, half_a_million_letter_x()); } - fn half_a_million_letter_x() -> String { - let mut i = 0; - let mut rs = String::new(); - while i < 100000 { - rs.push_str("华华华华华"); - i += 1; + + #[test] + #[should_panic] + fn test_slice_fail() 
{ + &"中华Việt Nam"[0..2]; + } + + panic_cases! { + in mod rangefrom_len { + data: "abcdef"; + good: data[6..] == ""; + bad: data[7..]; + message: "out of bounds"; + } + + in mod rangeto_len { + data: "abcdef"; + good: data[..6] == "abcdef"; + bad: data[..7]; + message: "out of bounds"; + } + + in mod rangetoinclusive_len { + data: "abcdef"; + good: data[..=5] == "abcdef"; + bad: data[..=6]; + message: "out of bounds"; + } + + in mod range_len_len { + data: "abcdef"; + good: data[6..6] == ""; + bad: data[7..7]; + message: "out of bounds"; + } + + in mod rangeinclusive_len_len { + data: "abcdef"; + good: data[6..=5] == ""; + bad: data[7..=6]; + message: "out of bounds"; } - rs } - let letters = a_million_letter_x(); - assert_eq!(half_a_million_letter_x(), &letters[0..3 * 500000]); -} -#[test] -fn test_slice_2() { - let ss = "中华Việt Nam"; + panic_cases! { + in mod range_neg_width { + data: "abcdef"; + good: data[4..4] == ""; + bad: data[4..3]; + message: "begin <= end (4 <= 3)"; + } - assert_eq!("华", &ss[3..6]); - assert_eq!("Việt Nam", &ss[6..16]); + in mod rangeinclusive_neg_width { + data: "abcdef"; + good: data[4..=3] == ""; + bad: data[4..=2]; + message: "begin <= end (4 <= 3)"; + } + } - assert_eq!("ab", &"abc"[0..2]); - assert_eq!("bc", &"abc"[1..3]); - assert_eq!("", &"abc"[1..1]); + mod overflow { + panic_cases! { + in mod rangeinclusive { + data: "hello"; + // note: using 0 specifically ensures that the result of overflowing is 0..0, + // so that `get` doesn't simply return None for the wrong reason. + bad: data[0..=usize::max_value()]; + message: "maximum usize"; + } - assert_eq!("中", &ss[0..3]); - assert_eq!("华V", &ss[3..7]); - assert_eq!("", &ss[3..3]); - /*0: 中 - 3: 华 - 6: V - 7: i - 8: ệ - 11: t - 12: - 13: N - 14: a - 15: m */ -} + in mod rangetoinclusive { + data: "hello"; + bad: data[..=usize::max_value()]; + message: "maximum usize"; + } + } + } -#[test] -#[should_panic] -fn test_slice_fail() { - &"中华Việt Nam"[0..2]; -} + mod boundary { + const DATA: &'static str = "abcαβγ"; + + const BAD_START: usize = 4; + const GOOD_START: usize = 3; + const BAD_END: usize = 6; + const GOOD_END: usize = 7; + const BAD_END_INCL: usize = BAD_END - 1; + const GOOD_END_INCL: usize = GOOD_END - 1; + + // it is especially important to test all of the different range types here + // because some of the logic may be duplicated as part of micro-optimizations + // to dodge unicode boundary checks on half-ranges. + panic_cases! 
{ + in mod range_1 { + data: super::DATA; + bad: data[super::BAD_START..super::GOOD_END]; + message: + "byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of"; + } -#[test] -#[should_panic] -fn test_str_slice_rangetoinclusive_max_panics() { - &"hello"[..=usize::max_value()]; -} + in mod range_2 { + data: super::DATA; + bad: data[super::GOOD_START..super::BAD_END]; + message: + "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of"; + } -#[test] -#[should_panic] -fn test_str_slice_rangeinclusive_max_panics() { - &"hello"[1..=usize::max_value()]; -} + in mod rangefrom { + data: super::DATA; + bad: data[super::BAD_START..]; + message: + "byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of"; + } -#[test] -#[should_panic] -fn test_str_slicemut_rangetoinclusive_max_panics() { - let mut s = "hello".to_owned(); - let s: &mut str = &mut s; - &mut s[..=usize::max_value()]; -} + in mod rangeto { + data: super::DATA; + bad: data[..super::BAD_END]; + message: + "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of"; + } -#[test] -#[should_panic] -fn test_str_slicemut_rangeinclusive_max_panics() { - let mut s = "hello".to_owned(); - let s: &mut str = &mut s; - &mut s[1..=usize::max_value()]; -} + in mod rangeinclusive_1 { + data: super::DATA; + bad: data[super::BAD_START..=super::GOOD_END_INCL]; + message: + "byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of"; + } -#[test] -fn test_str_get_maxinclusive() { - let mut s = "hello".to_owned(); - { - let s: &str = &s; - assert_eq!(s.get(..=usize::max_value()), None); - assert_eq!(s.get(1..=usize::max_value()), None); + in mod rangeinclusive_2 { + data: super::DATA; + bad: data[super::GOOD_START..=super::BAD_END_INCL]; + message: + "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of"; + } + + in mod rangetoinclusive { + data: super::DATA; + bad: data[..=super::BAD_END_INCL]; + message: + "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of"; + } + } } - { - let s: &mut str = &mut s; - assert_eq!(s.get(..=usize::max_value()), None); - assert_eq!(s.get(1..=usize::max_value()), None); + + const LOREM_PARAGRAPH: &'static str = "\ + Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem \ + sit amet dolor ultricies condimentum. Praesent iaculis purus elit, ac malesuada \ + quam malesuada in. Duis sed orci eros. Suspendisse sit amet magna mollis, mollis \ + nunc luctus, imperdiet mi. Integer fringilla non sem ut lacinia. Fusce varius \ + tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec tempus vel, \ + gravida nec quam."; + + // check the panic includes the prefix of the sliced string + #[test] + #[should_panic(expected="byte index 1024 is out of bounds of `Lorem ipsum dolor sit amet")] + fn test_slice_fail_truncated_1() { + &LOREM_PARAGRAPH[..1024]; + } + // check the truncation in the panic message + #[test] + #[should_panic(expected="luctus, im`[...]")] + fn test_slice_fail_truncated_2() { + &LOREM_PARAGRAPH[..1024]; } } @@ -446,50 +712,6 @@ fn test_is_char_boundary() { } } } -const LOREM_PARAGRAPH: &'static str = "\ -Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \ -ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \ -eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \ -sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. 
Morbi mauris dui, ultricies nec \ -tempus vel, gravida nec quam."; - -// check the panic includes the prefix of the sliced string -#[test] -#[should_panic(expected="byte index 1024 is out of bounds of `Lorem ipsum dolor sit amet")] -fn test_slice_fail_truncated_1() { - &LOREM_PARAGRAPH[..1024]; -} -// check the truncation in the panic message -#[test] -#[should_panic(expected="luctus, im`[...]")] -fn test_slice_fail_truncated_2() { - &LOREM_PARAGRAPH[..1024]; -} - -#[test] -#[should_panic(expected="byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of")] -fn test_slice_fail_boundary_1() { - &"abcαβγ"[4..]; -} - -#[test] -#[should_panic(expected="byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of")] -fn test_slice_fail_boundary_2() { - &"abcαβγ"[2..6]; -} - -#[test] -fn test_slice_from() { - assert_eq!(&"abcd"[0..], "abcd"); - assert_eq!(&"abcd"[2..], "cd"); - assert_eq!(&"abcd"[4..], ""); -} -#[test] -fn test_slice_to() { - assert_eq!(&"abcd"[..0], ""); - assert_eq!(&"abcd"[..2], "ab"); - assert_eq!(&"abcd"[..4], "abcd"); -} #[test] fn test_trim_left_matches() { diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs index 35d0a69a05abe..690cbcb559bbf 100644 --- a/src/liballoc/vec.rs +++ b/src/liballoc/vec.rs @@ -2533,9 +2533,11 @@ impl<'a, T> Drop for Drain<'a, T> { // memmove back untouched tail, update to new length let start = source_vec.len(); let tail = self.tail_start; - let src = source_vec.as_ptr().offset(tail as isize); - let dst = source_vec.as_mut_ptr().offset(start as isize); - ptr::copy(src, dst, self.tail_len); + if tail != start { + let src = source_vec.as_ptr().offset(tail as isize); + let dst = source_vec.as_mut_ptr().offset(start as isize); + ptr::copy(src, dst, self.tail_len); + } source_vec.set_len(start + self.tail_len); } } diff --git a/src/libcore/num/f32.rs b/src/libcore/num/f32.rs index 672119eba7f9d..4a7dc13f0f2ca 100644 --- a/src/libcore/num/f32.rs +++ b/src/libcore/num/f32.rs @@ -11,9 +11,9 @@ //! This module provides constants which are specific to the implementation //! of the `f32` floating point data type. //! -//! Mathematically significant numbers are provided in the `consts` sub-module. -//! //! *[See also the `f32` primitive type](../../std/primitive.f32.html).* +//! +//! Mathematically significant numbers are provided in the `consts` sub-module. #![stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libcore/num/f64.rs b/src/libcore/num/f64.rs index 220b23a1e6a01..801de5e87bd10 100644 --- a/src/libcore/num/f64.rs +++ b/src/libcore/num/f64.rs @@ -11,9 +11,9 @@ //! This module provides constants which are specific to the implementation //! of the `f64` floating point data type. //! -//! Mathematically significant numbers are provided in the `consts` sub-module. -//! //! *[See also the `f64` primitive type](../../std/primitive.f64.html).* +//! +//! Mathematically significant numbers are provided in the `consts` sub-module. #![stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libcore/ops/range.rs b/src/libcore/ops/range.rs index b01a769eda7fd..697e6a3efde28 100644 --- a/src/libcore/ops/range.rs +++ b/src/libcore/ops/range.rs @@ -411,6 +411,21 @@ impl RangeInclusive { pub fn end(&self) -> &Idx { &self.end } + + /// Destructures the RangeInclusive into (lower bound, upper (inclusive) bound). 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(inclusive_range_methods)] + /// + /// assert_eq!((3..=5).into_inner(), (3, 5)); + /// ``` + #[unstable(feature = "inclusive_range_methods", issue = "49022")] + #[inline] + pub fn into_inner(self) -> (Idx, Idx) { + (self.start, self.end) + } } #[stable(feature = "inclusive_range", since = "1.26.0")] diff --git a/src/libcore/slice/mod.rs b/src/libcore/slice/mod.rs index 83e8a6e4b683a..93ebc23ac0b0e 100644 --- a/src/libcore/slice/mod.rs +++ b/src/libcore/slice/mod.rs @@ -2262,6 +2262,12 @@ fn slice_index_order_fail(index: usize, end: usize) -> ! { panic!("slice index starts at {} but ends at {}", index, end); } +#[inline(never)] +#[cold] +fn slice_index_overflow_fail() -> ! { + panic!("attempted to index slice up to maximum usize"); +} + /// A helper trait used for indexing operations. #[unstable(feature = "slice_get_slice", issue = "35729")] #[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"] @@ -2538,15 +2544,13 @@ impl SliceIndex<[T]> for ops::RangeInclusive { #[inline] fn index(self, slice: &[T]) -> &[T] { - assert!(self.end != usize::max_value(), - "attempted to index slice up to maximum usize"); + if self.end == usize::max_value() { slice_index_overflow_fail(); } (self.start..self.end + 1).index(slice) } #[inline] fn index_mut(self, slice: &mut [T]) -> &mut [T] { - assert!(self.end != usize::max_value(), - "attempted to index slice up to maximum usize"); + if self.end == usize::max_value() { slice_index_overflow_fail(); } (self.start..self.end + 1).index_mut(slice) } } diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index b39d9feb35b7e..df7b2f25a86df 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -1849,6 +1849,12 @@ mod traits { } } + #[inline(never)] + #[cold] + fn str_index_overflow_fail() -> ! 
{ + panic!("attempted to index str up to maximum usize"); + } + #[stable(feature = "str_checked_slicing", since = "1.20.0")] impl SliceIndex for ops::RangeFull { type Output = str; @@ -2029,19 +2035,13 @@ mod traits { type Output = str; #[inline] fn get(self, slice: &str) -> Option<&Self::Output> { - if let Some(end) = self.end.checked_add(1) { - (self.start..end).get(slice) - } else { - None - } + if self.end == usize::max_value() { None } + else { (self.start..self.end+1).get(slice) } } #[inline] fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> { - if let Some(end) = self.end.checked_add(1) { - (self.start..end).get_mut(slice) - } else { - None - } + if self.end == usize::max_value() { None } + else { (self.start..self.end+1).get_mut(slice) } } #[inline] unsafe fn get_unchecked(self, slice: &str) -> &Self::Output { @@ -2053,14 +2053,12 @@ mod traits { } #[inline] fn index(self, slice: &str) -> &Self::Output { - assert!(self.end != usize::max_value(), - "attempted to index str up to maximum usize"); + if self.end == usize::max_value() { str_index_overflow_fail(); } (self.start..self.end+1).index(slice) } #[inline] fn index_mut(self, slice: &mut str) -> &mut Self::Output { - assert!(self.end != usize::max_value(), - "attempted to index str up to maximum usize"); + if self.end == usize::max_value() { str_index_overflow_fail(); } (self.start..self.end+1).index_mut(slice) } } @@ -2072,40 +2070,30 @@ mod traits { type Output = str; #[inline] fn get(self, slice: &str) -> Option<&Self::Output> { - if self.end < usize::max_value() && slice.is_char_boundary(self.end + 1) { - Some(unsafe { self.get_unchecked(slice) }) - } else { - None - } + if self.end == usize::max_value() { None } + else { (..self.end+1).get(slice) } } #[inline] fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> { - if self.end < usize::max_value() && slice.is_char_boundary(self.end + 1) { - Some(unsafe { self.get_unchecked_mut(slice) }) - } else { - None - } + if self.end == usize::max_value() { None } + else { (..self.end+1).get_mut(slice) } } #[inline] unsafe fn get_unchecked(self, slice: &str) -> &Self::Output { - let ptr = slice.as_ptr(); - super::from_utf8_unchecked(slice::from_raw_parts(ptr, self.end + 1)) + (..self.end+1).get_unchecked(slice) } #[inline] unsafe fn get_unchecked_mut(self, slice: &mut str) -> &mut Self::Output { - let ptr = slice.as_ptr(); - super::from_utf8_unchecked_mut(slice::from_raw_parts_mut(ptr as *mut u8, self.end + 1)) + (..self.end+1).get_unchecked_mut(slice) } #[inline] fn index(self, slice: &str) -> &Self::Output { - assert!(self.end != usize::max_value(), - "attempted to index str up to maximum usize"); + if self.end == usize::max_value() { str_index_overflow_fail(); } (..self.end+1).index(slice) } #[inline] fn index_mut(self, slice: &mut str) -> &mut Self::Output { - assert!(self.end != usize::max_value(), - "attempted to index str up to maximum usize"); + if self.end == usize::max_value() { str_index_overflow_fail(); } (..self.end+1).index_mut(slice) } } diff --git a/src/libcore/tests/slice.rs b/src/libcore/tests/slice.rs index c81e5e97cbb7a..19b5c86c47406 100644 --- a/src/libcore/tests/slice.rs +++ b/src/libcore/tests/slice.rs @@ -376,48 +376,224 @@ fn test_windows_zip() { assert_eq!(res, [14, 18, 22, 26]); } -#[test] -fn get_range() { - let v: &[i32] = &[0, 1, 2, 3, 4, 5]; - assert_eq!(v.get(..), Some(&[0, 1, 2, 3, 4, 5][..])); - assert_eq!(v.get(..2), Some(&[0, 1][..])); - assert_eq!(v.get(2..), Some(&[2, 3, 4, 5][..])); - assert_eq!(v.get(1..4), Some(&[1, 2, 
3][..])); - assert_eq!(v.get(7..), None); - assert_eq!(v.get(7..10), None); -} +// The current implementation of SliceIndex fails to handle methods +// orthogonally from range types; therefore, it is worth testing +// all of the indexing operations on each input. +mod slice_index { + // This checks all six indexing methods, given an input range that + // should succeed. (it is NOT suitable for testing invalid inputs) + macro_rules! assert_range_eq { + ($arr:expr, $range:expr, $expected:expr) + => { + let mut arr = $arr; + let mut expected = $expected; + { + let s: &[_] = &arr; + let expected: &[_] = &expected; + + assert_eq!(&s[$range], expected, "(in assertion for: index)"); + assert_eq!(s.get($range), Some(expected), "(in assertion for: get)"); + unsafe { + assert_eq!( + s.get_unchecked($range), expected, + "(in assertion for: get_unchecked)", + ); + } + } + { + let s: &mut [_] = &mut arr; + let expected: &mut [_] = &mut expected; + + assert_eq!( + &mut s[$range], expected, + "(in assertion for: index_mut)", + ); + assert_eq!( + s.get_mut($range), Some(&mut expected[..]), + "(in assertion for: get_mut)", + ); + unsafe { + assert_eq!( + s.get_unchecked_mut($range), expected, + "(in assertion for: get_unchecked_mut)", + ); + } + } + } + } -#[test] -fn get_mut_range() { - let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5]; - assert_eq!(v.get_mut(..), Some(&mut [0, 1, 2, 3, 4, 5][..])); - assert_eq!(v.get_mut(..2), Some(&mut [0, 1][..])); - assert_eq!(v.get_mut(2..), Some(&mut [2, 3, 4, 5][..])); - assert_eq!(v.get_mut(1..4), Some(&mut [1, 2, 3][..])); - assert_eq!(v.get_mut(7..), None); - assert_eq!(v.get_mut(7..10), None); -} + // Make sure the macro can actually detect bugs, + // because if it can't, then what are we even doing here? + // + // (Be aware this only demonstrates the ability to detect bugs + // in the FIRST method that panics, as the macro is not designed + // to be used in `should_panic`) + #[test] + #[should_panic(expected = "out of range")] + fn assert_range_eq_can_fail_by_panic() { + assert_range_eq!([0, 1, 2], 0..5, [0, 1, 2]); + } -#[test] -fn get_unchecked_range() { - unsafe { - let v: &[i32] = &[0, 1, 2, 3, 4, 5]; - assert_eq!(v.get_unchecked(..), &[0, 1, 2, 3, 4, 5][..]); - assert_eq!(v.get_unchecked(..2), &[0, 1][..]); - assert_eq!(v.get_unchecked(2..), &[2, 3, 4, 5][..]); - assert_eq!(v.get_unchecked(1..4), &[1, 2, 3][..]); + // (Be aware this only demonstrates the ability to detect bugs + // in the FIRST method it calls, as the macro is not designed + // to be used in `should_panic`) + #[test] + #[should_panic(expected = "==")] + fn assert_range_eq_can_fail_by_inequality() { + assert_range_eq!([0, 1, 2], 0..2, [0, 1, 2]); } -} -#[test] -fn get_unchecked_mut_range() { - unsafe { - let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5]; - assert_eq!(v.get_unchecked_mut(..), &mut [0, 1, 2, 3, 4, 5][..]); - assert_eq!(v.get_unchecked_mut(..2), &mut [0, 1][..]); - assert_eq!(v.get_unchecked_mut(2..), &mut[2, 3, 4, 5][..]); - assert_eq!(v.get_unchecked_mut(1..4), &mut [1, 2, 3][..]); + // Test cases for bad index operations. + // + // This generates `should_panic` test cases for Index/IndexMut + // and `None` test cases for get/get_mut. + macro_rules! panic_cases { + ($( + // each test case needs a unique name to namespace the tests + in mod $case_name:ident { + data: $data:expr; + + // optional: + // + // one or more similar inputs for which data[input] succeeds, + // and the corresponding output as an array. 
This helps validate + // "critical points" where an input range straddles the boundary + // between valid and invalid. + // (such as the input `len..len`, which is just barely valid) + $( + good: data[$good:expr] == $output:expr; + )* + + bad: data[$bad:expr]; + message: $expect_msg:expr; + } + )*) => {$( + mod $case_name { + #[test] + fn pass() { + let mut v = $data; + + $( assert_range_eq!($data, $good, $output); )* + + { + let v: &[_] = &v; + assert_eq!(v.get($bad), None, "(in None assertion for get)"); + } + + { + let v: &mut [_] = &mut v; + assert_eq!(v.get_mut($bad), None, "(in None assertion for get_mut)"); + } + } + + #[test] + #[should_panic(expected = $expect_msg)] + fn index_fail() { + let v = $data; + let v: &[_] = &v; + let _v = &v[$bad]; + } + + #[test] + #[should_panic(expected = $expect_msg)] + fn index_mut_fail() { + let mut v = $data; + let v: &mut [_] = &mut v; + let _v = &mut v[$bad]; + } + } + )*}; } + + #[test] + fn simple() { + let v = [0, 1, 2, 3, 4, 5]; + + assert_range_eq!(v, .., [0, 1, 2, 3, 4, 5]); + assert_range_eq!(v, ..2, [0, 1]); + assert_range_eq!(v, ..=1, [0, 1]); + assert_range_eq!(v, 2.., [2, 3, 4, 5]); + assert_range_eq!(v, 1..4, [1, 2, 3]); + assert_range_eq!(v, 1..=3, [1, 2, 3]); + } + + panic_cases! { + in mod rangefrom_len { + data: [0, 1, 2, 3, 4, 5]; + + good: data[6..] == []; + bad: data[7..]; + message: "but ends at"; // perhaps not ideal + } + + in mod rangeto_len { + data: [0, 1, 2, 3, 4, 5]; + + good: data[..6] == [0, 1, 2, 3, 4, 5]; + bad: data[..7]; + message: "out of range"; + } + + in mod rangetoinclusive_len { + data: [0, 1, 2, 3, 4, 5]; + + good: data[..=5] == [0, 1, 2, 3, 4, 5]; + bad: data[..=6]; + message: "out of range"; + } + + in mod range_len_len { + data: [0, 1, 2, 3, 4, 5]; + + good: data[6..6] == []; + bad: data[7..7]; + message: "out of range"; + } + + in mod rangeinclusive_len_len { + data: [0, 1, 2, 3, 4, 5]; + + good: data[6..=5] == []; + bad: data[7..=6]; + message: "out of range"; + } + } + + panic_cases! { + in mod range_neg_width { + data: [0, 1, 2, 3, 4, 5]; + + good: data[4..4] == []; + bad: data[4..3]; + message: "but ends at"; + } + + in mod rangeinclusive_neg_width { + data: [0, 1, 2, 3, 4, 5]; + + good: data[4..=3] == []; + bad: data[4..=2]; + message: "but ends at"; + } + } + + panic_cases! { + in mod rangeinclusive_overflow { + data: [0, 1]; + + // note: using 0 specifically ensures that the result of overflowing is 0..0, + // so that `get` doesn't simply return None for the wrong reason. + bad: data[0 ..= ::std::usize::MAX]; + message: "maximum usize"; + } + + in mod rangetoinclusive_overflow { + data: [0, 1]; + + bad: data[..= ::std::usize::MAX]; + message: "maximum usize"; + } + } // panic_cases! } #[test] diff --git a/src/libcore/tests/str.rs b/src/libcore/tests/str.rs index 08daafccc5404..343c9596c5383 100644 --- a/src/libcore/tests/str.rs +++ b/src/libcore/tests/str.rs @@ -8,4 +8,4 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
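// Illustrative aside, not part of the patch: the same index-vs-get contract
// the new slice test cases above exercise, shown on a plain slice. Out-of-range
// and overflowing inclusive ranges yield `None` from `get`, whereas indexing
// panics; "just barely valid" inputs like `len..len` succeed. A minimal sketch:
fn main() {
    let v = [0, 1, 2, 3, 4, 5];
    assert_eq!(v.get(6..), Some(&[][..]));   // len..: barely valid, empty
    assert_eq!(v.get(7..), None);            // past the end: None
    assert_eq!(v.get(..=5), Some(&v[..]));   // inclusive end == len - 1
    assert_eq!(v.get(..=usize::MAX), None);  // would overflow: None
    assert_eq!(v.get(4..3), None);           // inverted range: None
    // The corresponding `&v[...]` expressions panic with the messages the
    // `panic_cases!` modules assert on ("out of range", "maximum usize", ...).
}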
-// All `str` tests live in collectionstests::str +// All `str` tests live in liballoc/tests diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs index 22ab1b15c8b8e..03aff64100558 100644 --- a/src/librustc/dep_graph/graph.rs +++ b/src/librustc/dep_graph/graph.rs @@ -12,6 +12,7 @@ use errors::DiagnosticBuilder; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::indexed_vec::{Idx, IndexVec}; +use rustc_data_structures::small_vec::SmallVec; use rustc_data_structures::sync::{Lrc, RwLock, ReadGuard, Lock}; use std::env; use std::hash::Hash; @@ -77,7 +78,7 @@ struct DepGraphData { /// things available to us. If we find that they are not dirty, we /// load the path to the file storing those work-products here into /// this map. We can later look for and extract that data. - previous_work_products: RwLock>, + previous_work_products: FxHashMap, /// Work-products that we generate in this run. work_products: RwLock>, @@ -90,7 +91,8 @@ struct DepGraphData { impl DepGraph { - pub fn new(prev_graph: PreviousDepGraph) -> DepGraph { + pub fn new(prev_graph: PreviousDepGraph, + prev_work_products: FxHashMap) -> DepGraph { // Pre-allocate the fingerprints array. We over-allocate a little so // that we hopefully don't have to re-allocate during this compilation // session. @@ -100,7 +102,7 @@ impl DepGraph { (prev_graph_node_count * 115) / 100); DepGraph { data: Some(Lrc::new(DepGraphData { - previous_work_products: RwLock::new(FxHashMap()), + previous_work_products: prev_work_products, work_products: RwLock::new(FxHashMap()), dep_node_debug: Lock::new(FxHashMap()), current: Lock::new(CurrentDepGraph::new()), @@ -131,7 +133,7 @@ impl DepGraph { let mut edges = Vec::new(); for (index, edge_targets) in current_dep_graph.edges.iter_enumerated() { let from = current_dep_graph.nodes[index]; - for &edge_target in edge_targets { + for &edge_target in edge_targets.iter() { let to = current_dep_graph.nodes[edge_target]; edges.push((from, to)); } @@ -209,7 +211,7 @@ impl DepGraph { self.with_task_impl(key, cx, arg, false, task, |key| OpenTask::Regular(Lock::new(RegularOpenTask { node: key, - reads: Vec::new(), + reads: SmallVec::new(), read_set: FxHashSet(), })), |data, key, task| data.borrow_mut().complete_task(key, task)) @@ -230,7 +232,7 @@ impl DepGraph { self.with_task_impl(key, cx, input, true, identity_fn, |_| OpenTask::Ignore, - |data, key, _| data.borrow_mut().alloc_node(key, Vec::new())) + |data, key, _| data.borrow_mut().alloc_node(key, SmallVec::new())) } fn with_task_impl<'gcx, C, A, R>( @@ -353,7 +355,7 @@ impl DepGraph { if let Some(ref data) = self.data { let (result, open_task) = ty::tls::with_context(|icx| { let task = OpenTask::Anon(Lock::new(AnonOpenTask { - reads: Vec::new(), + reads: SmallVec::new(), read_set: FxHashSet(), })); @@ -460,19 +462,6 @@ impl DepGraph { self.data.as_ref().unwrap().previous.node_to_index(dep_node) } - /// Indicates that a previous work product exists for `v`. This is - /// invoked during initial start-up based on what nodes are clean - /// (and what files exist in the incr. directory). - pub fn insert_previous_work_product(&self, v: &WorkProductId, data: WorkProduct) { - debug!("insert_previous_work_product({:?}, {:?})", v, data); - self.data - .as_ref() - .unwrap() - .previous_work_products - .borrow_mut() - .insert(v.clone(), data); - } - /// Indicates that we created the given work-product in this run /// for `v`. 
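// Illustrative aside with hypothetical names, not rustc's types: the pattern
// behind the `previous_work_products` change above. Data that is fully known
// when the graph is constructed is passed into the constructor and stored
// without a lock, so later lookups are plain reads; only state that is still
// mutated during the session keeps its interior mutability.
use std::collections::HashMap;
use std::sync::RwLock;

struct Graph {
    // Immutable after construction: no RwLock needed.
    previous_work_products: HashMap<String, String>,
    // Still grows during the session, so it keeps a lock.
    work_products: RwLock<HashMap<String, String>>,
}

impl Graph {
    fn new(previous_work_products: HashMap<String, String>) -> Graph {
        Graph { previous_work_products, work_products: RwLock::new(HashMap::new()) }
    }
    fn previous_work_product(&self, id: &str) -> Option<&String> {
        self.previous_work_products.get(id) // no borrow()/read() required
    }
}

fn main() {
    let mut prev = HashMap::new();
    prev.insert("cgu-0.o".to_string(), "saved-file-0".to_string());
    let graph = Graph::new(prev);
    assert!(graph.previous_work_product("cgu-0.o").is_some());
    graph.work_products.write().unwrap().insert("cgu-1.o".into(), "file-1".into());
}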
This record will be preserved and loaded in the next /// run. @@ -492,7 +481,7 @@ impl DepGraph { self.data .as_ref() .and_then(|data| { - data.previous_work_products.borrow().get(v).cloned() + data.previous_work_products.get(v).cloned() }) } @@ -504,8 +493,8 @@ impl DepGraph { /// Access the map of work-products created during the cached run. Only /// used during saving of the dep-graph. - pub fn previous_work_products(&self) -> ReadGuard> { - self.data.as_ref().unwrap().previous_work_products.borrow() + pub fn previous_work_products(&self) -> &FxHashMap { + &self.data.as_ref().unwrap().previous_work_products } #[inline(always)] @@ -534,15 +523,9 @@ impl DepGraph { } pub fn serialize(&self) -> SerializedDepGraph { - let mut fingerprints = self.fingerprints.borrow_mut(); let current_dep_graph = self.data.as_ref().unwrap().current.borrow(); - // Make sure we don't run out of bounds below. - if current_dep_graph.nodes.len() > fingerprints.len() { - fingerprints.resize(current_dep_graph.nodes.len(), Fingerprint::ZERO); - } - - let fingerprints = fingerprints.clone().convert_index_type(); + let fingerprints = self.fingerprints.borrow().clone().convert_index_type(); let nodes = current_dep_graph.nodes.clone().convert_index_type(); let total_edge_count: usize = current_dep_graph.edges.iter() @@ -626,7 +609,7 @@ impl DepGraph { debug_assert!(data.colors.borrow().get(prev_dep_node_index).is_none()); - let mut current_deps = Vec::new(); + let mut current_deps = SmallVec::new(); for &dep_dep_node_index in prev_deps { let dep_dep_node_color = data.colors.borrow().get(dep_dep_node_index); @@ -923,7 +906,7 @@ pub enum WorkProductFileKind { pub(super) struct CurrentDepGraph { nodes: IndexVec, - edges: IndexVec>, + edges: IndexVec>, node_to_node_index: FxHashMap, forbidden_edge: Option, @@ -1061,7 +1044,7 @@ impl CurrentDepGraph { } = task { debug_assert_eq!(node, key); let krate_idx = self.node_to_node_index[&DepNode::new_no_params(DepKind::Krate)]; - self.alloc_node(node, vec![krate_idx]) + self.alloc_node(node, SmallVec::one(krate_idx)) } else { bug!("complete_eval_always_task() - Expected eval always task to be popped"); } @@ -1107,7 +1090,7 @@ impl CurrentDepGraph { fn alloc_node(&mut self, dep_node: DepNode, - edges: Vec) + edges: SmallVec<[DepNodeIndex; 8]>) -> DepNodeIndex { debug_assert_eq!(self.edges.len(), self.nodes.len()); debug_assert_eq!(self.node_to_node_index.len(), self.nodes.len()); @@ -1122,12 +1105,12 @@ impl CurrentDepGraph { pub struct RegularOpenTask { node: DepNode, - reads: Vec, + reads: SmallVec<[DepNodeIndex; 8]>, read_set: FxHashSet, } pub struct AnonOpenTask { - reads: Vec, + reads: SmallVec<[DepNodeIndex; 8]>, read_set: FxHashSet, } diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 83dac033f9408..245663494ddef 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -95,6 +95,23 @@ pub enum Lto { Fat, } +#[derive(Clone, PartialEq, Hash)] +pub enum CrossLangLto { + LinkerPlugin(PathBuf), + NoLink, + Disabled +} + +impl CrossLangLto { + pub fn embed_bitcode(&self) -> bool { + match *self { + CrossLangLto::LinkerPlugin(_) | + CrossLangLto::NoLink => true, + CrossLangLto::Disabled => false, + } + } +} + #[derive(Clone, Copy, PartialEq, Hash)] pub enum DebugInfoLevel { NoDebugInfo, @@ -412,6 +429,7 @@ top_level_options!( // Remap source path prefixes in all output (messages, object files, debug, etc) remap_path_prefix: Vec<(PathBuf, PathBuf)> [UNTRACKED], + edition: Edition [TRACKED], } ); @@ -777,11 +795,15 @@ 
macro_rules! options { Some("`string` or `string=string`"); pub const parse_lto: Option<&'static str> = Some("one of `thin`, `fat`, or omitted"); + pub const parse_cross_lang_lto: Option<&'static str> = + Some("either a boolean (`yes`, `no`, `on`, `off`, etc), `no-link`, \ + or the path to the linker plugin"); } #[allow(dead_code)] mod $mod_set { - use super::{$struct_name, Passes, SomePasses, AllPasses, Sanitizer, Lto}; + use super::{$struct_name, Passes, SomePasses, AllPasses, Sanitizer, Lto, + CrossLangLto}; use rustc_target::spec::{LinkerFlavor, PanicStrategy, RelroLevel}; use std::path::PathBuf; @@ -986,6 +1008,26 @@ macro_rules! options { true } + fn parse_cross_lang_lto(slot: &mut CrossLangLto, v: Option<&str>) -> bool { + if v.is_some() { + let mut bool_arg = None; + if parse_opt_bool(&mut bool_arg, v) { + *slot = if bool_arg.unwrap() { + CrossLangLto::NoLink + } else { + CrossLangLto::Disabled + }; + return true + } + } + + *slot = match v { + None | + Some("no-link") => CrossLangLto::NoLink, + Some(path) => CrossLangLto::LinkerPlugin(PathBuf::from(path)), + }; + true + } } ) } @@ -1295,7 +1337,7 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, "make the current crate share its generic instantiations"), chalk: bool = (false, parse_bool, [TRACKED], "enable the experimental Chalk-based trait solving engine"), - cross_lang_lto: bool = (false, parse_bool, [TRACKED], + cross_lang_lto: CrossLangLto = (CrossLangLto::Disabled, parse_cross_lang_lto, [TRACKED], "generate build artifacts that are compatible with linker-based LTO."), } @@ -2327,7 +2369,7 @@ mod dep_tracking { use std::path::PathBuf; use std::collections::hash_map::DefaultHasher; use super::{CrateType, DebugInfoLevel, ErrorOutputType, Lto, OptLevel, OutputTypes, - Passes, Sanitizer}; + Passes, Sanitizer, CrossLangLto}; use syntax::feature_gate::UnstableFeatures; use rustc_target::spec::{PanicStrategy, RelroLevel, TargetTriple}; use syntax::edition::Edition; @@ -2391,6 +2433,7 @@ mod dep_tracking { impl_dep_tracking_hash_via_hash!(Option); impl_dep_tracking_hash_via_hash!(TargetTriple); impl_dep_tracking_hash_via_hash!(Edition); + impl_dep_tracking_hash_via_hash!(CrossLangLto); impl_dep_tracking_hash_for_sortable_vec_of!(String); impl_dep_tracking_hash_for_sortable_vec_of!(PathBuf); @@ -2455,7 +2498,7 @@ mod tests { use lint; use middle::cstore; use session::config::{build_configuration, build_session_options_and_crate_config}; - use session::config::Lto; + use session::config::{Lto, CrossLangLto}; use session::build_session; use std::collections::{BTreeMap, BTreeSet}; use std::iter::FromIterator; @@ -3111,6 +3154,10 @@ mod tests { opts = reference.clone(); opts.debugging_opts.relro_level = Some(RelroLevel::Full); assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash()); + + opts = reference.clone(); + opts.debugging_opts.cross_lang_lto = CrossLangLto::NoLink; + assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash()); } #[test] diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 2ab72ba20bf4f..23f84881c7980 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -657,6 +657,13 @@ impl Session { } } + pub fn target_cpu(&self) -> &str { + match self.opts.cg.target_cpu { + Some(ref s) => &**s, + None => &*self.target.target.options.cpu + } + } + pub fn must_not_eliminate_frame_pointers(&self) -> bool { if let Some(x) = self.opts.cg.force_frame_pointers { x diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 
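// Illustrative aside, not part of the patch: a simplified, hypothetical
// stand-in for `parse_cross_lang_lto` above, showing how the accepted
// `-Z cross-lang-lto=...` values map onto the new enum. Boolean spellings keep
// working, a bare flag or `no-link` selects `NoLink`, and any other value is
// treated as the path of the linker plugin.
use std::path::PathBuf;

#[derive(Debug, PartialEq)]
enum CrossLangLto {
    LinkerPlugin(PathBuf),
    NoLink,
    Disabled,
}

fn parse_cross_lang_lto(v: Option<&str>) -> CrossLangLto {
    match v {
        // a subset of the boolean spellings `parse_opt_bool` accepts
        Some("y") | Some("yes") | Some("on") => CrossLangLto::NoLink,
        Some("n") | Some("no") | Some("off") => CrossLangLto::Disabled,
        None | Some("no-link") => CrossLangLto::NoLink,
        Some(path) => CrossLangLto::LinkerPlugin(PathBuf::from(path)),
    }
}

fn main() {
    assert_eq!(parse_cross_lang_lto(None), CrossLangLto::NoLink);
    assert_eq!(parse_cross_lang_lto(Some("off")), CrossLangLto::Disabled);
    assert_eq!(
        parse_cross_lang_lto(Some("/usr/lib/LLVMgold.so")),
        CrossLangLto::LinkerPlugin(PathBuf::from("/usr/lib/LLVMgold.so"))
    );
}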
2fb811eba1e9a..1e74039503d51 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -980,15 +980,16 @@ where let dep_graph = match future_dep_graph { None => DepGraph::new_disabled(), Some(future) => { - let prev_graph = time(sess, "blocked while dep-graph loading finishes", || { - future - .open() - .unwrap_or_else(|e| rustc_incremental::LoadResult::Error { - message: format!("could not decode incremental cache: {:?}", e), - }) - .open(sess) - }); - DepGraph::new(prev_graph) + let (prev_graph, prev_work_products) = + time(sess, "blocked while dep-graph loading finishes", || { + future + .open() + .unwrap_or_else(|e| rustc_incremental::LoadResult::Error { + message: format!("could not decode incremental cache: {:?}", e), + }) + .open(sess) + }); + DepGraph::new(prev_graph, prev_work_products) } }; let hir_forest = time(sess, "lowering ast -> hir", || { diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs index 44d6e532f79bb..f846759545eb9 100644 --- a/src/librustc_incremental/persist/load.rs +++ b/src/librustc_incremental/persist/load.rs @@ -10,7 +10,8 @@ //! Code to save/load the dep-graph from files. -use rustc::dep_graph::{PreviousDepGraph, SerializedDepGraph}; +use rustc_data_structures::fx::FxHashMap; +use rustc::dep_graph::{PreviousDepGraph, SerializedDepGraph, WorkProduct, WorkProductId}; use rustc::session::Session; use rustc::ty::TyCtxt; use rustc::ty::maps::OnDiskCache; @@ -32,65 +33,22 @@ pub fn dep_graph_tcx_init<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { tcx.allocate_metadata_dep_nodes(); tcx.precompute_in_scope_traits_hashes(); - - if tcx.sess.incr_comp_session_dir_opt().is_none() { - // If we are only building with -Zquery-dep-graph but without an actual - // incr. comp. session directory, we exit here. Otherwise we'd fail - // when trying to load work products. 
- return - } - - let work_products_path = work_products_path(tcx.sess); - let load_result = load_data(tcx.sess.opts.debugging_opts.incremental_info, &work_products_path); - - if let LoadResult::Ok { data: (work_products_data, start_pos) } = load_result { - // Decode the list of work_products - let mut work_product_decoder = Decoder::new(&work_products_data[..], start_pos); - let work_products: Vec = - RustcDecodable::decode(&mut work_product_decoder).unwrap_or_else(|e| { - let msg = format!("Error decoding `work-products` from incremental \ - compilation session directory: {}", e); - tcx.sess.fatal(&msg[..]) - }); - - for swp in work_products { - let mut all_files_exist = true; - for &(_, ref file_name) in swp.work_product.saved_files.iter() { - let path = in_incr_comp_dir_sess(tcx.sess, file_name); - if !path.exists() { - all_files_exist = false; - - if tcx.sess.opts.debugging_opts.incremental_info { - eprintln!("incremental: could not find file for work \ - product: {}", path.display()); - } - } - } - - if all_files_exist { - debug!("reconcile_work_products: all files for {:?} exist", swp); - tcx.dep_graph.insert_previous_work_product(&swp.id, swp.work_product); - } else { - debug!("reconcile_work_products: some file for {:?} does not exist", swp); - delete_dirty_work_product(tcx, swp); - } - } - } } +type WorkProductMap = FxHashMap; + pub enum LoadResult { Ok { data: T }, DataOutOfDate, Error { message: String }, } - -impl LoadResult { - pub fn open(self, sess: &Session) -> PreviousDepGraph { +impl LoadResult<(PreviousDepGraph, WorkProductMap)> { + pub fn open(self, sess: &Session) -> (PreviousDepGraph, WorkProductMap) { match self { LoadResult::Error { message } => { sess.warn(&message); - PreviousDepGraph::new(SerializedDepGraph::new()) + (PreviousDepGraph::new(SerializedDepGraph::new()), FxHashMap()) }, LoadResult::DataOutOfDate => { if let Err(err) = delete_all_session_dir_contents(sess) { @@ -98,7 +56,7 @@ impl LoadResult { incremental compilation session directory contents `{}`: {}.", dep_graph_path(sess).display(), err)); } - PreviousDepGraph::new(SerializedDepGraph::new()) + (PreviousDepGraph::new(SerializedDepGraph::new()), FxHashMap()) } LoadResult::Ok { data } => data } @@ -125,10 +83,10 @@ fn load_data(report_incremental_info: bool, path: &Path) -> LoadResult<(Vec, } } -fn delete_dirty_work_product(tcx: TyCtxt, +fn delete_dirty_work_product(sess: &Session, swp: SerializedWorkProduct) { debug!("delete_dirty_work_product({:?})", swp); - work_product::delete_workproduct_files(tcx.sess, &swp.work_product); + work_product::delete_workproduct_files(sess, &swp.work_product); } /// Either a result that has already be computed or a @@ -149,7 +107,7 @@ impl MaybeAsync { /// Launch a thread and load the dependency graph in the background. pub fn load_dep_graph(sess: &Session) -> - MaybeAsync> + MaybeAsync> { // Since `sess` isn't `Sync`, we perform all accesses to `sess` // before we fire the background thread. @@ -159,7 +117,7 @@ pub fn load_dep_graph(sess: &Session) -> if sess.opts.incremental.is_none() { // No incremental compilation. 
return MaybeAsync::Sync(LoadResult::Ok { - data: PreviousDepGraph::new(SerializedDepGraph::new()) + data: (PreviousDepGraph::new(SerializedDepGraph::new()), FxHashMap()) }); } @@ -169,6 +127,50 @@ pub fn load_dep_graph(sess: &Session) -> let report_incremental_info = sess.opts.debugging_opts.incremental_info; let expected_hash = sess.opts.dep_tracking_hash(); + let mut prev_work_products = FxHashMap(); + + // If we are only building with -Zquery-dep-graph but without an actual + // incr. comp. session directory, we skip this. Otherwise we'd fail + // when trying to load work products. + if sess.incr_comp_session_dir_opt().is_some() { + let work_products_path = work_products_path(sess); + let load_result = load_data(report_incremental_info, &work_products_path); + + if let LoadResult::Ok { data: (work_products_data, start_pos) } = load_result { + // Decode the list of work_products + let mut work_product_decoder = Decoder::new(&work_products_data[..], start_pos); + let work_products: Vec = + RustcDecodable::decode(&mut work_product_decoder).unwrap_or_else(|e| { + let msg = format!("Error decoding `work-products` from incremental \ + compilation session directory: {}", e); + sess.fatal(&msg[..]) + }); + + for swp in work_products { + let mut all_files_exist = true; + for &(_, ref file_name) in swp.work_product.saved_files.iter() { + let path = in_incr_comp_dir_sess(sess, file_name); + if !path.exists() { + all_files_exist = false; + + if sess.opts.debugging_opts.incremental_info { + eprintln!("incremental: could not find file for work \ + product: {}", path.display()); + } + } + } + + if all_files_exist { + debug!("reconcile_work_products: all files for {:?} exist", swp); + prev_work_products.insert(swp.id, swp.work_product); + } else { + debug!("reconcile_work_products: some file for {:?} does not exist", swp); + delete_dirty_work_product(sess, swp); + } + } + } + } + MaybeAsync::Async(std::thread::spawn(move || { time_ext(time_passes, None, "background load prev dep-graph", move || { match load_data(report_incremental_info, &path) { @@ -195,7 +197,7 @@ pub fn load_dep_graph(sess: &Session) -> let dep_graph = SerializedDepGraph::decode(&mut decoder) .expect("Error reading cached dep-graph"); - LoadResult::Ok { data: PreviousDepGraph::new(dep_graph) } + LoadResult::Ok { data: (PreviousDepGraph::new(dep_graph), prev_work_products) } } } }) diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index d0237071a6058..87b3a2dc69ff9 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -51,7 +51,6 @@ pub struct Library { pub struct CrateLoader<'a> { pub sess: &'a Session, cstore: &'a CStore, - next_crate_num: CrateNum, local_crate_name: Symbol, } @@ -102,7 +101,6 @@ impl<'a> CrateLoader<'a> { CrateLoader { sess, cstore, - next_crate_num: cstore.next_crate_num(), local_crate_name: Symbol::intern(local_crate_name), } } @@ -198,8 +196,7 @@ impl<'a> CrateLoader<'a> { self.verify_no_symbol_conflicts(span, &crate_root); // Claim this crate number and cache it - let cnum = self.next_crate_num; - self.next_crate_num = CrateNum::from_u32(cnum.as_u32() + 1); + let cnum = self.cstore.alloc_new_crate_num(); // Stash paths for top-most crate locally if necessary. 
let crate_paths = if root.is_none() { @@ -219,6 +216,8 @@ impl<'a> CrateLoader<'a> { let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span, dep_kind); + let dependencies: Vec = cnum_map.iter().cloned().collect(); + let def_path_table = record_time(&self.sess.perf_stats.decode_def_path_tables_time, || { crate_root.def_path_table.decode((&metadata, self.sess)) }); @@ -239,8 +238,9 @@ impl<'a> CrateLoader<'a> { }), root: crate_root, blob: metadata, - cnum_map: Lock::new(cnum_map), + cnum_map, cnum, + dependencies: Lock::new(dependencies), codemap_import_info: RwLock::new(vec![]), attribute_cache: Lock::new([Vec::new(), Vec::new()]), dep_kind: Lock::new(dep_kind), @@ -392,7 +392,7 @@ impl<'a> CrateLoader<'a> { // Propagate the extern crate info to dependencies. extern_crate.direct = false; - for &dep_cnum in cmeta.cnum_map.borrow().iter() { + for &dep_cnum in cmeta.dependencies.borrow().iter() { self.update_extern_crate(dep_cnum, extern_crate, visited); } } @@ -1040,7 +1040,7 @@ impl<'a> CrateLoader<'a> { } info!("injecting a dep from {} to {}", cnum, krate); - data.cnum_map.borrow_mut().push(krate); + data.dependencies.borrow_mut().push(krate); }); } } diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index 64bbcf436cb9e..9bbce563b61dc 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -64,8 +64,9 @@ pub struct CrateMetadata { pub extern_crate: Lock>, pub blob: MetadataBlob, - pub cnum_map: Lock, + pub cnum_map: CrateNumMap, pub cnum: CrateNum, + pub dependencies: Lock>, pub codemap_import_info: RwLock>, pub attribute_cache: Lock<[Vec>>; 2]>, @@ -96,32 +97,34 @@ pub struct CStore { impl CStore { pub fn new(metadata_loader: Box) -> CStore { CStore { - metas: RwLock::new(IndexVec::new()), + // We add an empty entry for LOCAL_CRATE (which maps to zero) in + // order to make array indices in `metas` match with the + // corresponding `CrateNum`. This first entry will always remain + // `None`. + metas: RwLock::new(IndexVec::from_elem_n(None, 1)), extern_mod_crate_map: Lock::new(FxHashMap()), metadata_loader, } } - /// You cannot use this function to allocate a CrateNum in a thread-safe manner. - /// It is currently only used in CrateLoader which is single-threaded code. 
- pub fn next_crate_num(&self) -> CrateNum { - CrateNum::new(self.metas.borrow().len() + 1) + pub(super) fn alloc_new_crate_num(&self) -> CrateNum { + let mut metas = self.metas.borrow_mut(); + let cnum = CrateNum::new(metas.len()); + metas.push(None); + cnum } - pub fn get_crate_data(&self, cnum: CrateNum) -> Lrc { + pub(super) fn get_crate_data(&self, cnum: CrateNum) -> Lrc { self.metas.borrow()[cnum].clone().unwrap() } - pub fn set_crate_data(&self, cnum: CrateNum, data: Lrc) { - use rustc_data_structures::indexed_vec::Idx; - let mut met = self.metas.borrow_mut(); - while met.len() <= cnum.index() { - met.push(None); - } - met[cnum] = Some(data); + pub(super) fn set_crate_data(&self, cnum: CrateNum, data: Lrc) { + let mut metas = self.metas.borrow_mut(); + assert!(metas[cnum].is_none(), "Overwriting crate metadata entry"); + metas[cnum] = Some(data); } - pub fn iter_crate_data(&self, mut i: I) + pub(super) fn iter_crate_data(&self, mut i: I) where I: FnMut(CrateNum, &Lrc) { for (k, v) in self.metas.borrow().iter_enumerated() { @@ -131,20 +134,22 @@ impl CStore { } } - pub fn crate_dependencies_in_rpo(&self, krate: CrateNum) -> Vec { + pub(super) fn crate_dependencies_in_rpo(&self, krate: CrateNum) -> Vec { let mut ordering = Vec::new(); self.push_dependencies_in_postorder(&mut ordering, krate); ordering.reverse(); ordering } - pub fn push_dependencies_in_postorder(&self, ordering: &mut Vec, krate: CrateNum) { + pub(super) fn push_dependencies_in_postorder(&self, + ordering: &mut Vec, + krate: CrateNum) { if ordering.contains(&krate) { return; } let data = self.get_crate_data(krate); - for &dep in data.cnum_map.borrow().iter() { + for &dep in data.dependencies.borrow().iter() { if dep != krate { self.push_dependencies_in_postorder(ordering, dep); } @@ -153,7 +158,7 @@ impl CStore { ordering.push(krate); } - pub fn do_postorder_cnums_untracked(&self) -> Vec { + pub(super) fn do_postorder_cnums_untracked(&self) -> Vec { let mut ordering = Vec::new(); for (num, v) in self.metas.borrow().iter_enumerated() { if let &Some(_) = v { @@ -163,11 +168,11 @@ impl CStore { return ordering } - pub fn add_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId, cnum: CrateNum) { + pub(super) fn add_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId, cnum: CrateNum) { self.extern_mod_crate_map.borrow_mut().insert(emod_id, cnum); } - pub fn do_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option { + pub(super) fn do_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option { self.extern_mod_crate_map.borrow().get(&emod_id).cloned() } } diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 57f92707ccfe3..53d1ff156274e 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -246,7 +246,7 @@ impl<'a, 'tcx: 'a> TyDecoder<'a, 'tcx> for DecodeContext<'a, 'tcx> { if cnum == LOCAL_CRATE { self.cdata().cnum } else { - self.cdata().cnum_map.borrow()[cnum] + self.cdata().cnum_map[cnum] } } } @@ -932,7 +932,7 @@ impl<'a, 'tcx> CrateMetadata { // Translate a DefId from the current compilation environment to a DefId // for an external crate. 
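// Illustrative aside with hypothetical, simplified types: the allocation
// scheme the CStore change above moves to. Slot 0 of `metas` is reserved for
// LOCAL_CRATE so a crate's number can index the vector directly, and new crate
// numbers are handed out by pushing a placeholder entry.
struct CStore {
    metas: Vec<Option<String>>, // stand-in for Option<Lrc<CrateMetadata>>
}

impl CStore {
    fn new() -> CStore {
        // The LOCAL_CRATE entry (index 0) always stays `None`.
        CStore { metas: vec![None] }
    }
    fn alloc_new_crate_num(&mut self) -> usize {
        let cnum = self.metas.len();
        self.metas.push(None);
        cnum
    }
    fn set_crate_data(&mut self, cnum: usize, data: String) {
        assert!(self.metas[cnum].is_none(), "Overwriting crate metadata entry");
        self.metas[cnum] = Some(data);
    }
}

fn main() {
    let mut cstore = CStore::new();
    let a = cstore.alloc_new_crate_num();
    let b = cstore.alloc_new_crate_num();
    assert_eq!((a, b), (1, 2)); // numbering starts after LOCAL_CRATE's slot
    cstore.set_crate_data(a, "metadata for crate a".to_string());
}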
fn reverse_translate_def_id(&self, did: DefId) -> Option { - for (local, &global) in self.cnum_map.borrow().iter_enumerated() { + for (local, &global) in self.cnum_map.iter_enumerated() { if global == did.krate { return Some(DefId { krate: local, @@ -1007,7 +1007,7 @@ impl<'a, 'tcx> CrateMetadata { .enumerate() .flat_map(|(i, link)| { let cnum = CrateNum::new(i + 1); - link.map(|link| (self.cnum_map.borrow()[cnum], link)) + link.map(|link| (self.cnum_map[cnum], link)) }) .collect() } diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index 92f9a9e8ba974..d39556e9bb197 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -970,6 +970,9 @@ fn link_args(cmd: &mut Linker, out_filename: &Path, trans: &CrateTranslation) { + // Linker plugins should be specified early in the list of arguments + cmd.cross_lang_lto(); + // The default library location, we need this to find the runtime. // The location of crates will be determined as needed. let lib_path = sess.target_filesearch(PathKind::All).get_lib_path(); diff --git a/src/librustc_trans/back/linker.rs b/src/librustc_trans/back/linker.rs index ea3f5b408604e..2a84ffe79b285 100644 --- a/src/librustc_trans/back/linker.rs +++ b/src/librustc_trans/back/linker.rs @@ -21,7 +21,8 @@ use back::symbol_export; use rustc::hir::def_id::{LOCAL_CRATE, CrateNum}; use rustc::middle::dependency_format::Linkage; use rustc::session::Session; -use rustc::session::config::{self, CrateType, OptLevel, DebugInfoLevel}; +use rustc::session::config::{self, CrateType, OptLevel, DebugInfoLevel, + CrossLangLto}; use rustc::ty::TyCtxt; use rustc_target::spec::{LinkerFlavor, LldFlavor}; use serialize::{json, Encoder}; @@ -127,6 +128,7 @@ pub trait Linker { fn subsystem(&mut self, subsystem: &str); fn group_start(&mut self); fn group_end(&mut self); + fn cross_lang_lto(&mut self); // Should have been finalize(self), but we don't support self-by-value on trait objects (yet?). 
fn finalize(&mut self) -> Command; } @@ -434,6 +436,42 @@ impl<'a> Linker for GccLinker<'a> { self.linker_arg("--end-group"); } } + + fn cross_lang_lto(&mut self) { + match self.sess.opts.debugging_opts.cross_lang_lto { + CrossLangLto::Disabled | + CrossLangLto::NoLink => { + // Nothing to do + } + CrossLangLto::LinkerPlugin(ref path) => { + self.linker_arg(&format!("-plugin={}", path.display())); + + let opt_level = match self.sess.opts.optimize { + config::OptLevel::No => "O0", + config::OptLevel::Less => "O1", + config::OptLevel::Default => "O2", + config::OptLevel::Aggressive => "O3", + config::OptLevel::Size => "Os", + config::OptLevel::SizeMin => "Oz", + }; + + self.linker_arg(&format!("-plugin-opt={}", opt_level)); + self.linker_arg(&format!("-plugin-opt=mcpu={}", self.sess.target_cpu())); + + match self.sess.opts.cg.lto { + config::Lto::Thin | + config::Lto::ThinLocal => { + self.linker_arg(&format!("-plugin-opt=thin")); + } + config::Lto::Fat | + config::Lto::Yes | + config::Lto::No => { + // default to regular LTO + } + } + } + } + } } pub struct MsvcLinker<'a> { @@ -666,6 +704,10 @@ impl<'a> Linker for MsvcLinker<'a> { // MSVC doesn't need group indicators fn group_start(&mut self) {} fn group_end(&mut self) {} + + fn cross_lang_lto(&mut self) { + // Do nothing + } } pub struct EmLinker<'a> { @@ -832,6 +874,10 @@ impl<'a> Linker for EmLinker<'a> { // Appears not necessary on Emscripten fn group_start(&mut self) {} fn group_end(&mut self) {} + + fn cross_lang_lto(&mut self) { + // Do nothing + } } fn exported_symbols(tcx: TyCtxt, crate_type: CrateType) -> Vec { @@ -984,4 +1030,8 @@ impl Linker for WasmLd { // Not needed for now with LLD fn group_start(&mut self) {} fn group_end(&mut self) {} + + fn cross_lang_lto(&mut self) { + // Do nothing for now + } } diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index b6fae3eaff23a..64876e82309f0 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -174,10 +174,7 @@ pub fn target_machine_factory(sess: &Session, find_features: bool) let triple = &sess.target.target.llvm_target; let triple = CString::new(triple.as_bytes()).unwrap(); - let cpu = match sess.opts.cg.target_cpu { - Some(ref s) => &**s, - None => &*sess.target.target.options.cpu - }; + let cpu = sess.target_cpu(); let cpu = CString::new(cpu.as_bytes()).unwrap(); let features = attributes::llvm_target_features(sess) .collect::>() @@ -294,7 +291,7 @@ impl ModuleConfig { self.obj_is_bitcode = sess.target.target.options.obj_is_bitcode; let embed_bitcode = sess.target.target.options.embed_bitcode || sess.opts.debugging_opts.embed_bitcode || - sess.opts.debugging_opts.cross_lang_lto; + sess.opts.debugging_opts.cross_lang_lto.embed_bitcode(); if embed_bitcode { match sess.opts.optimize { config::OptLevel::No | @@ -1358,7 +1355,8 @@ fn execute_work_item(cgcx: &CodegenContext, // Don't run LTO passes when cross-lang LTO is enabled. The linker // will do that for us in this case. - let needs_lto = needs_lto && !cgcx.opts.debugging_opts.cross_lang_lto; + let needs_lto = needs_lto && + !cgcx.opts.debugging_opts.cross_lang_lto.embed_bitcode(); if needs_lto { Ok(WorkItemResult::NeedsLTO(mtrans)) diff --git a/src/libstd/f32.rs b/src/libstd/f32.rs index 26644c769575c..f849db4ec6027 100644 --- a/src/libstd/f32.rs +++ b/src/libstd/f32.rs @@ -11,9 +11,9 @@ //! This module provides constants which are specific to the implementation //! of the `f32` floating point data type. //! -//! 
Mathematically significant numbers are provided in the `consts` sub-module. -//! //! *[See also the `f32` primitive type](../../std/primitive.f32.html).* +//! +//! Mathematically significant numbers are provided in the `consts` sub-module. #![stable(feature = "rust1", since = "1.0.0")] #![allow(missing_docs)] @@ -195,8 +195,10 @@ impl f32 { } /// Fused multiply-add. Computes `(self * a) + b` with only one rounding - /// error. This produces a more accurate result with better performance than - /// a separate multiplication operation followed by an add. + /// error, yielding a more accurate result than an unfused multiply-add. + /// + /// Using `mul_add` can be more performant than an unfused multiply-add if + /// the target architecture has a dedicated `fma` CPU instruction. /// /// ``` /// use std::f32; diff --git a/src/libstd/f64.rs b/src/libstd/f64.rs index a7e63f59b1c67..40c3f4d0ef726 100644 --- a/src/libstd/f64.rs +++ b/src/libstd/f64.rs @@ -11,9 +11,9 @@ //! This module provides constants which are specific to the implementation //! of the `f64` floating point data type. //! -//! Mathematically significant numbers are provided in the `consts` sub-module. -//! //! *[See also the `f64` primitive type](../../std/primitive.f64.html).* +//! +//! Mathematically significant numbers are provided in the `consts` sub-module. #![stable(feature = "rust1", since = "1.0.0")] #![allow(missing_docs)] @@ -173,8 +173,10 @@ impl f64 { } /// Fused multiply-add. Computes `(self * a) + b` with only one rounding - /// error. This produces a more accurate result with better performance than - /// a separate multiplication operation followed by an add. + /// error, yielding a more accurate result than an unfused multiply-add. + /// + /// Using `mul_add` can be more performant than an unfused multiply-add if + /// the target architecture has a dedicated `fma` CPU instruction. /// /// ``` /// let m = 10.0_f64; diff --git a/src/libstd/primitive_docs.rs b/src/libstd/primitive_docs.rs index 437d7d74cae0d..7074928eaf6da 100644 --- a/src/libstd/primitive_docs.rs +++ b/src/libstd/primitive_docs.rs @@ -370,6 +370,8 @@ mod prim_unit { } // /// Raw, unsafe pointers, `*const T`, and `*mut T`. /// +/// *[See also the `std::ptr` module](ptr/index.html).* +/// /// Working with raw pointers in Rust is uncommon, /// typically limited to a few patterns. /// @@ -444,8 +446,6 @@ mod prim_unit { } /// but C APIs hand out a lot of pointers generally, so are a common source /// of raw pointers in Rust. /// -/// *[See also the `std::ptr` module](ptr/index.html).* -/// /// [`null`]: ../std/ptr/fn.null.html /// [`null_mut`]: ../std/ptr/fn.null_mut.html /// [`is_null`]: ../std/primitive.pointer.html#method.is_null @@ -563,6 +563,8 @@ mod prim_array { } // /// A dynamically-sized view into a contiguous sequence, `[T]`. /// +/// *[See also the `std::slice` module](slice/index.html).* +/// /// Slices are a view into a block of memory represented as a pointer and a /// length. /// @@ -585,8 +587,6 @@ mod prim_array { } /// assert_eq!(x, &[1, 7, 3]); /// ``` /// -/// *[See also the `std::slice` module](slice/index.html).* -/// #[stable(feature = "rust1", since = "1.0.0")] mod prim_slice { } @@ -594,15 +594,13 @@ mod prim_slice { } // /// String slices. /// +/// *[See also the `std::str` module](str/index.html).* +/// /// The `str` type, also called a 'string slice', is the most primitive string /// type. It is usually seen in its borrowed form, `&str`. It is also the type /// of string literals, `&'static str`. 
/// -/// Strings slices are always valid UTF-8. -/// -/// This documentation describes a number of methods and trait implementations -/// on the `str` type. For technical reasons, there is additional, separate -/// documentation in the [`std::str`](str/index.html) module as well. +/// String slices are always valid UTF-8. /// /// # Examples /// @@ -862,11 +860,11 @@ mod prim_u128 { } // /// The pointer-sized signed integer type. /// +/// *[See also the `std::isize` module](isize/index.html).* +/// /// The size of this primitive is how many bytes it takes to reference any /// location in memory. For example, on a 32 bit target, this is 4 bytes /// and on a 64 bit target, this is 8 bytes. -/// -/// *[See also the `std::isize` module](isize/index.html).* #[stable(feature = "rust1", since = "1.0.0")] mod prim_isize { } @@ -874,11 +872,11 @@ mod prim_isize { } // /// The pointer-sized unsigned integer type. /// +/// *[See also the `std::usize` module](usize/index.html).* +/// /// The size of this primitive is how many bytes it takes to reference any /// location in memory. For example, on a 32 bit target, this is 4 bytes /// and on a 64 bit target, this is 8 bytes. -/// -/// *[See also the `std::usize` module](usize/index.html).* #[stable(feature = "rust1", since = "1.0.0")] mod prim_usize { } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index bf4a68679df55..49b30c6f460fe 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -5741,7 +5741,7 @@ impl<'a> Parser<'a> { let vis = p.parse_visibility(true)?; let ty = p.parse_ty()?; Ok(StructField { - span: lo.to(p.span), + span: lo.to(ty.span), vis, ident: None, id: ast::DUMMY_NODE_ID, diff --git a/src/libsyntax_pos/span_encoding.rs b/src/libsyntax_pos/span_encoding.rs index b55fe4bcb2672..601a0273ae911 100644 --- a/src/libsyntax_pos/span_encoding.rs +++ b/src/libsyntax_pos/span_encoding.rs @@ -31,11 +31,13 @@ pub struct Span(u32); impl Copy for Span {} impl Clone for Span { + #[inline] fn clone(&self) -> Span { *self } } impl PartialEq for Span { + #[inline] fn eq(&self, other: &Span) -> bool { let a = self.0; let b = other.0; @@ -44,6 +46,7 @@ impl PartialEq for Span { } impl Eq for Span {} impl Hash for Span { + #[inline] fn hash(&self, state: &mut H) { let a = self.0; a.hash(state) diff --git a/src/llvm b/src/llvm index b6c1a03fb498f..fd7dd99edf371 160000 --- a/src/llvm +++ b/src/llvm @@ -1 +1 @@ -Subproject commit b6c1a03fb498f6c03d1cbfd4404223a046f8c3b2 +Subproject commit fd7dd99edf371ac502ae4e70288c027f6692ace0 diff --git a/src/test/run-make/cross-lang-lto/Makefile b/src/test/run-make/cross-lang-lto/Makefile index 98b509cd81f54..925f686fe1161 100644 --- a/src/test/run-make/cross-lang-lto/Makefile +++ b/src/test/run-make/cross-lang-lto/Makefile @@ -18,9 +18,9 @@ endif OBJDUMP=llvm-objdump SECTION_HEADERS=$(OBJDUMP) -section-headers -BUILD_LIB=$(RUSTC) lib.rs -Copt-level=2 -Z cross-lang-lto -Ccodegen-units=1 +BUILD_LIB=$(RUSTC) lib.rs -Copt-level=2 -Z cross-lang-lto=no-link -Ccodegen-units=1 -BUILD_EXE=$(RUSTC) main.rs -Copt-level=2 -Z cross-lang-lto -Ccodegen-units=1 --emit=obj +BUILD_EXE=$(RUSTC) main.rs -Copt-level=2 -Z cross-lang-lto=no-link -Ccodegen-units=1 --emit=obj all: staticlib staticlib-fat-lto staticlib-thin-lto rlib exe cdylib rdylib diff --git a/src/test/ui/issue-3008-1.stderr b/src/test/ui/issue-3008-1.stderr index 05c1f2aae8141..fe3e294d9e32a 100644 --- a/src/test/ui/issue-3008-1.stderr +++ b/src/test/ui/issue-3008-1.stderr @@ -5,7 +5,7 @@ LL | enum Bar { | ^^^^^^^^ 
recursive type has infinite size ... LL | BarSome(Bar) - | ---- recursive without indirection + | --- recursive without indirection | = help: insert indirection (e.g., a `Box`, `Rc`, or `&`) at some point to make `Bar` representable diff --git a/src/test/ui/issue-32326.stderr b/src/test/ui/issue-32326.stderr index 1aa99c402ed8d..b7e359af64ad0 100644 --- a/src/test/ui/issue-32326.stderr +++ b/src/test/ui/issue-32326.stderr @@ -4,7 +4,7 @@ error[E0072]: recursive type `Expr` has infinite size LL | enum Expr { //~ ERROR E0072 | ^^^^^^^^^ recursive type has infinite size LL | Plus(Expr, Expr), - | ----- ----- recursive without indirection + | ---- ---- recursive without indirection | | | recursive without indirection | diff --git a/src/test/ui/rfc-2093-infer-outlives/enum.rs b/src/test/ui/rfc-2093-infer-outlives/enum.rs index 7d0427adb9f3e..5071465b5f647 100644 --- a/src/test/ui/rfc-2093-infer-outlives/enum.rs +++ b/src/test/ui/rfc-2093-infer-outlives/enum.rs @@ -20,14 +20,14 @@ enum Foo<'a, T> { // Type U needs to outlive lifetime 'b struct Bar<'b, U> { - field2: &'b U //~ ERROR 23:5: 23:18: the parameter type `U` may not live long enough [E0309] + field2: &'b U //~ ERROR the parameter type `U` may not live long enough [E0309] } // Type K needs to outlive lifetime 'c. enum Ying<'c, K> { - One(&'c Yang) //~ ERROR 30:9: 30:21: the parameter type `K` may not live long enough [E0309] + One(&'c Yang) //~ ERROR the parameter type `K` may not live long enough [E0309] } struct Yang { diff --git a/src/test/ui/rfc-2093-infer-outlives/enum.stderr b/src/test/ui/rfc-2093-infer-outlives/enum.stderr index e6eaf9b475450..604dd0b43c04a 100644 --- a/src/test/ui/rfc-2093-infer-outlives/enum.stderr +++ b/src/test/ui/rfc-2093-infer-outlives/enum.stderr @@ -3,13 +3,13 @@ error[E0309]: the parameter type `U` may not live long enough | LL | struct Bar<'b, U> { | - help: consider adding an explicit lifetime bound `U: 'b`... -LL | field2: &'b U //~ ERROR 23:5: 23:18: the parameter type `U` may not live long enough [E0309] +LL | field2: &'b U //~ ERROR the parameter type `U` may not live long enough [E0309] | ^^^^^^^^^^^^^ | note: ...so that the reference type `&'b U` does not outlive the data it points at --> $DIR/enum.rs:23:5 | -LL | field2: &'b U //~ ERROR 23:5: 23:18: the parameter type `U` may not live long enough [E0309] +LL | field2: &'b U //~ ERROR the parameter type `U` may not live long enough [E0309] | ^^^^^^^^^^^^^ error[E0309]: the parameter type `K` may not live long enough @@ -17,14 +17,14 @@ error[E0309]: the parameter type `K` may not live long enough | LL | enum Ying<'c, K> { | - help: consider adding an explicit lifetime bound `K: 'c`... 
-LL | One(&'c Yang) //~ ERROR 30:9: 30:21: the parameter type `K` may not live long enough [E0309] - | ^^^^^^^^^^^^ +LL | One(&'c Yang) //~ ERROR the parameter type `K` may not live long enough [E0309] + | ^^^^^^^^^^^ | note: ...so that the reference type `&'c Yang` does not outlive the data it points at --> $DIR/enum.rs:30:9 | -LL | One(&'c Yang) //~ ERROR 30:9: 30:21: the parameter type `K` may not live long enough [E0309] - | ^^^^^^^^^^^^ +LL | One(&'c Yang) //~ ERROR the parameter type `K` may not live long enough [E0309] + | ^^^^^^^^^^^ error: aborting due to 2 previous errors diff --git a/src/test/ui/rfc1598-generic-associated-types/collections.rs b/src/test/ui/rfc1598-generic-associated-types/collections.rs new file mode 100644 index 0000000000000..e71166ed65bba --- /dev/null +++ b/src/test/ui/rfc1598-generic-associated-types/collections.rs @@ -0,0 +1,97 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generic_associated_types)] +#![feature(associated_type_defaults)] + +//FIXME(#44265): "lifetime parameters are not allowed on this type" errors will be addressed in a +//follow-up PR + +// A Collection trait and collection families. Based on +// http://smallcultfollowing.com/babysteps/blog/2016/11/03/ +// associated-type-constructors-part-2-family-traits/ + +trait Collection { + type Iter<'iter>: Iterator; + type Family: CollectionFamily; + // Test associated type defaults with parameters + type Sibling: Collection = + <>::Family as CollectionFamily>::Member; + //~^ ERROR type parameters are not allowed on this type [E0109] + + fn empty() -> Self; + + fn add(&mut self, value: T); + + fn iterate<'iter>(&'iter self) -> Self::Iter<'iter>; + //~^ ERROR lifetime parameters are not allowed on this type [E0110] +} + +trait CollectionFamily { + type Member: Collection; +} + +struct VecFamily; + +impl CollectionFamily for VecFamily { + type Member = Vec; +} + +impl Collection for Vec { + type Iter<'iter> = std::slice::Iter<'iter, T>; + type Family = VecFamily; + + fn empty() -> Self { + Vec::new() + } + + fn add(&mut self, value: T) { + self.push(value) + } + + fn iterate<'iter>(&'iter self) -> Self::Iter<'iter> { + //~^ ERROR lifetime parameters are not allowed on this type [E0110] + self.iter() + } +} + +fn floatify(ints: &C) -> <>::Family as CollectionFamily>::Member +//~^ ERROR type parameters are not allowed on this type [E0109] +where + C: Collection, +{ + let mut res = C::Family::Member::::empty(); + for &v in ints.iterate() { + res.add(v as f32); + } + res +} + +fn floatify_sibling(ints: &C) -> >::Sibling +//~^ ERROR type parameters are not allowed on this type [E0109] +where + C: Collection, +{ + let mut res = C::Family::Member::::empty(); + for &v in ints.iterate() { + res.add(v as f32); + } + res +} + +fn use_floatify() { + let a = vec![1i32, 2, 3]; + let b = floatify(a); + println!("{}", b.iterate().next()); + let c = floatify_sibling(a); + println!("{}", c.iterate().next()); +} + +fn main() {} diff --git a/src/test/ui/rfc1598-generic-associated-types/collections.stderr b/src/test/ui/rfc1598-generic-associated-types/collections.stderr new file mode 100644 index 0000000000000..ed96570583f4f --- /dev/null +++ 
b/src/test/ui/rfc1598-generic-associated-types/collections.stderr @@ -0,0 +1,34 @@ +error[E0109]: type parameters are not allowed on this type + --> $DIR/collections.rs:65:90 + | +LL | fn floatify(ints: &C) -> <>::Family as CollectionFamily>::Member + | ^^^ type parameter not allowed + +error[E0109]: type parameters are not allowed on this type + --> $DIR/collections.rs:77:69 + | +LL | fn floatify_sibling(ints: &C) -> >::Sibling + | ^^^ type parameter not allowed + +error[E0109]: type parameters are not allowed on this type + --> $DIR/collections.rs:26:71 + | +LL | <>::Family as CollectionFamily>::Member; + | ^ type parameter not allowed + +error[E0110]: lifetime parameters are not allowed on this type + --> $DIR/collections.rs:33:50 + | +LL | fn iterate<'iter>(&'iter self) -> Self::Iter<'iter>; + | ^^^^^ lifetime parameter not allowed on this type + +error[E0110]: lifetime parameters are not allowed on this type + --> $DIR/collections.rs:59:50 + | +LL | fn iterate<'iter>(&'iter self) -> Self::Iter<'iter> { + | ^^^^^ lifetime parameter not allowed on this type + +error: aborting due to 5 previous errors + +Some errors occurred: E0109, E0110. +For more information about an error, try `rustc --explain E0109`. diff --git a/src/test/ui/rfc1598-generic-associated-types/construct_with_other_type.rs b/src/test/ui/rfc1598-generic-associated-types/construct_with_other_type.rs index 0d9b487876e21..0429410031526 100644 --- a/src/test/ui/rfc1598-generic-associated-types/construct_with_other_type.rs +++ b/src/test/ui/rfc1598-generic-associated-types/construct_with_other_type.rs @@ -10,6 +10,8 @@ #![feature(generic_associated_types)] +use std::ops::Deref; + //FIXME(#44265): "lifetime parameters are not allowed on this type" errors will be addressed in a //follow-up PR @@ -18,11 +20,18 @@ trait Foo { } trait Baz { - type Quux<'a>; + type Quux<'a>: Foo; + + // This weird type tests that we can use universal function call syntax to access the Item on + type Baa<'a>: Deref as Foo>::Bar<'a, 'static>>; + //~^ ERROR lifetime parameters are not allowed on this type [E0110] + //~| ERROR lifetime parameters are not allowed on this type [E0110] } impl Baz for T where T: Foo { - type Quux<'a> = ::Bar<'a, 'static>; + type Quux<'a> = T; + + type Baa<'a> = &'a ::Bar<'a, 'static>; //~^ ERROR lifetime parameters are not allowed on this type [E0110] } diff --git a/src/test/ui/rfc1598-generic-associated-types/construct_with_other_type.stderr b/src/test/ui/rfc1598-generic-associated-types/construct_with_other_type.stderr index 054530e24bd18..764a0db2478a8 100644 --- a/src/test/ui/rfc1598-generic-associated-types/construct_with_other_type.stderr +++ b/src/test/ui/rfc1598-generic-associated-types/construct_with_other_type.stderr @@ -1,9 +1,21 @@ error[E0110]: lifetime parameters are not allowed on this type - --> $DIR/construct_with_other_type.rs:25:37 + --> $DIR/construct_with_other_type.rs:26:46 | -LL | type Quux<'a> = ::Bar<'a, 'static>; - | ^^ lifetime parameter not allowed on this type +LL | type Baa<'a>: Deref as Foo>::Bar<'a, 'static>>; + | ^^ lifetime parameter not allowed on this type -error: aborting due to previous error +error[E0110]: lifetime parameters are not allowed on this type + --> $DIR/construct_with_other_type.rs:26:63 + | +LL | type Baa<'a>: Deref as Foo>::Bar<'a, 'static>>; + | ^^ lifetime parameter not allowed on this type + +error[E0110]: lifetime parameters are not allowed on this type + --> $DIR/construct_with_other_type.rs:34:40 + | +LL | type Baa<'a> = &'a ::Bar<'a, 'static>; + | ^^ lifetime 
parameter not allowed on this type + +error: aborting due to 3 previous errors For more information about this error, try `rustc --explain E0110`. diff --git a/src/test/ui/rfc1598-generic-associated-types/iterable.rs b/src/test/ui/rfc1598-generic-associated-types/iterable.rs index 1287ddaf7f7fe..38967dbbe4530 100644 --- a/src/test/ui/rfc1598-generic-associated-types/iterable.rs +++ b/src/test/ui/rfc1598-generic-associated-types/iterable.rs @@ -20,13 +20,40 @@ trait Iterable { type Iter<'a>: Iterator>; //~^ ERROR lifetime parameters are not allowed on this type [E0110] - // This weird type tests that we can use universal function call syntax to access the Item on - // Self::Iter which we have declared to be an Iterator - type Iter2<'a>: Deref as Iterator>::Item>; + fn iter<'a>(&'a self) -> Self::Iter<'a>; //~^ ERROR lifetime parameters are not allowed on this type [E0110] +} - fn iter<'a>(&'a self) -> Self::Iter<'a>; +// Impl for struct type +impl Iterable for Vec { + type Item<'a> = &'a T; + type Iter<'a> = std::slice::Iter<'a, T>; + + fn iter<'a>(&'a self) -> Self::Iter<'a> { + //~^ ERROR lifetime parameters are not allowed on this type [E0110] + self.iter() + } +} + +// Impl for a primitive type +impl Iterable for [T] { + type Item<'a> = &'a T; + type Iter<'a> = std::slice::Iter<'a, T>; + + fn iter<'a>(&'a self) -> Self::Iter<'a> { + //~^ ERROR lifetime parameters are not allowed on this type [E0110] + self.iter() + } +} + +fn make_iter<'a, I: Iterable>(it: &'a I) -> I::Iter<'a> { + //~^ ERROR lifetime parameters are not allowed on this type [E0110] + it.iter() +} + +fn get_first<'a, I: Iterable>(it: &'a I) -> Option> { //~^ ERROR lifetime parameters are not allowed on this type [E0110] + it.iter().next() } fn main() {} diff --git a/src/test/ui/rfc1598-generic-associated-types/iterable.stderr b/src/test/ui/rfc1598-generic-associated-types/iterable.stderr index d33eebb42d607..0e251300e451f 100644 --- a/src/test/ui/rfc1598-generic-associated-types/iterable.stderr +++ b/src/test/ui/rfc1598-generic-associated-types/iterable.stderr @@ -5,17 +5,35 @@ LL | type Iter<'a>: Iterator>; | ^^ lifetime parameter not allowed on this type error[E0110]: lifetime parameters are not allowed on this type - --> $DIR/iterable.rs:25:48 + --> $DIR/iterable.rs:49:53 | -LL | type Iter2<'a>: Deref as Iterator>::Item>; - | ^^ lifetime parameter not allowed on this type +LL | fn make_iter<'a, I: Iterable>(it: &'a I) -> I::Iter<'a> { + | ^^ lifetime parameter not allowed on this type error[E0110]: lifetime parameters are not allowed on this type - --> $DIR/iterable.rs:28:41 + --> $DIR/iterable.rs:54:60 + | +LL | fn get_first<'a, I: Iterable>(it: &'a I) -> Option> { + | ^^ lifetime parameter not allowed on this type + +error[E0110]: lifetime parameters are not allowed on this type + --> $DIR/iterable.rs:23:41 | LL | fn iter<'a>(&'a self) -> Self::Iter<'a>; | ^^ lifetime parameter not allowed on this type -error: aborting due to 3 previous errors +error[E0110]: lifetime parameters are not allowed on this type + --> $DIR/iterable.rs:32:41 + | +LL | fn iter<'a>(&'a self) -> Self::Iter<'a> { + | ^^ lifetime parameter not allowed on this type + +error[E0110]: lifetime parameters are not allowed on this type + --> $DIR/iterable.rs:43:41 + | +LL | fn iter<'a>(&'a self) -> Self::Iter<'a> { + | ^^ lifetime parameter not allowed on this type + +error: aborting due to 6 previous errors For more information about this error, try `rustc --explain E0110`. 
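The two generic-associated-types tests above (collections.rs and iterable.rs) exercise associated types that take lifetime parameters. As the FIXME(#44265) comments and the expected E0109/E0110 errors indicate, the compiler at this point still rejects applying those parameters, so the tests pin down today's diagnostics rather than working code. The sketch below shows the `Iterable` shape the tests are driving toward, written for a compiler that fully accepts generic associated types; the `where Self: 'a` bounds, the `as_slice()` call, and the small `main` are additions of mine to make the snippet build standalone, not lines taken from the test.

```rust
// Lifetime-parameterized associated types: each call to `iter` borrows the
// collection for a caller-chosen lifetime 'a, and both the iterator type and
// its item type are parameterized by that lifetime.
trait Iterable {
    type Item<'a> where Self: 'a;
    type Iter<'a>: Iterator<Item = Self::Item<'a>> where Self: 'a;

    fn iter<'a>(&'a self) -> Self::Iter<'a>;
}

impl<T> Iterable for Vec<T> {
    type Item<'a> = &'a T where Self: 'a;
    type Iter<'a> = std::slice::Iter<'a, T> where Self: 'a;

    fn iter<'a>(&'a self) -> Self::Iter<'a> {
        // Go through the slice's inherent `iter` so method resolution does not
        // pick `Iterable::iter` again and recurse.
        self.as_slice().iter()
    }
}

// Mirrors `get_first` from iterable.rs: generic code only needs `I: Iterable`.
fn get_first<'a, I: Iterable>(it: &'a I) -> Option<I::Item<'a>> {
    it.iter().next()
}

fn main() {
    let v = vec![1, 2, 3];
    assert_eq!(get_first(&v), Some(&1));
}
```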
diff --git a/src/test/ui/rfc1598-generic-associated-types/parameter_number_and_kind.rs b/src/test/ui/rfc1598-generic-associated-types/parameter_number_and_kind.rs new file mode 100644 index 0000000000000..51527d4117c2c --- /dev/null +++ b/src/test/ui/rfc1598-generic-associated-types/parameter_number_and_kind.rs @@ -0,0 +1,56 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generic_associated_types)] +#![feature(associated_type_defaults)] + +//FIXME(#44265): "lifetime parameters are not allowed on this type" errors will be addressed in a +//follow-up PR + +//FIXME(#44265): Update expected errors once E110 is resolved, now does not get past `trait Foo` + +trait Foo { + type A<'a>; + type B<'a, 'b>; + type C; + type D; + type E<'a, T>; + // Test parameters in default values + type FOk = Self::E<'static, T>; + //~^ ERROR type parameters are not allowed on this type [E0109] + //~| ERROR lifetime parameters are not allowed on this type [E0110] + type FErr1 = Self::E<'static, 'static>; // Error + //~^ ERROR lifetime parameters are not allowed on this type [E0110] + type FErr2 = Self::E<'static, T, u32>; // Error + //~^ ERROR type parameters are not allowed on this type [E0109] + //~| ERROR lifetime parameters are not allowed on this type [E0110] +} + +struct Fooy; + +impl Foo for Fooy { + type A = u32; // Error: parameter expected + type B<'a, T> = Vec; // Error: lifetime param expected + type C<'a> = u32; // Error: no param expected + type D<'a> = u32; // Error: type param expected + type E = u32; // Error: lifetime expected as the first param +} + +struct Fooer; + +impl Foo for Fooer { + type A = u32; // Error: lifetime parameter expected + type B<'a> = u32; // Error: another lifetime param expected + type C = T; // Error: no param expected + type D<'b, T> = u32; // Error: unexpected lifetime param + type E<'a, 'b> = u32; // Error: type expected as the second param +} + +fn main() {} diff --git a/src/test/ui/rfc1598-generic-associated-types/parameter_number_and_kind.stderr b/src/test/ui/rfc1598-generic-associated-types/parameter_number_and_kind.stderr new file mode 100644 index 0000000000000..df83fdaad5bfa --- /dev/null +++ b/src/test/ui/rfc1598-generic-associated-types/parameter_number_and_kind.stderr @@ -0,0 +1,34 @@ +error[E0109]: type parameters are not allowed on this type + --> $DIR/parameter_number_and_kind.rs:26:36 + | +LL | type FOk = Self::E<'static, T>; + | ^ type parameter not allowed + +error[E0110]: lifetime parameters are not allowed on this type + --> $DIR/parameter_number_and_kind.rs:26:27 + | +LL | type FOk = Self::E<'static, T>; + | ^^^^^^^ lifetime parameter not allowed on this type + +error[E0110]: lifetime parameters are not allowed on this type + --> $DIR/parameter_number_and_kind.rs:29:26 + | +LL | type FErr1 = Self::E<'static, 'static>; // Error + | ^^^^^^^ lifetime parameter not allowed on this type + +error[E0109]: type parameters are not allowed on this type + --> $DIR/parameter_number_and_kind.rs:31:38 + | +LL | type FErr2 = Self::E<'static, T, u32>; // Error + | ^ type parameter not allowed + +error[E0110]: lifetime parameters are not allowed on this type + --> $DIR/parameter_number_and_kind.rs:31:29 + | +LL | type FErr2 = Self::E<'static, T, 
u32>; // Error + | ^^^^^^^ lifetime parameter not allowed on this type + +error: aborting due to 5 previous errors + +Some errors occurred: E0109, E0110. +For more information about an error, try `rustc --explain E0109`. diff --git a/src/test/ui/rfc1598-generic-associated-types/shadowing.rs b/src/test/ui/rfc1598-generic-associated-types/shadowing.rs new file mode 100644 index 0000000000000..6cdcaf2568394 --- /dev/null +++ b/src/test/ui/rfc1598-generic-associated-types/shadowing.rs @@ -0,0 +1,42 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generic_associated_types)] + +//FIXME(#44265): The lifetime shadowing and type parameter shadowing +// should cause an error. Now it compiles (errorneously) and this will be addressed +// by a future PR. Then remove the following: +// compile-pass + +trait Shadow<'a> { + type Bar<'a>; // Error: shadowed lifetime +} + +trait NoShadow<'a> { + type Bar<'b>; // OK +} + +impl<'a> NoShadow<'a> for &'a u32 { + type Bar<'a> = i32; // Error: shadowed lifetime +} + +trait ShadowT { + type Bar; // Error: shadowed type parameter +} + +trait NoShadowT { + type Bar; // OK +} + +impl NoShadowT for Option { + type Bar = i32; // Error: shadowed type parameter +} + +fn main() {} diff --git a/src/test/ui/rfc1598-generic-associated-types/shadowing.stdout b/src/test/ui/rfc1598-generic-associated-types/shadowing.stdout new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/src/test/ui/rfc1598-generic-associated-types/streaming_iterator.rs b/src/test/ui/rfc1598-generic-associated-types/streaming_iterator.rs index f9e270ee92e22..522ddb5dc135e 100644 --- a/src/test/ui/rfc1598-generic-associated-types/streaming_iterator.rs +++ b/src/test/ui/rfc1598-generic-associated-types/streaming_iterator.rs @@ -35,4 +35,48 @@ struct Foo { fn foo(iter: T) where T: StreamingIterator, for<'a> T::Item<'a>: Display { /* ... 
*/ } //~^ ERROR lifetime parameters are not allowed on this type [E0110] +// Full example of enumerate iterator + +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +struct StreamEnumerate { + iter: I, + count: usize, +} + +impl StreamingIterator for StreamEnumerate { + type Item<'a> = (usize, I::Item<'a>); + //~^ ERROR lifetime parameters are not allowed on this type [E0110] + fn next<'a>(&'a self) -> Option> { + //~^ ERROR lifetime parameters are not allowed on this type [E0110] + match self.iter.next() { + None => None, + Some(val) => { + let r = Some((self.count, val)); + self.count += 1; + r + } + } + } +} + +impl StreamEnumerate { + pub fn new(iter: I) -> Self { + StreamEnumerate { + count: 0, + iter: iter, + } + } +} + +fn test_stream_enumerate() { + let v = vec!["a", "b", "c"]; + let se = StreamEnumerate::new(v.iter()); + let a: &str = se.next().unwrap().1; + for (i, s) in se { + println!("{} {}", i, s); + } + println!("{}", a); +} + + fn main() {} diff --git a/src/test/ui/rfc1598-generic-associated-types/streaming_iterator.stderr b/src/test/ui/rfc1598-generic-associated-types/streaming_iterator.stderr index 9ab80151a7ed3..607a4b8d57996 100644 --- a/src/test/ui/rfc1598-generic-associated-types/streaming_iterator.stderr +++ b/src/test/ui/rfc1598-generic-associated-types/streaming_iterator.stderr @@ -16,6 +16,18 @@ error[E0110]: lifetime parameters are not allowed on this type LL | fn next<'a>(&'a self) -> Option>; | ^^ lifetime parameter not allowed on this type -error: aborting due to 3 previous errors +error[E0110]: lifetime parameters are not allowed on this type + --> $DIR/streaming_iterator.rs:47:37 + | +LL | type Item<'a> = (usize, I::Item<'a>); + | ^^ lifetime parameter not allowed on this type + +error[E0110]: lifetime parameters are not allowed on this type + --> $DIR/streaming_iterator.rs:49:48 + | +LL | fn next<'a>(&'a self) -> Option> { + | ^^ lifetime parameter not allowed on this type + +error: aborting due to 5 previous errors For more information about this error, try `rustc --explain E0110`. 
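streaming_iterator.rs grows a full `StreamEnumerate` adapter behind the same still-incomplete feature gate; the new E0110 expectations above are the point of the test. For reference, the sketch below is roughly what that adapter looks like on a compiler that accepts generic associated types. Unlike the test, `next` here takes `&mut self` (so the counter can actually advance) and the `where Self: 'a` bounds are spelled out; both are my adjustments for a standalone build, not part of the test source.

```rust
// A lending ("streaming") iterator: the yielded item may borrow from the
// iterator itself, which is what the lifetime parameter on `Item` expresses.
trait StreamingIterator {
    type Item<'a> where Self: 'a;
    fn next(&mut self) -> Option<Self::Item<'_>>;
}

// Counterpart of the test's StreamEnumerate: wraps another streaming iterator
// and pairs each lent item with a running index.
struct StreamEnumerate<I> {
    iter: I,
    count: usize,
}

impl<I> StreamEnumerate<I> {
    fn new(iter: I) -> Self {
        StreamEnumerate { iter, count: 0 }
    }
}

impl<I: StreamingIterator> StreamingIterator for StreamEnumerate<I> {
    type Item<'a> = (usize, I::Item<'a>) where Self: 'a;

    fn next(&mut self) -> Option<Self::Item<'_>> {
        // Borrow the inner iterator for the lent item, then bump the counter;
        // the two touch disjoint fields, so the borrow checker accepts it.
        let item = self.iter.next()?;
        let indexed = (self.count, item);
        self.count += 1;
        Some(indexed)
    }
}
```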
diff --git a/src/test/ui/span/E0204.stderr b/src/test/ui/span/E0204.stderr index 8be1af61d80b3..2949a22747b87 100644 --- a/src/test/ui/span/E0204.stderr +++ b/src/test/ui/span/E0204.stderr @@ -32,7 +32,7 @@ LL | #[derive(Copy)] //~ ERROR may not be implemented for this type | ^^^^ LL | enum EFoo2<'a> { LL | Bar(&'a mut bool), - | ------------- this field does not implement `Copy` + | ------------ this field does not implement `Copy` error: aborting due to 4 previous errors diff --git a/src/test/ui/union/union-sized-field.stderr b/src/test/ui/union/union-sized-field.stderr index fb81a7b695d4e..ba80af6c7e037 100644 --- a/src/test/ui/union/union-sized-field.stderr +++ b/src/test/ui/union/union-sized-field.stderr @@ -22,7 +22,7 @@ error[E0277]: the trait bound `T: std::marker::Sized` is not satisfied --> $DIR/union-sized-field.rs:23:11 | LL | Value(T), //~ ERROR the trait bound `T: std::marker::Sized` is not satisfied - | ^^ `T` does not have a constant size known at compile-time + | ^ `T` does not have a constant size known at compile-time | = help: the trait `std::marker::Sized` is not implemented for `T` = help: consider adding a `where T: std::marker::Sized` bound diff --git a/src/test/ui/unsized-enum2.stderr b/src/test/ui/unsized-enum2.stderr index c05c1cfe41296..0e18efbf9da3e 100644 --- a/src/test/ui/unsized-enum2.stderr +++ b/src/test/ui/unsized-enum2.stderr @@ -2,7 +2,7 @@ error[E0277]: the trait bound `W: std::marker::Sized` is not satisfied --> $DIR/unsized-enum2.rs:33:8 | LL | VA(W), //~ ERROR `W: std::marker::Sized` is not satisfied - | ^^ `W` does not have a constant size known at compile-time + | ^ `W` does not have a constant size known at compile-time | = help: the trait `std::marker::Sized` is not implemented for `W` = help: consider adding a `where W: std::marker::Sized` bound @@ -22,7 +22,7 @@ error[E0277]: the trait bound `Y: std::marker::Sized` is not satisfied --> $DIR/unsized-enum2.rs:35:15 | LL | VC(isize, Y), //~ ERROR `Y: std::marker::Sized` is not satisfied - | ^^ `Y` does not have a constant size known at compile-time + | ^ `Y` does not have a constant size known at compile-time | = help: the trait `std::marker::Sized` is not implemented for `Y` = help: consider adding a `where Y: std::marker::Sized` bound @@ -42,7 +42,7 @@ error[E0277]: the trait bound `[u8]: std::marker::Sized` is not satisfied --> $DIR/unsized-enum2.rs:39:8 | LL | VE([u8]), //~ ERROR `[u8]: std::marker::Sized` is not satisfied - | ^^^^^ `[u8]` does not have a constant size known at compile-time + | ^^^^ `[u8]` does not have a constant size known at compile-time | = help: the trait `std::marker::Sized` is not implemented for `[u8]` = note: no field of an enum variant may have a dynamically sized type @@ -60,7 +60,7 @@ error[E0277]: the trait bound `[f32]: std::marker::Sized` is not satisfied --> $DIR/unsized-enum2.rs:41:15 | LL | VG(isize, [f32]), //~ ERROR `[f32]: std::marker::Sized` is not satisfied - | ^^^^^^ `[f32]` does not have a constant size known at compile-time + | ^^^^^ `[f32]` does not have a constant size known at compile-time | = help: the trait `std::marker::Sized` is not implemented for `[f32]` = note: no field of an enum variant may have a dynamically sized type @@ -78,7 +78,7 @@ error[E0277]: the trait bound `Foo + 'static: std::marker::Sized` is not satisfi --> $DIR/unsized-enum2.rs:51:8 | LL | VM(Foo), //~ ERROR `Foo + 'static: std::marker::Sized` is not satisfied - | ^^^^ `Foo + 'static` does not have a constant size known at compile-time + | ^^^ `Foo + 'static` does not have 
a constant size known at compile-time | = help: the trait `std::marker::Sized` is not implemented for `Foo + 'static` = note: no field of an enum variant may have a dynamically sized type @@ -96,7 +96,7 @@ error[E0277]: the trait bound `FooBar + 'static: std::marker::Sized` is not sati --> $DIR/unsized-enum2.rs:53:15 | LL | VO(isize, FooBar), //~ ERROR `FooBar + 'static: std::marker::Sized` is not satisfied - | ^^^^^^^ `FooBar + 'static` does not have a constant size known at compile-time + | ^^^^^^ `FooBar + 'static` does not have a constant size known at compile-time | = help: the trait `std::marker::Sized` is not implemented for `FooBar + 'static` = note: no field of an enum variant may have a dynamically sized type @@ -114,7 +114,7 @@ error[E0277]: the trait bound `[i8]: std::marker::Sized` is not satisfied --> $DIR/unsized-enum2.rs:57:8 | LL | VQ(<&'static [i8] as Deref>::Target), //~ ERROR `[i8]: std::marker::Sized` is not satisfied - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `[i8]` does not have a constant size known at compile-time + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `[i8]` does not have a constant size known at compile-time | = help: the trait `std::marker::Sized` is not implemented for `[i8]` = note: no field of an enum variant may have a dynamically sized type @@ -132,7 +132,7 @@ error[E0277]: the trait bound `[f64]: std::marker::Sized` is not satisfied --> $DIR/unsized-enum2.rs:60:15 | LL | VS(isize, <&'static [f64] as Deref>::Target), - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `[f64]` does not have a constant size known at compile-time + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `[f64]` does not have a constant size known at compile-time | = help: the trait `std::marker::Sized` is not implemented for `[f64]` = note: no field of an enum variant may have a dynamically sized type @@ -150,7 +150,7 @@ error[E0277]: the trait bound `PathHelper1 + 'static: std::marker::Sized` is not --> $DIR/unsized-enum2.rs:45:8 | LL | VI(Path1), //~ ERROR `PathHelper1 + 'static: std::marker::Sized` is not satisfied - | ^^^^^^ `PathHelper1 + 'static` does not have a constant size known at compile-time + | ^^^^^ `PathHelper1 + 'static` does not have a constant size known at compile-time | = help: within `Path1`, the trait `std::marker::Sized` is not implemented for `PathHelper1 + 'static` = note: required because it appears within the type `Path1` @@ -170,7 +170,7 @@ error[E0277]: the trait bound `PathHelper3 + 'static: std::marker::Sized` is not --> $DIR/unsized-enum2.rs:47:15 | LL | VK(isize, Path3), //~ ERROR `PathHelper3 + 'static: std::marker::Sized` is not satisfied - | ^^^^^^ `PathHelper3 + 'static` does not have a constant size known at compile-time + | ^^^^^ `PathHelper3 + 'static` does not have a constant size known at compile-time | = help: within `Path3`, the trait `std::marker::Sized` is not implemented for `PathHelper3 + 'static` = note: required because it appears within the type `Path3` diff --git a/src/test/ui/update-references.sh b/src/test/ui/update-references.sh index 47a85352b0044..00b4b5c5caa78 100755 --- a/src/test/ui/update-references.sh +++ b/src/test/ui/update-references.sh @@ -26,6 +26,7 @@ if [[ "$1" == "--help" || "$1" == "-h" || "$1" == "" || "$2" == "" ]]; then echo " $0 ../../../build/x86_64-apple-darwin/test/ui *.rs */*.rs" fi +MYDIR=$(dirname $0) BUILD_DIR="$1" shift @@ -33,13 +34,13 @@ shift shopt -s nullglob while [[ "$1" != "" ]]; do - MYDIR=$(dirname $1) for EXT in "stderr" "stdout" "fixed"; do for OUT_NAME in $BUILD_DIR/${1%.rs}.*$EXT; do + OUT_DIR=`dirname "$1"` 
OUT_BASE=`basename "$OUT_NAME"` - if ! (diff $OUT_NAME $MYDIR/$OUT_BASE >& /dev/null); then - echo updating $MYDIR/$OUT_BASE - cp $OUT_NAME $MYDIR + if ! (diff $OUT_NAME $MYDIR/$OUT_DIR/$OUT_BASE >& /dev/null); then + echo updating $MYDIR/$OUT_DIR/$OUT_BASE + cp $OUT_NAME $MYDIR/$OUT_DIR fi done done
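A note on the long tail of .stderr updates in this patch (issue-3008-1, issue-32326, E0204, union-sized-field, unsized-enum2): they all fall out of the one-line parser change earlier in the diff, which ends a tuple-struct or enum-variant field's span at `ty.span` instead of at the parser's current token, so the diagnostic underline no longer swallows the trailing delimiter. A minimal version of the issue-3008-1 test case illustrates the effect; it is rejected with E0072 by design, and the comments show the before/after underline widths from the .stderr diff above.

```rust
// Deliberately ill-formed: a recursive enum without indirection, the subject
// of the issue-3008-1 UI test. rustc reports E0072 for it.
enum Bar {
    None,
    BarSome(Bar),
    //      ---   underline after this patch ("recursive without indirection")
    //      ----  before the patch the span also covered the closing `)`
}

fn main() {}
```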