Rollup of 5 pull requests #114084

Closed
wants to merge 10 commits
5 changes: 3 additions & 2 deletions compiler/rustc_codegen_ssa/src/base.rs
@@ -38,6 +38,7 @@ use rustc_span::symbol::sym;
use rustc_span::Symbol;
use rustc_target::abi::{Align, FIRST_VARIANT};

use std::cmp;
use std::collections::BTreeSet;
use std::time::{Duration, Instant};

@@ -682,10 +683,10 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
// are large size variations, this can reduce memory usage significantly.
let codegen_units: Vec<_> = {
let mut sorted_cgus = codegen_units.iter().collect::<Vec<_>>();
sorted_cgus.sort_by_cached_key(|cgu| cgu.size_estimate());
sorted_cgus.sort_by_key(|cgu| cmp::Reverse(cgu.size_estimate()));

let (first_half, second_half) = sorted_cgus.split_at(sorted_cgus.len() / 2);
second_half.iter().rev().interleave(first_half).copied().collect()
first_half.iter().interleave(second_half.iter().rev()).copied().collect()
};

// Calculate the CGU reuse
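The change above sorts codegen units largest-first and then interleaves the two halves, so large and small units alternate in the schedule and concurrently processed units have more balanced sizes. A minimal, self-contained sketch of that interleaving idea, using plain `usize` sizes instead of real codegen units and a hand-rolled loop in place of the `interleave` adapter used above:

```rust
use std::cmp::Reverse;

// Toy version of the scheduling idea: sort descending, split in half, then
// alternate the largest remaining item with the smallest remaining one.
fn interleave_by_size(mut sizes: Vec<usize>) -> Vec<usize> {
    // Mirrors `sort_by_key(|cgu| cmp::Reverse(cgu.size_estimate()))`.
    sizes.sort_by_key(|&s| Reverse(s));
    let (first_half, second_half) = sizes.split_at(sizes.len() / 2);
    let mut big = first_half.iter().copied();
    let mut small = second_half.iter().rev().copied();
    let mut out = Vec::with_capacity(sizes.len());
    loop {
        match (big.next(), small.next()) {
            (None, None) => break,
            (a, b) => out.extend(a.into_iter().chain(b)),
        }
    }
    out
}

fn main() {
    // Largest first, then smallest, then second largest, and so on.
    assert_eq!(interleave_by_size(vec![3, 10, 1, 7, 4, 9]), vec![10, 1, 9, 3, 7, 4]);
}
```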
12 changes: 9 additions & 3 deletions compiler/rustc_expand/src/parse/tests.rs
@@ -1,4 +1,6 @@
use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};
use crate::tests::{
matches_codepattern, string_to_stream, with_error_checking_parse, with_expected_parse_error,
};

use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token};
@@ -51,11 +53,15 @@ fn string_to_item(source_str: String) -> Option<P<ast::Item>> {
with_error_checking_parse(source_str, &sess(), |p| p.parse_item(ForceCollect::No))
}

#[should_panic]
#[test]
fn bad_path_expr_1() {
// This should trigger error: expected identifier, found keyword `return`
create_default_session_globals_then(|| {
string_to_expr("::abc::def::return".to_string());
with_expected_parse_error(
"::abc::def::return",
"expected identifier, found keyword `return`",
|p| p.parse_expr(),
);
})
}

69 changes: 48 additions & 21 deletions compiler/rustc_expand/src/tests.rs
@@ -22,6 +22,33 @@ fn string_to_parser(ps: &ParseSess, source_str: String) -> Parser<'_> {
new_parser_from_source_str(ps, PathBuf::from("bogofile").into(), source_str)
}

fn create_test_handler() -> (Handler, Lrc<SourceMap>, Arc<Mutex<Vec<u8>>>) {
let output = Arc::new(Mutex::new(Vec::new()));
let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let fallback_bundle = rustc_errors::fallback_fluent_bundle(
vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE],
false,
);
let emitter = EmitterWriter::new(
Box::new(Shared { data: output.clone() }),
Some(source_map.clone()),
None,
fallback_bundle,
false,
false,
false,
Some(140),
false,
false,
TerminalUrl::No,
);
let handler = Handler::with_emitter(true, None, Box::new(emitter), None);
(handler, source_map, output)
}

/// Returns the result of parsing the given string via the given callback.
///
/// If there are any errors, this will panic.
pub(crate) fn with_error_checking_parse<'a, T, F>(s: String, ps: &'a ParseSess, f: F) -> T
where
F: FnOnce(&mut Parser<'a>) -> PResult<'a, T>,
@@ -32,6 +59,26 @@ where
x
}

/// Verifies that parsing the given string using the given callback will
/// generate an error that contains the given text.
pub(crate) fn with_expected_parse_error<T, F>(source_str: &str, expected_output: &str, f: F)
where
F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, T>,
{
let (handler, source_map, output) = create_test_handler();
let ps = ParseSess::with_span_handler(handler, source_map);
let mut p = string_to_parser(&ps, source_str.to_string());
let result = f(&mut p);
assert!(result.is_ok());

let bytes = output.lock().unwrap();
let actual_output = str::from_utf8(&bytes).unwrap();
println!("expected output:\n------\n{}------", expected_output);
println!("actual output:\n------\n{}------", actual_output);

assert!(actual_output.contains(expected_output))
}

/// Maps a string to tts, using a made-up filename.
pub(crate) fn string_to_stream(source_str: String) -> TokenStream {
let ps = ParseSess::new(
@@ -130,13 +177,7 @@ impl<T: Write> Write for Shared<T> {

fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) {
create_default_session_if_not_set_then(|_| {
let output = Arc::new(Mutex::new(Vec::new()));

let fallback_bundle = rustc_errors::fallback_fluent_bundle(
vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE],
false,
);
let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let (handler, source_map, output) = create_test_handler();
source_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());

let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
@@ -148,20 +189,6 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
println!("text: {:?}", source_map.span_to_snippet(span));
}

let emitter = EmitterWriter::new(
Box::new(Shared { data: output.clone() }),
Some(source_map.clone()),
None,
fallback_bundle,
false,
false,
false,
None,
false,
false,
TerminalUrl::No,
);
let handler = Handler::with_emitter(true, None, Box::new(emitter), None);
#[allow(rustc::untranslatable_diagnostic)]
handler.span_err(msp, "foo");

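`create_test_handler` and `with_expected_parse_error` capture the emitter's output through this file's existing `Shared` writer, which forwards `Write` calls into an `Arc<Mutex<Vec<u8>>>` so the test can later assert on what was emitted. A self-contained sketch of that capture pattern; the `SharedBuf` name and the sample message are illustrative, not the real types:

```rust
use std::io::Write;
use std::sync::{Arc, Mutex};

// Illustrative writer that appends everything into a shared byte buffer,
// in the same spirit as the `Shared` writer the emitter is wrapped in above.
struct SharedBuf(Arc<Mutex<Vec<u8>>>);

impl Write for SharedBuf {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        self.0.lock().unwrap().extend_from_slice(buf);
        Ok(buf.len())
    }
    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}

fn main() {
    let output = Arc::new(Mutex::new(Vec::new()));
    let mut sink = SharedBuf(output.clone());

    // Stand-in for whatever the diagnostic emitter would write.
    writeln!(sink, "error: expected identifier, found keyword `return`").unwrap();

    // The test side can now assert on the captured text.
    let bytes = output.lock().unwrap();
    let text = std::str::from_utf8(&bytes).unwrap();
    assert!(text.contains("found keyword `return`"));
}
```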
2 changes: 1 addition & 1 deletion compiler/rustc_monomorphize/src/partitioning.rs
@@ -394,7 +394,7 @@ fn merge_codegen_units<'tcx>(
&& codegen_units.iter().any(|cgu| cgu.size_estimate() < NON_INCR_MIN_CGU_SIZE)
{
// Sort small cgus to the back.
codegen_units.sort_by_cached_key(|cgu| cmp::Reverse(cgu.size_estimate()));
codegen_units.sort_by_key(|cgu| cmp::Reverse(cgu.size_estimate()));

let mut smallest = codegen_units.pop().unwrap();
let second_smallest = codegen_units.last_mut().unwrap();
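Switching from `sort_by_cached_key` to `sort_by_key` makes sense here, presumably because the size estimate is already computed and cheap to read, so there is nothing expensive to cache; `cmp::Reverse` keeps the descending order that pushes the smallest units to the back. A toy model of the merge step, with plain integers standing in for codegen unit sizes:

```rust
use std::cmp::Reverse;

// Toy model of the "merge the smallest unit into the second smallest" step.
fn merge_smallest(mut sizes: Vec<usize>) -> Vec<usize> {
    // Descending sort puts the smallest unit at the back; a plain `sort_by_key`
    // is enough when the key is a cheap, already-computed estimate.
    sizes.sort_by_key(|&s| Reverse(s));
    let smallest = sizes.pop().unwrap();
    // The real code moves the smallest CGU's items into the second smallest;
    // here that becomes simple addition of sizes.
    *sizes.last_mut().unwrap() += smallest;
    sizes
}

fn main() {
    assert_eq!(merge_smallest(vec![40, 5, 90, 12]), vec![90, 40, 17]);
}
```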
2 changes: 0 additions & 2 deletions compiler/rustc_session/src/options.rs
@@ -1433,8 +1433,6 @@ options! {
dep_tasks: bool = (false, parse_bool, [UNTRACKED],
"print tasks that execute and the color their dep node gets (requires debug build) \
(default: no)"),
diagnostic_width: Option<usize> = (None, parse_opt_number, [UNTRACKED],
"set the current output width for diagnostic truncation"),
dont_buffer_diagnostics: bool = (false, parse_bool, [UNTRACKED],
"emit diagnostics rather than buffering (breaks NLL error downgrading, sorting) \
(default: no)"),
2 changes: 2 additions & 0 deletions library/std/src/fs/tests.rs
@@ -957,6 +957,7 @@ fn readlink_not_symlink() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating hardlinks
fn links_work() {
let tmpdir = tmpdir();
let input = tmpdir.join("in.txt");
@@ -1453,6 +1454,7 @@ fn metadata_access_times() {

/// Test creating hard links to symlinks.
#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating hardlinks
fn symlink_hard_link() {
let tmpdir = tmpdir();
if !got_symlink_permission(&tmpdir) {
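The `#[cfg_attr(target_os = "android", ignore)]` lines keep these tests compiling on every target but tell the harness to skip them on Android, where SELinux forbids the underlying operation; they can still be run explicitly with `--include-ignored`. A minimal illustration of the same attribute shape, with a hypothetical test name and body:

```rust
use std::fs;

// Hypothetical test with the same attribute shape: it still compiles on
// Android, but the harness skips it there unless asked to run ignored tests.
#[test]
#[cfg_attr(target_os = "android", ignore)] // SELinux forbids hard links
fn hard_link_roundtrip() {
    let dir = std::env::temp_dir();
    let src = dir.join("hardlink_src.txt");
    let dst = dir.join("hardlink_dst.txt");
    let _ = fs::remove_file(&dst);
    fs::write(&src, b"hello").unwrap();
    fs::hard_link(&src, &dst).unwrap();
    assert_eq!(fs::read(&dst).unwrap(), b"hello".to_vec());
    let _ = fs::remove_file(&src);
    let _ = fs::remove_file(&dst);
}
```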
18 changes: 18 additions & 0 deletions library/std/src/os/unix/net/tests.rs
@@ -23,6 +23,7 @@ macro_rules! or_panic {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn basic() {
let dir = tmpdir();
let socket_path = dir.path().join("sock");
@@ -93,6 +94,7 @@ fn pair() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn try_clone() {
let dir = tmpdir();
let socket_path = dir.path().join("sock");
@@ -119,6 +121,7 @@ fn try_clone() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn iter() {
let dir = tmpdir();
let socket_path = dir.path().join("sock");
@@ -168,6 +171,7 @@ fn long_path() {

#[test]
#[cfg(not(target_os = "nto"))]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn timeouts() {
let dir = tmpdir();
let socket_path = dir.path().join("sock");
@@ -195,6 +199,7 @@ fn timeouts() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_read_timeout() {
let dir = tmpdir();
let socket_path = dir.path().join("sock");
@@ -214,6 +219,7 @@ fn test_read_timeout() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_read_with_timeout() {
let dir = tmpdir();
let socket_path = dir.path().join("sock");
@@ -241,6 +247,7 @@ fn test_read_with_timeout() {
// Ensure the `set_read_timeout` and `set_write_timeout` calls return errors
// when passed zero Durations
#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_unix_stream_timeout_zero_duration() {
let dir = tmpdir();
let socket_path = dir.path().join("sock");
@@ -260,6 +267,7 @@ fn test_unix_stream_timeout_zero_duration() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_unix_datagram() {
let dir = tmpdir();
let path1 = dir.path().join("sock1");
@@ -276,6 +284,7 @@ fn test_unix_datagram() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_unnamed_unix_datagram() {
let dir = tmpdir();
let path1 = dir.path().join("sock1");
@@ -293,6 +302,7 @@ fn test_unnamed_unix_datagram() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_unix_datagram_connect_to_recv_addr() {
let dir = tmpdir();
let path1 = dir.path().join("sock1");
@@ -317,6 +327,7 @@ fn test_unix_datagram_connect_to_recv_addr() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_connect_unix_datagram() {
let dir = tmpdir();
let path1 = dir.path().join("sock1");
@@ -343,6 +354,7 @@ fn test_connect_unix_datagram() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_unix_datagram_recv() {
let dir = tmpdir();
let path1 = dir.path().join("sock1");
@@ -385,6 +397,7 @@ fn datagram_pair() {
// Ensure the `set_read_timeout` and `set_write_timeout` calls return errors
// when passed zero Durations
#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_unix_datagram_timeout_zero_duration() {
let dir = tmpdir();
let path = dir.path().join("sock");
@@ -529,6 +542,7 @@ fn test_abstract_no_pathname_and_not_unnamed() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_unix_stream_peek() {
let (txdone, rxdone) = crate::sync::mpsc::channel();

@@ -561,6 +575,7 @@ fn test_unix_stream_peek() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_unix_datagram_peek() {
let dir = tmpdir();
let path1 = dir.path().join("sock");
@@ -585,6 +600,7 @@ fn test_unix_datagram_peek() {
}

#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_unix_datagram_peek_from() {
let dir = tmpdir();
let path1 = dir.path().join("sock");
@@ -648,6 +664,7 @@ fn test_send_vectored_fds_unix_stream() {

#[cfg(any(target_os = "android", target_os = "linux", target_os = "freebsd"))]
#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_send_vectored_with_ancillary_to_unix_datagram() {
fn getpid() -> libc::pid_t {
unsafe { libc::getpid() }
@@ -715,6 +732,7 @@ fn test_send_vectored_with_ancillary_to_unix_datagram() {

#[cfg(any(target_os = "android", target_os = "linux"))]
#[test]
#[cfg_attr(target_os = "android", ignore)] // Android SELinux rules prevent creating Unix sockets
fn test_send_vectored_with_ancillary_unix_datagram() {
let dir = tmpdir();
let path1 = dir.path().join("sock1");
19 changes: 19 additions & 0 deletions library/std/src/sys_common/backtrace.rs
@@ -60,6 +60,8 @@ unsafe fn _print_fmt(fmt: &mut fmt::Formatter<'_>, print_fmt: PrintFmt) -> fmt::
bt_fmt.add_context()?;
let mut idx = 0;
let mut res = Ok(());
let mut omitted_count: usize = 0;
let mut first_omit = true;
// Start immediately if we're not using a short backtrace.
let mut start = print_fmt != PrintFmt::Short;
backtrace_rs::trace_unsynchronized(|frame| {
@@ -85,10 +87,27 @@ unsafe fn _print_fmt(fmt: &mut fmt::Formatter<'_>, print_fmt: PrintFmt) -> fmt::
start = true;
return;
}
if !start {
omitted_count += 1;
}
}
}

if start {
if omitted_count > 0 {
debug_assert!(print_fmt == PrintFmt::Short);
// only print the message for frames omitted between printed frames
if !first_omit {
let _ = writeln!(
bt_fmt.formatter(),
" [... omitted {} frame{} ...]",
omitted_count,
if omitted_count > 1 { "s" } else { "" }
);
}
first_omit = false;
omitted_count = 0;
}
res = bt_fmt.frame().symbol(frame, symbol);
}
});
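The new counters accumulate the frames that the short format hides and emit a single "[... omitted N frames ...]" line in their place, skipping the note for the first hidden batch so nothing is printed before the first visible frame. A small stand-alone model of that logic, with `(name, visible)` pairs standing in for real symbolized frames:

```rust
// Stand-alone model of the omission logic added above.
fn render_short_backtrace(frames: &[(&str, bool)]) -> String {
    let mut out = String::new();
    let mut omitted_count: usize = 0;
    let mut first_omit = true;
    let mut idx = 0;
    for &(name, visible) in frames {
        if !visible {
            // Hidden frame: just count it.
            omitted_count += 1;
            continue;
        }
        if omitted_count > 0 {
            // Report the hidden frames, but not before the first visible one.
            if !first_omit {
                out.push_str(&format!(
                    "      [... omitted {} frame{} ...]\n",
                    omitted_count,
                    if omitted_count > 1 { "s" } else { "" }
                ));
            }
            first_omit = false;
            omitted_count = 0;
        }
        out.push_str(&format!("  {idx}: {name}\n"));
        idx += 1;
    }
    out
}

fn main() {
    let rendered = render_short_backtrace(&[
        ("std::rt::lang_start", false), // leading hidden frames are skipped silently
        ("std::panicking::begin_panic", true),
        ("core::ops::function::impls::call_once", false),
        ("alloc::boxed::call_once", false),
        ("app::fifth", true),
        ("app::main", true),
    ]);
    assert!(rendered.contains("[... omitted 2 frames ...]"));
    print!("{rendered}");
}
```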
2 changes: 1 addition & 1 deletion src/tools/build-manifest/Cargo.toml
@@ -8,7 +8,7 @@ toml = "0.5"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
anyhow = "1.0.32"
flate2 = "1.0.16"
flate2 = "1.0.26"
xz2 = "0.1.7"
tar = "0.4.29"
sha2 = "0.10.1"
2 changes: 1 addition & 1 deletion tests/ui/consts/const-eval/const-eval-query-stack.rs
@@ -10,7 +10,7 @@
// normalize-stderr-test "\s\d{1,}: .*\n" -> ""
// normalize-stderr-test "\s at .*\n" -> ""
// normalize-stderr-test ".*note: Some details.*\n" -> ""

// normalize-stderr-test ".*omitted \d{1,} frames.*\n" -> ""
#![allow(unconditional_panic)]

const X: i32 = 1 / 0; //~ERROR constant
2 changes: 2 additions & 0 deletions tests/ui/panics/short-ice-remove-middle-frames-2.run.stderr
@@ -3,8 +3,10 @@ stack backtrace:
0: std::panicking::begin_panic
1: short_ice_remove_middle_frames_2::eight
2: short_ice_remove_middle_frames_2::seven::{{closure}}
[... omitted 3 frames ...]
3: short_ice_remove_middle_frames_2::fifth
4: short_ice_remove_middle_frames_2::fourth::{{closure}}
[... omitted 4 frames ...]
5: short_ice_remove_middle_frames_2::first
6: short_ice_remove_middle_frames_2::main
7: core::ops::function::FnOnce::call_once
1 change: 1 addition & 0 deletions tests/ui/panics/short-ice-remove-middle-frames.run.stderr
@@ -4,6 +4,7 @@ stack backtrace:
1: short_ice_remove_middle_frames::seven
2: short_ice_remove_middle_frames::sixth
3: short_ice_remove_middle_frames::fifth::{{closure}}
[... omitted 4 frames ...]
4: short_ice_remove_middle_frames::second
5: short_ice_remove_middle_frames::first::{{closure}}
6: short_ice_remove_middle_frames::first