diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs index cb4209116ac65..7ab254e5f4b84 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs @@ -29,10 +29,9 @@ use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, FatalError, struct_span_code_err, }; -use rustc_hir as hir; use rustc_hir::def::{CtorKind, CtorOf, DefKind, Namespace, Res}; use rustc_hir::def_id::{DefId, LocalDefId}; -use rustc_hir::{GenericArg, GenericArgs, HirId}; +use rustc_hir::{self as hir, AnonConst, GenericArg, GenericArgs, HirId}; use rustc_infer::infer::{InferCtxt, TyCtxtInferExt}; use rustc_infer::traits::ObligationCause; use rustc_middle::middle::stability::AllowUnstable; @@ -2089,7 +2088,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { qpath.span(), format!("Const::lower_const_arg: invalid qpath {qpath:?}"), ), - hir::ConstArgKind::Anon(anon) => self.lower_anon_const(anon.def_id), + hir::ConstArgKind::Anon(anon) => self.lower_anon_const(anon), hir::ConstArgKind::Infer(span) => self.ct_infer(None, span), } } @@ -2180,27 +2179,22 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { /// Literals and const generic parameters are eagerly converted to a constant, everything else /// becomes `Unevaluated`. 
#[instrument(skip(self), level = "debug")] - fn lower_anon_const(&self, def: LocalDefId) -> Const<'tcx> { + fn lower_anon_const(&self, anon: &AnonConst) -> Const<'tcx> { let tcx = self.tcx(); - let body_id = match tcx.hir_node_by_def_id(def) { - hir::Node::AnonConst(ac) => ac.body, - node => span_bug!( - tcx.def_span(def.to_def_id()), - "from_anon_const can only process anonymous constants, not {node:?}" - ), - }; - - let expr = &tcx.hir().body(body_id).value; + let expr = &tcx.hir().body(anon.body).value; debug!(?expr); - let ty = tcx.type_of(def).no_bound_vars().expect("const parameter types cannot be generic"); + let ty = tcx + .type_of(anon.def_id) + .no_bound_vars() + .expect("const parameter types cannot be generic"); match self.try_lower_anon_const_lit(ty, expr) { Some(v) => v, None => ty::Const::new_unevaluated(tcx, ty::UnevaluatedConst { - def: def.to_def_id(), - args: ty::GenericArgs::identity_for_item(tcx, def.to_def_id()), + def: anon.def_id.to_def_id(), + args: ty::GenericArgs::identity_for_item(tcx, anon.def_id.to_def_id()), }), } } diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index df110fdc20aea..8861f0814856a 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -916,6 +916,12 @@ rustc_queries! { cache_on_disk_if { true } } + /// Checks well-formedness of tail calls (`become f()`). + query check_tail_calls(key: LocalDefId) -> Result<(), rustc_errors::ErrorGuaranteed> { + desc { |tcx| "tail-call-checking `{}`", tcx.def_path_str(key) } + cache_on_disk_if { true } + } + /// Returns the types assumed to be well formed while "inside" of the given item. 
/// /// Note that we've liberated the late bound regions of function signatures, so diff --git a/compiler/rustc_mir_build/src/build/mod.rs b/compiler/rustc_mir_build/src/build/mod.rs index 3317f3b7f8acb..f43c29d8f5d68 100644 --- a/compiler/rustc_mir_build/src/build/mod.rs +++ b/compiler/rustc_mir_build/src/build/mod.rs @@ -50,6 +50,10 @@ pub(crate) fn mir_build<'tcx>(tcx: TyCtxtAt<'tcx>, def: LocalDefId) -> Body<'tcx return construct_error(tcx, def, e); } + if let Err(err) = tcx.check_tail_calls(def) { + return construct_error(tcx, def, err); + } + let body = match tcx.thir_body(def) { Err(error_reported) => construct_error(tcx, def, error_reported), Ok((thir, expr)) => { diff --git a/compiler/rustc_mir_build/src/check_tail_calls.rs b/compiler/rustc_mir_build/src/check_tail_calls.rs new file mode 100644 index 0000000000000..b1f46d37d5062 --- /dev/null +++ b/compiler/rustc_mir_build/src/check_tail_calls.rs @@ -0,0 +1,386 @@ +use rustc_abi::ExternAbi; +use rustc_errors::Applicability; +use rustc_hir::LangItem; +use rustc_hir::def::DefKind; +use rustc_middle::span_bug; +use rustc_middle::thir::visit::{self, Visitor}; +use rustc_middle::thir::{BodyTy, Expr, ExprId, ExprKind, Thir}; +use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_span::def_id::{DefId, LocalDefId}; +use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span}; + +pub(crate) fn check_tail_calls(tcx: TyCtxt<'_>, def: LocalDefId) -> Result<(), ErrorGuaranteed> { + let (thir, expr) = tcx.thir_body(def)?; + let thir = &thir.borrow(); + + // If `thir` is empty, a type error occurred, skip this body. + if thir.exprs.is_empty() { + return Ok(()); + } + + let is_closure = matches!(tcx.def_kind(def), DefKind::Closure); + let caller_ty = tcx.type_of(def).skip_binder(); + + let mut visitor = TailCallCkVisitor { + tcx, + thir, + found_errors: Ok(()), + // FIXME(#132279): we're clearly in a body here. 
+        typing_env: ty::TypingEnv::non_body_analysis(tcx, def),
+        is_closure,
+        caller_ty,
+    };
+
+    visitor.visit_expr(&thir[expr]);
+
+    visitor.found_errors
+}
+
+struct TailCallCkVisitor<'a, 'tcx> {
+    tcx: TyCtxt<'tcx>,
+    thir: &'a Thir<'tcx>,
+    typing_env: ty::TypingEnv<'tcx>,
+    /// Whether the currently checked body is that of a closure
+    is_closure: bool,
+    /// The result of the checks, `Err(_)` if there was a problem with some
+    /// tail call, `Ok(())` if all of them were fine.
+    found_errors: Result<(), ErrorGuaranteed>,
+    /// Type of the caller function.
+    caller_ty: Ty<'tcx>,
+}
+
+impl<'tcx> TailCallCkVisitor<'_, 'tcx> {
+    fn check_tail_call(&mut self, call: &Expr<'_>, expr: &Expr<'_>) {
+        if self.is_closure {
+            self.report_in_closure(expr);
+            return;
+        }
+
+        let BodyTy::Fn(caller_sig) = self.thir.body_type else {
+            span_bug!(
+                call.span,
+                "`become` outside of functions should have been disallowed by hir_typeck"
+            )
+        };
+
+        let ExprKind::Scope { value, .. } = call.kind else {
+            span_bug!(call.span, "expected scope, found: {call:?}")
+        };
+        let value = &self.thir[value];
+
+        if matches!(
+            value.kind,
+            ExprKind::Binary { .. }
+                | ExprKind::Unary { .. }
+                | ExprKind::AssignOp { .. }
+                | ExprKind::Index { .. }
+        ) {
+            self.report_builtin_op(call, expr);
+            return;
+        }
+
+        let ExprKind::Call { ty, fun, ref args, from_hir_call, fn_span } = value.kind else {
+            self.report_non_call(value, expr);
+            return;
+        };
+
+        if !from_hir_call {
+            self.report_op(ty, args, fn_span, expr);
+        }
+
+        // Closures in thir look something akin to
+        // `for<'a> extern "rust-call" fn(&'a [closure@...], ()) -> <[closure@...] as FnOnce<()>>::Output {<[closure@...] as Fn<()>>::call}`
+        // So we have to check for them in this weird way...
+ if let &ty::FnDef(did, args) = ty.kind() { + let parent = self.tcx.parent(did); + if self.tcx.fn_trait_kind_from_def_id(parent).is_some() + && args.first().and_then(|arg| arg.as_type()).is_some_and(Ty::is_closure) + { + self.report_calling_closure(&self.thir[fun], args[1].as_type().unwrap(), expr); + + // Tail calling is likely to cause unrelated errors (ABI, argument mismatches), + // skip them, producing an error about calling a closure is enough. + return; + }; + } + + // Erase regions since tail calls don't care about lifetimes + let callee_sig = + self.tcx.normalize_erasing_late_bound_regions(self.typing_env, ty.fn_sig(self.tcx)); + + if caller_sig.abi != callee_sig.abi { + self.report_abi_mismatch(expr.span, caller_sig.abi, callee_sig.abi); + } + + if caller_sig.inputs_and_output != callee_sig.inputs_and_output { + if caller_sig.inputs() != callee_sig.inputs() { + self.report_arguments_mismatch(expr.span, caller_sig, callee_sig); + } + + // FIXME(explicit_tail_calls): this currenly fails for cases where opaques are used. + // e.g. + // ``` + // fn a() -> impl Sized { become b() } // ICE + // fn b() -> u8 { 0 } + // ``` + // we should think what is the expected behavior here. + // (we should probably just accept this by revealing opaques?) + if caller_sig.output() != callee_sig.output() { + span_bug!(expr.span, "hir typeck should have checked the return type already"); + } + } + + { + let caller_needs_location = self.needs_location(self.caller_ty); + let callee_needs_location = self.needs_location(ty); + + if caller_needs_location != callee_needs_location { + self.report_track_caller_mismatch(expr.span, caller_needs_location); + } + } + + if caller_sig.c_variadic { + self.report_c_variadic_caller(expr.span); + } + + if callee_sig.c_variadic { + self.report_c_variadic_callee(expr.span); + } + } + + /// Returns true if function of type `ty` needs location argument + /// (i.e. 
if a function is marked as `#[track_caller]`) + fn needs_location(&self, ty: Ty<'tcx>) -> bool { + if let &ty::FnDef(did, substs) = ty.kind() { + let instance = + ty::Instance::expect_resolve(self.tcx, self.typing_env, did, substs, DUMMY_SP); + + instance.def.requires_caller_location(self.tcx) + } else { + false + } + } + + fn report_in_closure(&mut self, expr: &Expr<'_>) { + let err = self.tcx.dcx().span_err(expr.span, "`become` is not allowed in closures"); + self.found_errors = Err(err); + } + + fn report_builtin_op(&mut self, value: &Expr<'_>, expr: &Expr<'_>) { + let err = self + .tcx + .dcx() + .struct_span_err(value.span, "`become` does not support operators") + .with_note("using `become` on a builtin operator is not useful") + .with_span_suggestion( + value.span.until(expr.span), + "try using `return` instead", + "return ", + Applicability::MachineApplicable, + ) + .emit(); + self.found_errors = Err(err); + } + + fn report_op(&mut self, fun_ty: Ty<'_>, args: &[ExprId], fn_span: Span, expr: &Expr<'_>) { + let mut err = + self.tcx.dcx().struct_span_err(fn_span, "`become` does not support operators"); + + if let &ty::FnDef(did, _substs) = fun_ty.kind() + && let parent = self.tcx.parent(did) + && matches!(self.tcx.def_kind(parent), DefKind::Trait) + && let Some(method) = op_trait_as_method_name(self.tcx, parent) + { + match args { + &[arg] => { + let arg = &self.thir[arg]; + + err.multipart_suggestion( + "try using the method directly", + vec![ + (fn_span.shrink_to_lo().until(arg.span), "(".to_owned()), + (arg.span.shrink_to_hi(), format!(").{method}()")), + ], + Applicability::MaybeIncorrect, + ); + } + &[lhs, rhs] => { + let lhs = &self.thir[lhs]; + let rhs = &self.thir[rhs]; + + err.multipart_suggestion( + "try using the method directly", + vec![ + (lhs.span.shrink_to_lo(), format!("(")), + (lhs.span.between(rhs.span), format!(").{method}(")), + (rhs.span.between(expr.span.shrink_to_hi()), ")".to_owned()), + ], + Applicability::MaybeIncorrect, + ); + } + _ 
=> span_bug!(expr.span, "operator with more than 2 args? {args:?}"), + } + } + + self.found_errors = Err(err.emit()); + } + + fn report_non_call(&mut self, value: &Expr<'_>, expr: &Expr<'_>) { + let err = self + .tcx + .dcx() + .struct_span_err(value.span, "`become` requires a function call") + .with_span_note(value.span, "not a function call") + .with_span_suggestion( + value.span.until(expr.span), + "try using `return` instead", + "return ", + Applicability::MaybeIncorrect, + ) + .emit(); + self.found_errors = Err(err); + } + + fn report_calling_closure(&mut self, fun: &Expr<'_>, tupled_args: Ty<'_>, expr: &Expr<'_>) { + let underscored_args = match tupled_args.kind() { + ty::Tuple(tys) if tys.is_empty() => "".to_owned(), + ty::Tuple(tys) => std::iter::repeat("_, ").take(tys.len() - 1).chain(["_"]).collect(), + _ => "_".to_owned(), + }; + + let err = self + .tcx + .dcx() + .struct_span_err(expr.span, "tail calling closures directly is not allowed") + .with_multipart_suggestion( + "try casting the closure to a function pointer type", + vec![ + (fun.span.shrink_to_lo(), "(".to_owned()), + (fun.span.shrink_to_hi(), format!(" as fn({underscored_args}) -> _)")), + ], + Applicability::MaybeIncorrect, + ) + .emit(); + self.found_errors = Err(err); + } + + fn report_abi_mismatch(&mut self, sp: Span, caller_abi: ExternAbi, callee_abi: ExternAbi) { + let err = self + .tcx + .dcx() + .struct_span_err(sp, "mismatched function ABIs") + .with_note("`become` requires caller and callee to have the same ABI") + .with_note(format!("caller ABI is `{caller_abi}`, while callee ABI is `{callee_abi}`")) + .emit(); + self.found_errors = Err(err); + } + + fn report_arguments_mismatch( + &mut self, + sp: Span, + caller_sig: ty::FnSig<'_>, + callee_sig: ty::FnSig<'_>, + ) { + let err = self + .tcx + .dcx() + .struct_span_err(sp, "mismatched signatures") + .with_note("`become` requires caller and callee to have matching signatures") + .with_note(format!("caller signature: `{caller_sig}`")) 
+            .with_note(format!("callee signature: `{callee_sig}`"))
+            .emit();
+        self.found_errors = Err(err);
+    }
+
+    fn report_track_caller_mismatch(&mut self, sp: Span, caller_needs_location: bool) {
+        let err = match caller_needs_location {
+            true => self
+                .tcx
+                .dcx()
+                .struct_span_err(
+                    sp,
+                    "a function marked with `#[track_caller]` cannot tail-call one that is not",
+                )
+                .emit(),
+            false => self
+                .tcx
+                .dcx()
+                .struct_span_err(
+                    sp,
+                    "a function not marked with `#[track_caller]` cannot tail-call one that is",
+                )
+                .emit(),
+        };
+
+        self.found_errors = Err(err);
+    }
+
+    fn report_c_variadic_caller(&mut self, sp: Span) {
+        let err = self
+            .tcx
+            .dcx()
+            // FIXME(explicit_tail_calls): highlight the `...`
+            .struct_span_err(sp, "tail-calls are not allowed in c-variadic functions")
+            .emit();
+
+        self.found_errors = Err(err);
+    }
+
+    fn report_c_variadic_callee(&mut self, sp: Span) {
+        let err = self
+            .tcx
+            .dcx()
+            // FIXME(explicit_tail_calls): highlight the function or something...
+            .struct_span_err(sp, "c-variadic functions can't be tail-called")
+            .emit();
+
+        self.found_errors = Err(err);
+    }
+}
+
+impl<'a, 'tcx> Visitor<'a, 'tcx> for TailCallCkVisitor<'a, 'tcx> {
+    fn thir(&self) -> &'a Thir<'tcx> {
+        &self.thir
+    }
+
+    fn visit_expr(&mut self, expr: &'a Expr<'tcx>) {
+        if let ExprKind::Become { value } = expr.kind {
+            let call = &self.thir[value];
+            self.check_tail_call(call, expr);
+        }
+
+        visit::walk_expr(self, expr);
+    }
+}
+
+fn op_trait_as_method_name(tcx: TyCtxt<'_>, trait_did: DefId) -> Option<&'static str> {
+    let m = match tcx.as_lang_item(trait_did)?
{ + LangItem::Add => "add", + LangItem::Sub => "sub", + LangItem::Mul => "mul", + LangItem::Div => "div", + LangItem::Rem => "rem", + LangItem::Neg => "neg", + LangItem::Not => "not", + LangItem::BitXor => "bitxor", + LangItem::BitAnd => "bitand", + LangItem::BitOr => "bitor", + LangItem::Shl => "shl", + LangItem::Shr => "shr", + LangItem::AddAssign => "add_assign", + LangItem::SubAssign => "sub_assign", + LangItem::MulAssign => "mul_assign", + LangItem::DivAssign => "div_assign", + LangItem::RemAssign => "rem_assign", + LangItem::BitXorAssign => "bitxor_assign", + LangItem::BitAndAssign => "bitand_assign", + LangItem::BitOrAssign => "bitor_assign", + LangItem::ShlAssign => "shl_assign", + LangItem::ShrAssign => "shr_assign", + LangItem::Index => "index", + LangItem::IndexMut => "index_mut", + _ => return None, + }; + + Some(m) +} diff --git a/compiler/rustc_mir_build/src/lib.rs b/compiler/rustc_mir_build/src/lib.rs index 3dbb552cdbbef..833e5019865e8 100644 --- a/compiler/rustc_mir_build/src/lib.rs +++ b/compiler/rustc_mir_build/src/lib.rs @@ -12,6 +12,7 @@ // tidy-alphabetical-end mod build; +mod check_tail_calls; mod check_unsafety; mod errors; pub mod lints; @@ -28,6 +29,7 @@ pub fn provide(providers: &mut Providers) { providers.closure_saved_names_of_captured_variables = build::closure_saved_names_of_captured_variables; providers.check_unsafety = check_unsafety::check_unsafety; + providers.check_tail_calls = check_tail_calls::check_tail_calls; providers.thir_body = thir::cx::thir_body; providers.hooks.thir_tree = thir::print::thir_tree; providers.hooks.thir_flat = thir::print::thir_flat; diff --git a/library/core/src/unicode/printable.py b/library/core/src/unicode/printable.py index 4d39ace066c46..260fa9f9e6ad2 100755 --- a/library/core/src/unicode/printable.py +++ b/library/core/src/unicode/printable.py @@ -9,7 +9,8 @@ import os import subprocess -NUM_CODEPOINTS=0x110000 +NUM_CODEPOINTS = 0x110000 + def to_ranges(iter): current = None @@ -23,11 +24,15 @@ def 
to_ranges(iter): if current is not None: yield tuple(current) + def get_escaped(codepoints): for c in codepoints: - if (c.class_ or "Cn") in "Cc Cf Cs Co Cn Zl Zp Zs".split() and c.value != ord(' '): + if (c.class_ or "Cn") in "Cc Cf Cs Co Cn Zl Zp Zs".split() and c.value != ord( + " " + ): yield c.value + def get_file(f): try: return open(os.path.basename(f)) @@ -35,7 +40,9 @@ def get_file(f): subprocess.run(["curl", "-O", f], check=True) return open(os.path.basename(f)) -Codepoint = namedtuple('Codepoint', 'value class_') + +Codepoint = namedtuple("Codepoint", "value class_") + def get_codepoints(f): r = csv.reader(f, delimiter=";") @@ -66,13 +73,14 @@ def get_codepoints(f): for c in range(prev_codepoint + 1, NUM_CODEPOINTS): yield Codepoint(c, None) + def compress_singletons(singletons): - uppers = [] # (upper, # items in lowers) + uppers = [] # (upper, # items in lowers) lowers = [] for i in singletons: upper = i >> 8 - lower = i & 0xff + lower = i & 0xFF if len(uppers) == 0 or uppers[-1][0] != upper: uppers.append((upper, 1)) else: @@ -82,10 +90,11 @@ def compress_singletons(singletons): return uppers, lowers + def compress_normal(normal): # lengths 0x00..0x7f are encoded as 00, 01, ..., 7e, 7f # lengths 0x80..0x7fff are encoded as 80 80, 80 81, ..., ff fe, ff ff - compressed = [] # [truelen, (truelenaux), falselen, (falselenaux)] + compressed = [] # [truelen, (truelenaux), falselen, (falselenaux)] prev_start = 0 for start, count in normal: @@ -95,21 +104,22 @@ def compress_normal(normal): assert truelen < 0x8000 and falselen < 0x8000 entry = [] - if truelen > 0x7f: + if truelen > 0x7F: entry.append(0x80 | (truelen >> 8)) - entry.append(truelen & 0xff) + entry.append(truelen & 0xFF) else: - entry.append(truelen & 0x7f) - if falselen > 0x7f: + entry.append(truelen & 0x7F) + if falselen > 0x7F: entry.append(0x80 | (falselen >> 8)) - entry.append(falselen & 0xff) + entry.append(falselen & 0xFF) else: - entry.append(falselen & 0x7f) + entry.append(falselen & 0x7F) 
compressed.append(entry) return compressed + def print_singletons(uppers, lowers, uppersname, lowersname): print("#[rustfmt::skip]") print("const {}: &[(u8, u8)] = &[".format(uppersname)) @@ -119,9 +129,12 @@ def print_singletons(uppers, lowers, uppersname, lowersname): print("#[rustfmt::skip]") print("const {}: &[u8] = &[".format(lowersname)) for i in range(0, len(lowers), 8): - print(" {}".format(" ".join("{:#04x},".format(x) for x in lowers[i:i+8]))) + print( + " {}".format(" ".join("{:#04x},".format(x) for x in lowers[i : i + 8])) + ) print("];") + def print_normal(normal, normalname): print("#[rustfmt::skip]") print("const {}: &[u8] = &[".format(normalname)) @@ -129,12 +142,13 @@ def print_normal(normal, normalname): print(" {}".format(" ".join("{:#04x},".format(i) for i in v))) print("];") + def main(): file = get_file("https://www.unicode.org/Public/UNIDATA/UnicodeData.txt") codepoints = get_codepoints(file) - CUTOFF=0x10000 + CUTOFF = 0x10000 singletons0 = [] singletons1 = [] normal0 = [] @@ -234,10 +248,11 @@ def main(): }\ """) print() - print_singletons(singletons0u, singletons0l, 'SINGLETONS0U', 'SINGLETONS0L') - print_singletons(singletons1u, singletons1l, 'SINGLETONS1U', 'SINGLETONS1L') - print_normal(normal0, 'NORMAL0') - print_normal(normal1, 'NORMAL1') + print_singletons(singletons0u, singletons0l, "SINGLETONS0U", "SINGLETONS0L") + print_singletons(singletons1u, singletons1l, "SINGLETONS1U", "SINGLETONS1L") + print_normal(normal0, "NORMAL0") + print_normal(normal1, "NORMAL1") + -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 9ef9ccec1937f..89415afbe3bc4 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -19,14 +19,17 @@ except ImportError: lzma = None + def platform_is_win32(): - return sys.platform == 'win32' + return sys.platform == "win32" + if platform_is_win32(): EXE_SUFFIX = ".exe" else: EXE_SUFFIX = "" + def get_cpus(): if 
hasattr(os, "sched_getaffinity"): return len(os.sched_getaffinity(0)) @@ -51,11 +54,14 @@ def get(base, url, path, checksums, verbose=False): try: if url not in checksums: - raise RuntimeError(("src/stage0 doesn't contain a checksum for {}. " - "Pre-built artifacts might not be available for this " - "target at this time, see https://doc.rust-lang.org/nightly" - "/rustc/platform-support.html for more information.") - .format(url)) + raise RuntimeError( + ( + "src/stage0 doesn't contain a checksum for {}. " + "Pre-built artifacts might not be available for this " + "target at this time, see https://doc.rust-lang.org/nightly" + "/rustc/platform-support.html for more information." + ).format(url) + ) sha256 = checksums[url] if os.path.exists(path): if verify(path, sha256, False): @@ -64,8 +70,11 @@ def get(base, url, path, checksums, verbose=False): return else: if verbose: - eprint("ignoring already-download file", - path, "due to failed verification") + eprint( + "ignoring already-download file", + path, + "due to failed verification", + ) os.unlink(path) download(temp_path, "{}/{}".format(base, url), True, verbose) if not verify(temp_path, sha256, verbose): @@ -79,12 +88,14 @@ def get(base, url, path, checksums, verbose=False): eprint("removing", temp_path) os.unlink(temp_path) + def curl_version(): m = re.match(bytes("^curl ([0-9]+)\\.([0-9]+)", "utf8"), require(["curl", "-V"])) if m is None: return (0, 0) return (int(m[1]), int(m[2])) + def download(path, url, probably_big, verbose): for _ in range(4): try: @@ -114,32 +125,53 @@ def _download(path, url, probably_big, verbose, exception): require(["curl", "--version"], exception=platform_is_win32()) extra_flags = [] if curl_version() > (7, 70): - extra_flags = [ "--retry-all-errors" ] + extra_flags = ["--retry-all-errors"] # options should be kept in sync with # src/bootstrap/src/core/download.rs # for consistency. # they are also more compreprensivly explained in that file. 
- run(["curl", option] + extra_flags + [ - # Follow redirect. - "--location", - # timeout if speed is < 10 bytes/sec for > 30 seconds - "--speed-time", "30", "--speed-limit", "10", - # timeout if cannot connect within 30 seconds - "--connect-timeout", "30", - "--output", path, - "--continue-at", "-", - "--retry", "3", "--show-error", "--remote-time", "--fail", url], + run( + ["curl", option] + + extra_flags + + [ + # Follow redirect. + "--location", + # timeout if speed is < 10 bytes/sec for > 30 seconds + "--speed-time", + "30", + "--speed-limit", + "10", + # timeout if cannot connect within 30 seconds + "--connect-timeout", + "30", + "--output", + path, + "--continue-at", + "-", + "--retry", + "3", + "--show-error", + "--remote-time", + "--fail", + url, + ], verbose=verbose, - exception=True, # Will raise RuntimeError on failure + exception=True, # Will raise RuntimeError on failure ) except (subprocess.CalledProcessError, OSError, RuntimeError): # see http://serverfault.com/questions/301128/how-to-download + script = "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;" if platform_is_win32(): - run_powershell([ - "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;", - "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')".format(url, path)], + run_powershell( + [ + script, + "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')".format( + url, path + ), + ], verbose=verbose, - exception=exception) + exception=exception, + ) # Check if the RuntimeError raised by run(curl) should be silenced elif verbose or exception: raise @@ -153,9 +185,11 @@ def verify(path, expected, verbose): found = hashlib.sha256(source.read()).hexdigest() verified = found == expected if not verified: - eprint("invalid checksum:\n" - " found: {}\n" - " expected: {}".format(found, expected)) + eprint( + "invalid checksum:\n" " found: {}\n" " expected: {}".format( + found, expected + ) + ) return verified @@ -170,7 +204,7 
@@ def unpack(tarball, tarball_suffix, dst, verbose=False, match=None): name = member.replace(fname + "/", "", 1) if match is not None and not name.startswith(match): continue - name = name[len(match) + 1:] + name = name[len(match) + 1 :] dst_path = os.path.join(dst, name) if verbose: @@ -186,18 +220,18 @@ def unpack(tarball, tarball_suffix, dst, verbose=False, match=None): def run(args, verbose=False, exception=False, is_bootstrap=False, **kwargs): """Run a child program in a new process""" if verbose: - eprint("running: " + ' '.join(args)) + eprint("running: " + " ".join(args)) sys.stdout.flush() # Ensure that the .exe is used on Windows just in case a Linux ELF has been # compiled in the same directory. - if os.name == 'nt' and not args[0].endswith('.exe'): - args[0] += '.exe' + if os.name == "nt" and not args[0].endswith(".exe"): + args[0] += ".exe" # Use Popen here instead of call() as it apparently allows powershell on # Windows to not lock up waiting for input presumably. ret = subprocess.Popen(args, **kwargs) code = ret.wait() if code != 0: - err = "failed to run: " + ' '.join(args) + err = "failed to run: " + " ".join(args) if verbose or exception: raise RuntimeError(err) # For most failures, we definitely do want to print this error, or the user will have no @@ -209,30 +243,30 @@ def run(args, verbose=False, exception=False, is_bootstrap=False, **kwargs): else: sys.exit(err) + def run_powershell(script, *args, **kwargs): """Run a powershell script""" run(["PowerShell.exe", "/nologo", "-Command"] + script, *args, **kwargs) def require(cmd, exit=True, exception=False): - '''Run a command, returning its output. + """Run a command, returning its output. 
On error, If `exception` is `True`, raise the error Otherwise If `exit` is `True`, exit the process - Else return None.''' + Else return None.""" try: return subprocess.check_output(cmd).strip() except (subprocess.CalledProcessError, OSError) as exc: if exception: raise elif exit: - eprint("ERROR: unable to run `{}`: {}".format(' '.join(cmd), exc)) + eprint("ERROR: unable to run `{}`: {}".format(" ".join(cmd), exc)) eprint("Please make sure it's installed and in the path.") sys.exit(1) return None - def format_build_time(duration): """Return a nicer format for build time @@ -252,13 +286,16 @@ def default_build_triple(verbose): if platform_is_win32(): try: - version = subprocess.check_output(["rustc", "--version", "--verbose"], - stderr=subprocess.DEVNULL) + version = subprocess.check_output( + ["rustc", "--version", "--verbose"], stderr=subprocess.DEVNULL + ) version = version.decode(default_encoding) - host = next(x for x in version.split('\n') if x.startswith("host: ")) + host = next(x for x in version.split("\n") if x.startswith("host: ")) triple = host.split("host: ")[1] if verbose: - eprint("detected default triple {} from pre-installed rustc".format(triple)) + eprint( + "detected default triple {} from pre-installed rustc".format(triple) + ) return triple except Exception as e: if verbose: @@ -270,148 +307,149 @@ def default_build_triple(verbose): # If we do not have `uname`, assume Windows. if uname is None: - return 'x86_64-pc-windows-msvc' + return "x86_64-pc-windows-msvc" kernel, cputype, processor = uname.decode(default_encoding).split(maxsplit=2) # The goal here is to come up with the same triple as LLVM would, # at least for the subset of platforms we're willing to target. 
kerneltype_mapper = { - 'Darwin': 'apple-darwin', - 'DragonFly': 'unknown-dragonfly', - 'FreeBSD': 'unknown-freebsd', - 'Haiku': 'unknown-haiku', - 'NetBSD': 'unknown-netbsd', - 'OpenBSD': 'unknown-openbsd', - 'GNU': 'unknown-hurd', + "Darwin": "apple-darwin", + "DragonFly": "unknown-dragonfly", + "FreeBSD": "unknown-freebsd", + "Haiku": "unknown-haiku", + "NetBSD": "unknown-netbsd", + "OpenBSD": "unknown-openbsd", + "GNU": "unknown-hurd", } # Consider the direct transformation first and then the special cases if kernel in kerneltype_mapper: kernel = kerneltype_mapper[kernel] - elif kernel == 'Linux': + elif kernel == "Linux": # Apple doesn't support `-o` so this can't be used in the combined # uname invocation above ostype = require(["uname", "-o"], exit=required).decode(default_encoding) - if ostype == 'Android': - kernel = 'linux-android' + if ostype == "Android": + kernel = "linux-android" else: - kernel = 'unknown-linux-gnu' - elif kernel == 'SunOS': - kernel = 'pc-solaris' + kernel = "unknown-linux-gnu" + elif kernel == "SunOS": + kernel = "pc-solaris" # On Solaris, uname -m will return a machine classification instead # of a cpu type, so uname -p is recommended instead. However, the # output from that option is too generic for our purposes (it will # always emit 'i386' on x86/amd64 systems). As such, isainfo -k # must be used instead. - cputype = require(['isainfo', '-k']).decode(default_encoding) + cputype = require(["isainfo", "-k"]).decode(default_encoding) # sparc cpus have sun as a target vendor - if 'sparc' in cputype: - kernel = 'sun-solaris' - elif kernel.startswith('MINGW'): + if "sparc" in cputype: + kernel = "sun-solaris" + elif kernel.startswith("MINGW"): # msys' `uname` does not print gcc configuration, but prints msys # configuration. so we cannot believe `uname -m`: # msys1 is always i686 and msys2 is always x86_64. # instead, msys defines $MSYSTEM which is MINGW32 on i686 and # MINGW64 on x86_64. 
- kernel = 'pc-windows-gnu' - cputype = 'i686' - if os.environ.get('MSYSTEM') == 'MINGW64': - cputype = 'x86_64' - elif kernel.startswith('MSYS'): - kernel = 'pc-windows-gnu' - elif kernel.startswith('CYGWIN_NT'): - cputype = 'i686' - if kernel.endswith('WOW64'): - cputype = 'x86_64' - kernel = 'pc-windows-gnu' + kernel = "pc-windows-gnu" + cputype = "i686" + if os.environ.get("MSYSTEM") == "MINGW64": + cputype = "x86_64" + elif kernel.startswith("MSYS"): + kernel = "pc-windows-gnu" + elif kernel.startswith("CYGWIN_NT"): + cputype = "i686" + if kernel.endswith("WOW64"): + cputype = "x86_64" + kernel = "pc-windows-gnu" elif platform_is_win32(): # Some Windows platforms might have a `uname` command that returns a # non-standard string (e.g. gnuwin32 tools returns `windows32`). In # these cases, fall back to using sys.platform. - return 'x86_64-pc-windows-msvc' - elif kernel == 'AIX': + return "x86_64-pc-windows-msvc" + elif kernel == "AIX": # `uname -m` returns the machine ID rather than machine hardware on AIX, # so we are unable to use cputype to form triple. AIX 7.2 and # above supports 32-bit and 64-bit mode simultaneously and `uname -p` # returns `powerpc`, however we only supports `powerpc64-ibm-aix` in # rust on AIX. For above reasons, kerneltype_mapper and cputype_mapper # are not used to infer AIX's triple. 
- return 'powerpc64-ibm-aix' + return "powerpc64-ibm-aix" else: err = "unknown OS type: {}".format(kernel) sys.exit(err) - if cputype in ['powerpc', 'riscv'] and kernel == 'unknown-freebsd': - cputype = subprocess.check_output( - ['uname', '-p']).strip().decode(default_encoding) + if cputype in ["powerpc", "riscv"] and kernel == "unknown-freebsd": + cputype = ( + subprocess.check_output(["uname", "-p"]).strip().decode(default_encoding) + ) cputype_mapper = { - 'BePC': 'i686', - 'aarch64': 'aarch64', - 'aarch64eb': 'aarch64', - 'amd64': 'x86_64', - 'arm64': 'aarch64', - 'i386': 'i686', - 'i486': 'i686', - 'i686': 'i686', - 'i686-AT386': 'i686', - 'i786': 'i686', - 'loongarch64': 'loongarch64', - 'm68k': 'm68k', - 'csky': 'csky', - 'powerpc': 'powerpc', - 'powerpc64': 'powerpc64', - 'powerpc64le': 'powerpc64le', - 'ppc': 'powerpc', - 'ppc64': 'powerpc64', - 'ppc64le': 'powerpc64le', - 'riscv64': 'riscv64gc', - 's390x': 's390x', - 'x64': 'x86_64', - 'x86': 'i686', - 'x86-64': 'x86_64', - 'x86_64': 'x86_64' + "BePC": "i686", + "aarch64": "aarch64", + "aarch64eb": "aarch64", + "amd64": "x86_64", + "arm64": "aarch64", + "i386": "i686", + "i486": "i686", + "i686": "i686", + "i686-AT386": "i686", + "i786": "i686", + "loongarch64": "loongarch64", + "m68k": "m68k", + "csky": "csky", + "powerpc": "powerpc", + "powerpc64": "powerpc64", + "powerpc64le": "powerpc64le", + "ppc": "powerpc", + "ppc64": "powerpc64", + "ppc64le": "powerpc64le", + "riscv64": "riscv64gc", + "s390x": "s390x", + "x64": "x86_64", + "x86": "i686", + "x86-64": "x86_64", + "x86_64": "x86_64", } # Consider the direct transformation first and then the special cases if cputype in cputype_mapper: cputype = cputype_mapper[cputype] - elif cputype in {'xscale', 'arm'}: - cputype = 'arm' - if kernel == 'linux-android': - kernel = 'linux-androideabi' - elif kernel == 'unknown-freebsd': + elif cputype in {"xscale", "arm"}: + cputype = "arm" + if kernel == "linux-android": + kernel = "linux-androideabi" + elif kernel 
== "unknown-freebsd": cputype = processor - kernel = 'unknown-freebsd' - elif cputype == 'armv6l': - cputype = 'arm' - if kernel == 'linux-android': - kernel = 'linux-androideabi' + kernel = "unknown-freebsd" + elif cputype == "armv6l": + cputype = "arm" + if kernel == "linux-android": + kernel = "linux-androideabi" else: - kernel += 'eabihf' - elif cputype in {'armv7l', 'armv8l'}: - cputype = 'armv7' - if kernel == 'linux-android': - kernel = 'linux-androideabi' + kernel += "eabihf" + elif cputype in {"armv7l", "armv8l"}: + cputype = "armv7" + if kernel == "linux-android": + kernel = "linux-androideabi" else: - kernel += 'eabihf' - elif cputype == 'mips': - if sys.byteorder == 'big': - cputype = 'mips' - elif sys.byteorder == 'little': - cputype = 'mipsel' + kernel += "eabihf" + elif cputype == "mips": + if sys.byteorder == "big": + cputype = "mips" + elif sys.byteorder == "little": + cputype = "mipsel" else: raise ValueError("unknown byteorder: {}".format(sys.byteorder)) - elif cputype == 'mips64': - if sys.byteorder == 'big': - cputype = 'mips64' - elif sys.byteorder == 'little': - cputype = 'mips64el' + elif cputype == "mips64": + if sys.byteorder == "big": + cputype = "mips64" + elif sys.byteorder == "little": + cputype = "mips64el" else: - raise ValueError('unknown byteorder: {}'.format(sys.byteorder)) + raise ValueError("unknown byteorder: {}".format(sys.byteorder)) # only the n64 ABI is supported, indicate it - kernel += 'abi64' - elif cputype == 'sparc' or cputype == 'sparcv9' or cputype == 'sparc64': + kernel += "abi64" + elif cputype == "sparc" or cputype == "sparcv9" or cputype == "sparc64": pass else: err = "unknown cpu type: {}".format(cputype) @@ -422,8 +460,8 @@ def default_build_triple(verbose): @contextlib.contextmanager def output(filepath): - tmp = filepath + '.tmp' - with open(tmp, 'w') as f: + tmp = filepath + ".tmp" + with open(tmp, "w") as f: yield f try: if os.path.exists(filepath): @@ -467,6 +505,7 @@ def __init__( self.pattern = pattern 
self.verbose = verbose + def download_component(download_info): if not os.path.exists(download_info.tarball_path): get( @@ -477,6 +516,7 @@ def download_component(download_info): verbose=download_info.verbose, ) + def unpack_component(download_info): unpack( download_info.tarball_path, @@ -486,26 +526,30 @@ def unpack_component(download_info): verbose=download_info.verbose, ) + class FakeArgs: """Used for unit tests to avoid updating all call sites""" + def __init__(self): - self.build = '' - self.build_dir = '' + self.build = "" + self.build_dir = "" self.clean = False self.verbose = False self.json_output = False - self.color = 'auto' - self.warnings = 'default' + self.color = "auto" + self.warnings = "default" + class RustBuild(object): """Provide all the methods required to build Rust""" + def __init__(self, config_toml="", args=None): if args is None: args = FakeArgs() self.git_version = None self.nix_deps_dir = None self._should_fix_bins_and_dylibs = None - self.rust_root = os.path.abspath(os.path.join(__file__, '../../..')) + self.rust_root = os.path.abspath(os.path.join(__file__, "../../..")) self.config_toml = config_toml @@ -515,26 +559,28 @@ def __init__(self, config_toml="", args=None): self.color = args.color self.warnings = args.warnings - config_verbose_count = self.get_toml('verbose', 'build') + config_verbose_count = self.get_toml("verbose", "build") if config_verbose_count is not None: self.verbose = max(self.verbose, int(config_verbose_count)) - self.use_vendored_sources = self.get_toml('vendor', 'build') == 'true' - self.use_locked_deps = self.get_toml('locked-deps', 'build') == 'true' + self.use_vendored_sources = self.get_toml("vendor", "build") == "true" + self.use_locked_deps = self.get_toml("locked-deps", "build") == "true" - build_dir = args.build_dir or self.get_toml('build-dir', 'build') or 'build' + build_dir = args.build_dir or self.get_toml("build-dir", "build") or "build" self.build_dir = os.path.abspath(build_dir) - self.stage0_data 
= parse_stage0_file(os.path.join(self.rust_root, "src", "stage0")) + self.stage0_data = parse_stage0_file( + os.path.join(self.rust_root, "src", "stage0") + ) self.stage0_compiler = Stage0Toolchain( - self.stage0_data["compiler_date"], - self.stage0_data["compiler_version"] + self.stage0_data["compiler_date"], self.stage0_data["compiler_version"] + ) + self.download_url = ( + os.getenv("RUSTUP_DIST_SERVER") or self.stage0_data["dist_server"] ) - self.download_url = os.getenv("RUSTUP_DIST_SERVER") or self.stage0_data["dist_server"] self.build = args.build or self.build_triple() - def download_toolchain(self): """Fetch the build system for Rust, written in Rust @@ -550,58 +596,73 @@ def download_toolchain(self): key = self.stage0_compiler.date is_outdated = self.program_out_of_date(self.rustc_stamp(), key) - need_rustc = self.rustc().startswith(bin_root) and (not os.path.exists(self.rustc()) \ - or is_outdated) - need_cargo = self.cargo().startswith(bin_root) and (not os.path.exists(self.cargo()) \ - or is_outdated) + need_rustc = self.rustc().startswith(bin_root) and ( + not os.path.exists(self.rustc()) or is_outdated + ) + need_cargo = self.cargo().startswith(bin_root) and ( + not os.path.exists(self.cargo()) or is_outdated + ) if need_rustc or need_cargo: if os.path.exists(bin_root): # HACK: On Windows, we can't delete rust-analyzer-proc-macro-server while it's # running. Kill it. if platform_is_win32(): - print("Killing rust-analyzer-proc-macro-srv before deleting stage0 toolchain") - regex = '{}\\\\(host|{})\\\\stage0\\\\libexec'.format( - os.path.basename(self.build_dir), - self.build + print( + "Killing rust-analyzer-proc-macro-srv before deleting stage0 toolchain" + ) + regex = "{}\\\\(host|{})\\\\stage0\\\\libexec".format( + os.path.basename(self.build_dir), self.build ) script = ( # NOTE: can't use `taskkill` or `Get-Process -Name` because they error if # the server isn't running. 
- 'Get-Process | ' + - 'Where-Object {$_.Name -eq "rust-analyzer-proc-macro-srv"} |' + - 'Where-Object {{$_.Path -match "{}"}} |'.format(regex) + - 'Stop-Process' + "Get-Process | " + + 'Where-Object {$_.Name -eq "rust-analyzer-proc-macro-srv"} |' + + 'Where-Object {{$_.Path -match "{}"}} |'.format(regex) + + "Stop-Process" ) run_powershell([script]) shutil.rmtree(bin_root) - cache_dst = (self.get_toml('bootstrap-cache-path', 'build') or - os.path.join(self.build_dir, "cache")) + cache_dst = self.get_toml("bootstrap-cache-path", "build") or os.path.join( + self.build_dir, "cache" + ) rustc_cache = os.path.join(cache_dst, key) if not os.path.exists(rustc_cache): os.makedirs(rustc_cache) - tarball_suffix = '.tar.gz' if lzma is None else '.tar.xz' + tarball_suffix = ".tar.gz" if lzma is None else ".tar.xz" - toolchain_suffix = "{}-{}{}".format(rustc_channel, self.build, tarball_suffix) + toolchain_suffix = "{}-{}{}".format( + rustc_channel, self.build, tarball_suffix + ) tarballs_to_download = [] if need_rustc: tarballs_to_download.append( - ("rust-std-{}".format(toolchain_suffix), "rust-std-{}".format(self.build)) + ( + "rust-std-{}".format(toolchain_suffix), + "rust-std-{}".format(self.build), + ) + ) + tarballs_to_download.append( + ("rustc-{}".format(toolchain_suffix), "rustc") ) - tarballs_to_download.append(("rustc-{}".format(toolchain_suffix), "rustc")) if need_cargo: - tarballs_to_download.append(("cargo-{}".format(toolchain_suffix), "cargo")) + tarballs_to_download.append( + ("cargo-{}".format(toolchain_suffix), "cargo") + ) tarballs_download_info = [ DownloadInfo( base_download_url=self.download_url, - download_path="dist/{}/{}".format(self.stage0_compiler.date, filename), + download_path="dist/{}/{}".format( + self.stage0_compiler.date, filename + ), bin_root=self.bin_root(), tarball_path=os.path.join(rustc_cache, filename), tarball_suffix=tarball_suffix, @@ -620,7 +681,11 @@ def download_toolchain(self): # In Python 2.7, Pool cannot be used as a context 
manager. pool_size = min(len(tarballs_download_info), get_cpus()) if self.verbose: - print('Choosing a pool size of', pool_size, 'for the unpacking of the tarballs') + print( + "Choosing a pool size of", + pool_size, + "for the unpacking of the tarballs", + ) p = Pool(pool_size) try: # FIXME: A cheap workaround for https://github.com/rust-lang/rust/issues/125578, @@ -639,7 +704,9 @@ def download_toolchain(self): self.fix_bin_or_dylib("{}/bin/rustc".format(bin_root)) self.fix_bin_or_dylib("{}/bin/rustdoc".format(bin_root)) - self.fix_bin_or_dylib("{}/libexec/rust-analyzer-proc-macro-srv".format(bin_root)) + self.fix_bin_or_dylib( + "{}/libexec/rust-analyzer-proc-macro-srv".format(bin_root) + ) lib_dir = "{}/lib".format(bin_root) rustlib_bin_dir = "{}/rustlib/{}/bin".format(lib_dir, self.build) self.fix_bin_or_dylib("{}/rust-lld".format(rustlib_bin_dir)) @@ -667,12 +734,15 @@ def should_fix_bins_and_dylibs(self): def get_answer(): default_encoding = sys.getdefaultencoding() try: - ostype = subprocess.check_output( - ['uname', '-s']).strip().decode(default_encoding) + ostype = ( + subprocess.check_output(["uname", "-s"]) + .strip() + .decode(default_encoding) + ) except subprocess.CalledProcessError: return False except OSError as reason: - if getattr(reason, 'winerror', None) is not None: + if getattr(reason, "winerror", None) is not None: return False raise reason @@ -690,17 +760,23 @@ def get_answer(): # The latter one does not exist on NixOS when using tmpfs as root. 
try: with open("/etc/os-release", "r") as f: - is_nixos = any(ln.strip() in ("ID=nixos", "ID='nixos'", 'ID="nixos"') - for ln in f) + is_nixos = any( + ln.strip() in ("ID=nixos", "ID='nixos'", 'ID="nixos"') + for ln in f + ) except FileNotFoundError: is_nixos = False # If not on NixOS, then warn if user seems to be atop Nix shell if not is_nixos: - in_nix_shell = os.getenv('IN_NIX_SHELL') + in_nix_shell = os.getenv("IN_NIX_SHELL") if in_nix_shell: - eprint("The IN_NIX_SHELL environment variable is `{}`;".format(in_nix_shell), - "you may need to set `patch-binaries-for-nix=true` in config.toml") + eprint( + "The IN_NIX_SHELL environment variable is `{}`;".format( + in_nix_shell + ), + "you may need to set `patch-binaries-for-nix=true` in config.toml", + ) return is_nixos @@ -736,7 +812,7 @@ def fix_bin_or_dylib(self, fname): # zlib: Needed as a system dependency of `libLLVM-*.so`. # patchelf: Needed for patching ELF binaries (see doc comment above). nix_deps_dir = "{}/{}".format(self.build_dir, ".nix-deps") - nix_expr = ''' + nix_expr = """ with (import {}); symlinkJoin { name = "rust-stage0-dependencies"; @@ -746,24 +822,30 @@ def fix_bin_or_dylib(self, fname): stdenv.cc.bintools ]; } - ''' + """ try: - subprocess.check_output([ - "nix-build", "-E", nix_expr, "-o", nix_deps_dir, - ]) + subprocess.check_output( + [ + "nix-build", + "-E", + nix_expr, + "-o", + nix_deps_dir, + ] + ) except subprocess.CalledProcessError as reason: eprint("WARNING: failed to call nix-build:", reason) return self.nix_deps_dir = nix_deps_dir patchelf = "{}/bin/patchelf".format(nix_deps_dir) - rpath_entries = [ - os.path.join(os.path.realpath(nix_deps_dir), "lib") - ] + rpath_entries = [os.path.join(os.path.realpath(nix_deps_dir), "lib")] patchelf_args = ["--add-rpath", ":".join(rpath_entries)] if ".so" not in fname: # Finally, set the correct .interp for binaries - with open("{}/nix-support/dynamic-linker".format(nix_deps_dir)) as dynamic_linker: + with open( + 
"{}/nix-support/dynamic-linker".format(nix_deps_dir) + ) as dynamic_linker: patchelf_args += ["--set-interpreter", dynamic_linker.read().rstrip()] try: @@ -781,13 +863,13 @@ def rustc_stamp(self): >>> expected = os.path.join("build", "host", "stage0", ".rustc-stamp") >>> assert rb.rustc_stamp() == expected, rb.rustc_stamp() """ - return os.path.join(self.bin_root(), '.rustc-stamp') + return os.path.join(self.bin_root(), ".rustc-stamp") def program_out_of_date(self, stamp_path, key): """Check if the given program stamp is out of date""" if not os.path.exists(stamp_path) or self.clean: return True - with open(stamp_path, 'r') as stamp: + with open(stamp_path, "r") as stamp: return key != stamp.read() def bin_root(self): @@ -834,11 +916,11 @@ def get_toml(self, key, section=None): def get_toml_static(config_toml, key, section=None): cur_section = None for line in config_toml.splitlines(): - section_match = re.match(r'^\s*\[(.*)\]\s*$', line) + section_match = re.match(r"^\s*\[(.*)\]\s*$", line) if section_match is not None: cur_section = section_match.group(1) - match = re.match(r'^{}\s*=(.*)$'.format(key), line) + match = re.match(r"^{}\s*=(.*)$".format(key), line) if match is not None: value = match.group(1) if section is None or section == cur_section: @@ -847,11 +929,11 @@ def get_toml_static(config_toml, key, section=None): def cargo(self): """Return config path for cargo""" - return self.program_config('cargo') + return self.program_config("cargo") def rustc(self): """Return config path for rustc""" - return self.program_config('rustc') + return self.program_config("rustc") def program_config(self, program): """Return config path for the given program at the given stage @@ -886,12 +968,12 @@ def get_string(line): """ start = line.find('"') if start != -1: - end = start + 1 + line[start + 1:].find('"') - return line[start + 1:end] - start = line.find('\'') + end = start + 1 + line[start + 1 :].find('"') + return line[start + 1 : end] + start = line.find("'") if 
start != -1: - end = start + 1 + line[start + 1:].find('\'') - return line[start + 1:end] + end = start + 1 + line[start + 1 :].find("'") + return line[start + 1 : end] return None def bootstrap_out(self): @@ -941,24 +1023,37 @@ def build_bootstrap_cmd(self, env): del env["CARGO_BUILD_TARGET"] env["CARGO_TARGET_DIR"] = build_dir env["RUSTC"] = self.rustc() - env["LD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \ - (os.pathsep + env["LD_LIBRARY_PATH"]) \ - if "LD_LIBRARY_PATH" in env else "" - env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \ - (os.pathsep + env["DYLD_LIBRARY_PATH"]) \ - if "DYLD_LIBRARY_PATH" in env else "" - env["LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \ - (os.pathsep + env["LIBRARY_PATH"]) \ - if "LIBRARY_PATH" in env else "" - env["LIBPATH"] = os.path.join(self.bin_root(), "lib") + \ - (os.pathsep + env["LIBPATH"]) \ - if "LIBPATH" in env else "" + env["LD_LIBRARY_PATH"] = ( + os.path.join(self.bin_root(), "lib") + (os.pathsep + env["LD_LIBRARY_PATH"]) + if "LD_LIBRARY_PATH" in env + else "" + ) + env["DYLD_LIBRARY_PATH"] = ( + os.path.join(self.bin_root(), "lib") + + (os.pathsep + env["DYLD_LIBRARY_PATH"]) + if "DYLD_LIBRARY_PATH" in env + else "" + ) + env["LIBRARY_PATH"] = ( + os.path.join(self.bin_root(), "lib") + (os.pathsep + env["LIBRARY_PATH"]) + if "LIBRARY_PATH" in env + else "" + ) + env["LIBPATH"] = ( + os.path.join(self.bin_root(), "lib") + (os.pathsep + env["LIBPATH"]) + if "LIBPATH" in env + else "" + ) # Export Stage0 snapshot compiler related env variables build_section = "target.{}".format(self.build) host_triple_sanitized = self.build.replace("-", "_") var_data = { - "CC": "cc", "CXX": "cxx", "LD": "linker", "AR": "ar", "RANLIB": "ranlib" + "CC": "cc", + "CXX": "cxx", + "LD": "linker", + "AR": "ar", + "RANLIB": "ranlib", } for var_name, toml_key in var_data.items(): toml_val = self.get_toml(toml_key, build_section) @@ -1023,14 +1118,16 @@ def build_bootstrap_cmd(self, env): if 
"RUSTFLAGS_BOOTSTRAP" in env: env["RUSTFLAGS"] += " " + env["RUSTFLAGS_BOOTSTRAP"] - env["PATH"] = os.path.join(self.bin_root(), "bin") + \ - os.pathsep + env["PATH"] + env["PATH"] = os.path.join(self.bin_root(), "bin") + os.pathsep + env["PATH"] if not os.path.isfile(self.cargo()): - raise Exception("no cargo executable found at `{}`".format( - self.cargo())) - args = [self.cargo(), "build", "--manifest-path", - os.path.join(self.rust_root, "src/bootstrap/Cargo.toml"), - "-Zroot-dir="+self.rust_root] + raise Exception("no cargo executable found at `{}`".format(self.cargo())) + args = [ + self.cargo(), + "build", + "--manifest-path", + os.path.join(self.rust_root, "src/bootstrap/Cargo.toml"), + "-Zroot-dir=" + self.rust_root, + ] args.extend("--verbose" for _ in range(self.verbose)) if self.use_locked_deps: args.append("--locked") @@ -1058,83 +1155,103 @@ def build_triple(self): Note that `default_build_triple` is moderately expensive, so use `self.build` where possible. """ - config = self.get_toml('build') + config = self.get_toml("build") return config or default_build_triple(self.verbose) def check_vendored_status(self): """Check that vendoring is configured properly""" # keep this consistent with the equivalent check in bootstrap: # https://github.com/rust-lang/rust/blob/a8a33cf27166d3eabaffc58ed3799e054af3b0c6/src/bootstrap/lib.rs#L399-L405 - if 'SUDO_USER' in os.environ and not self.use_vendored_sources: + if "SUDO_USER" in os.environ and not self.use_vendored_sources: if os.getuid() == 0: self.use_vendored_sources = True - eprint('INFO: looks like you\'re trying to run this command as root') - eprint(' and so in order to preserve your $HOME this will now') - eprint(' use vendored sources by default.') + eprint("INFO: looks like you're trying to run this command as root") + eprint(" and so in order to preserve your $HOME this will now") + eprint(" use vendored sources by default.") - cargo_dir = os.path.join(self.rust_root, '.cargo') + cargo_dir = 
os.path.join(self.rust_root, ".cargo") + url = "https://ci-artifacts.rust-lang.org/rustc-builds//rustc-nightly-src.tar.xz" if self.use_vendored_sources: - vendor_dir = os.path.join(self.rust_root, 'vendor') + vendor_dir = os.path.join(self.rust_root, "vendor") if not os.path.exists(vendor_dir): - eprint('ERROR: vendoring required, but vendor directory does not exist.') - eprint(' Run `x.py vendor` to initialize the vendor directory.') - eprint(' Alternatively, use the pre-vendored `rustc-src` dist component.') - eprint(' To get a stable/beta/nightly version, download it from: ') - eprint(' ' - 'https://forge.rust-lang.org/infra/other-installation-methods.html#source-code') - eprint(' To get a specific commit version, download it using the below URL,') - eprint(' replacing with a specific commit checksum: ') - eprint(' ' - 'https://ci-artifacts.rust-lang.org/rustc-builds//rustc-nightly-src.tar.xz') - eprint(' Once you have the source downloaded, place the vendor directory') - eprint(' from the archive in the root of the rust project.') + eprint( + "ERROR: vendoring required, but vendor directory does not exist." + ) + eprint(" Run `x.py vendor` to initialize the vendor directory.") + eprint( + " Alternatively, use the pre-vendored `rustc-src` dist component." 
+ ) + eprint( + " To get a stable/beta/nightly version, download it from: " + ) + eprint( + " " + "https://forge.rust-lang.org/infra/other-installation-methods.html#source-code" + ) + eprint( + " To get a specific commit version, download it using the below URL," + ) + eprint(" replacing with a specific commit checksum: ") + eprint(" ", url) + eprint( + " Once you have the source downloaded, place the vendor directory" + ) + eprint(" from the archive in the root of the rust project.") raise Exception("{} not found".format(vendor_dir)) if not os.path.exists(cargo_dir): - eprint('ERROR: vendoring required, but .cargo/config does not exist.') + eprint("ERROR: vendoring required, but .cargo/config does not exist.") raise Exception("{} not found".format(cargo_dir)) + def parse_args(args): """Parse the command line arguments that the python script needs.""" parser = argparse.ArgumentParser(add_help=False) - parser.add_argument('-h', '--help', action='store_true') - parser.add_argument('--config') - parser.add_argument('--build-dir') - parser.add_argument('--build') - parser.add_argument('--color', choices=['always', 'never', 'auto']) - parser.add_argument('--clean', action='store_true') - parser.add_argument('--json-output', action='store_true') - parser.add_argument('--warnings', choices=['deny', 'warn', 'default'], default='default') - parser.add_argument('-v', '--verbose', action='count', default=0) + parser.add_argument("-h", "--help", action="store_true") + parser.add_argument("--config") + parser.add_argument("--build-dir") + parser.add_argument("--build") + parser.add_argument("--color", choices=["always", "never", "auto"]) + parser.add_argument("--clean", action="store_true") + parser.add_argument("--json-output", action="store_true") + parser.add_argument( + "--warnings", choices=["deny", "warn", "default"], default="default" + ) + parser.add_argument("-v", "--verbose", action="count", default=0) return parser.parse_known_args(args)[0] + def 
parse_stage0_file(path): result = {} - with open(path, 'r') as file: + with open(path, "r") as file: for line in file: line = line.strip() - if line and not line.startswith('#'): - key, value = line.split('=', 1) + if line and not line.startswith("#"): + key, value = line.split("=", 1) result[key.strip()] = value.strip() return result + def bootstrap(args): """Configure, fetch, build and run the initial bootstrap""" - rust_root = os.path.abspath(os.path.join(__file__, '../../..')) + rust_root = os.path.abspath(os.path.join(__file__, "../../..")) - if not os.path.exists(os.path.join(rust_root, '.git')) and \ - os.path.exists(os.path.join(rust_root, '.github')): - eprint("warn: Looks like you are trying to bootstrap Rust from a source that is neither a " - "git clone nor distributed tarball.\nThis build may fail due to missing submodules " - "unless you put them in place manually.") + if not os.path.exists(os.path.join(rust_root, ".git")) and os.path.exists( + os.path.join(rust_root, ".github") + ): + eprint( + "warn: Looks like you are trying to bootstrap Rust from a source that is neither a " + "git clone nor distributed tarball.\nThis build may fail due to missing submodules " + "unless you put them in place manually." + ) # Read from `--config`, then `RUST_BOOTSTRAP_CONFIG`, then `./config.toml`, # then `config.toml` in the root directory. 
- toml_path = args.config or os.getenv('RUST_BOOTSTRAP_CONFIG') + toml_path = args.config or os.getenv("RUST_BOOTSTRAP_CONFIG") using_default_path = toml_path is None if using_default_path: - toml_path = 'config.toml' + toml_path = "config.toml" if not os.path.exists(toml_path): toml_path = os.path.join(rust_root, toml_path) @@ -1144,23 +1261,23 @@ def bootstrap(args): with open(toml_path) as config: config_toml = config.read() else: - config_toml = '' + config_toml = "" - profile = RustBuild.get_toml_static(config_toml, 'profile') + profile = RustBuild.get_toml_static(config_toml, "profile") if profile is not None: # Allows creating alias for profile names, allowing # profiles to be renamed while maintaining back compatibility # Keep in sync with `profile_aliases` in config.rs - profile_aliases = { - "user": "dist" - } - include_file = 'config.{}.toml'.format(profile_aliases.get(profile) or profile) - include_dir = os.path.join(rust_root, 'src', 'bootstrap', 'defaults') + profile_aliases = {"user": "dist"} + include_file = "config.{}.toml".format(profile_aliases.get(profile) or profile) + include_dir = os.path.join(rust_root, "src", "bootstrap", "defaults") include_path = os.path.join(include_dir, include_file) if not os.path.exists(include_path): - raise Exception("Unrecognized config profile '{}'. Check src/bootstrap/defaults" - " for available options.".format(profile)) + raise Exception( + "Unrecognized config profile '{}'. Check src/bootstrap/defaults" + " for available options.".format(profile) + ) # HACK: This works because `self.get_toml()` returns the first match it finds for a # specific key, so appending our defaults at the end allows the user to override them @@ -1196,8 +1313,8 @@ def main(): start_time = time() # x.py help ... 
- if len(sys.argv) > 1 and sys.argv[1] == 'help': - sys.argv[1] = '-h' + if len(sys.argv) > 1 and sys.argv[1] == "help": + sys.argv[1] = "-h" args = parse_args(sys.argv) help_triggered = args.help or len(sys.argv) == 1 @@ -1207,14 +1324,15 @@ def main(): if help_triggered: eprint( "INFO: Downloading and building bootstrap before processing --help command.\n" - " See src/bootstrap/README.md for help with common commands.") + " See src/bootstrap/README.md for help with common commands." + ) exit_code = 0 success_word = "successfully" try: bootstrap(args) except (SystemExit, KeyboardInterrupt) as error: - if hasattr(error, 'code') and isinstance(error.code, int): + if hasattr(error, "code") and isinstance(error.code, int): exit_code = error.code else: exit_code = 1 @@ -1222,9 +1340,14 @@ def main(): success_word = "unsuccessfully" if not help_triggered: - eprint("Build completed", success_word, "in", format_build_time(time() - start_time)) + eprint( + "Build completed", + success_word, + "in", + format_build_time(time() - start_time), + ) sys.exit(exit_code) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/src/bootstrap/bootstrap_test.py b/src/bootstrap/bootstrap_test.py index 70ed12b96e875..7494536539d7b 100644 --- a/src/bootstrap/bootstrap_test.py +++ b/src/bootstrap/bootstrap_test.py @@ -16,8 +16,9 @@ bootstrap_dir = os.path.dirname(os.path.abspath(__file__)) # For the import below, have Python search in src/bootstrap first. sys.path.insert(0, bootstrap_dir) -import bootstrap # noqa: E402 -import configure # noqa: E402 +import bootstrap # noqa: E402 +import configure # noqa: E402 + def serialize_and_parse(configure_args, bootstrap_args=None): from io import StringIO @@ -32,15 +33,20 @@ def serialize_and_parse(configure_args, bootstrap_args=None): try: import tomllib + # Verify this is actually valid TOML. 
tomllib.loads(build.config_toml) except ImportError: - print("WARNING: skipping TOML validation, need at least python 3.11", file=sys.stderr) + print( + "WARNING: skipping TOML validation, need at least python 3.11", + file=sys.stderr, + ) return build class VerifyTestCase(unittest.TestCase): """Test Case for verify""" + def setUp(self): self.container = tempfile.mkdtemp() self.src = os.path.join(self.container, "src.txt") @@ -68,14 +74,14 @@ def test_invalid_file(self): class ProgramOutOfDate(unittest.TestCase): """Test if a program is out of date""" + def setUp(self): self.container = tempfile.mkdtemp() os.mkdir(os.path.join(self.container, "stage0")) self.build = bootstrap.RustBuild() self.build.date = "2017-06-15" self.build.build_dir = self.container - self.rustc_stamp_path = os.path.join(self.container, "stage0", - ".rustc-stamp") + self.rustc_stamp_path = os.path.join(self.container, "stage0", ".rustc-stamp") self.key = self.build.date + str(None) def tearDown(self): @@ -97,11 +103,14 @@ def test_same_dates(self): """Return False both dates match""" with open(self.rustc_stamp_path, "w") as rustc_stamp: rustc_stamp.write("2017-06-15None") - self.assertFalse(self.build.program_out_of_date(self.rustc_stamp_path, self.key)) + self.assertFalse( + self.build.program_out_of_date(self.rustc_stamp_path, self.key) + ) class ParseArgsInConfigure(unittest.TestCase): """Test if `parse_args` function in `configure.py` works properly""" + @patch("configure.err") def test_unknown_args(self, err): # It should be print an error message if the argument doesn't start with '--' @@ -148,28 +157,35 @@ def test_known_args(self): class GenerateAndParseConfig(unittest.TestCase): """Test that we can serialize and deserialize a config.toml file""" + def test_no_args(self): build = serialize_and_parse([]) - self.assertEqual(build.get_toml("profile"), 'dist') + self.assertEqual(build.get_toml("profile"), "dist") self.assertIsNone(build.get_toml("llvm.download-ci-llvm")) def 
test_set_section(self): build = serialize_and_parse(["--set", "llvm.download-ci-llvm"]) - self.assertEqual(build.get_toml("download-ci-llvm", section="llvm"), 'true') + self.assertEqual(build.get_toml("download-ci-llvm", section="llvm"), "true") def test_set_target(self): build = serialize_and_parse(["--set", "target.x86_64-unknown-linux-gnu.cc=gcc"]) - self.assertEqual(build.get_toml("cc", section="target.x86_64-unknown-linux-gnu"), 'gcc') + self.assertEqual( + build.get_toml("cc", section="target.x86_64-unknown-linux-gnu"), "gcc" + ) def test_set_top_level(self): build = serialize_and_parse(["--set", "profile=compiler"]) - self.assertEqual(build.get_toml("profile"), 'compiler') + self.assertEqual(build.get_toml("profile"), "compiler") def test_set_codegen_backends(self): build = serialize_and_parse(["--set", "rust.codegen-backends=cranelift"]) - self.assertNotEqual(build.config_toml.find("codegen-backends = ['cranelift']"), -1) + self.assertNotEqual( + build.config_toml.find("codegen-backends = ['cranelift']"), -1 + ) build = serialize_and_parse(["--set", "rust.codegen-backends=cranelift,llvm"]) - self.assertNotEqual(build.config_toml.find("codegen-backends = ['cranelift', 'llvm']"), -1) + self.assertNotEqual( + build.config_toml.find("codegen-backends = ['cranelift', 'llvm']"), -1 + ) build = serialize_and_parse(["--enable-full-tools"]) self.assertNotEqual(build.config_toml.find("codegen-backends = ['llvm']"), -1) @@ -223,7 +239,7 @@ def test_cargoflags(self): self.assertTrue("--timings" in args) def test_warnings(self): - for toml_warnings in ['false', 'true', None]: + for toml_warnings in ["false", "true", None]: configure_args = [] if toml_warnings is not None: configure_args = ["--set", "rust.deny-warnings=" + toml_warnings] diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py index 70f4e70962abe..717500221454a 100755 --- a/src/bootstrap/configure.py +++ b/src/bootstrap/configure.py @@ -6,11 +6,12 @@ import shlex import sys import os + 
rust_dir = os.path.dirname(os.path.abspath(__file__)) rust_dir = os.path.dirname(rust_dir) rust_dir = os.path.dirname(rust_dir) sys.path.append(os.path.join(rust_dir, "src", "bootstrap")) -import bootstrap # noqa: E402 +import bootstrap # noqa: E402 class Option(object): @@ -32,26 +33,62 @@ def v(*args): options.append(Option(*args, value=True)) -o("debug", "rust.debug", "enables debugging environment; does not affect optimization of bootstrapped code") +o( + "debug", + "rust.debug", + "enables debugging environment; does not affect optimization of bootstrapped code", +) o("docs", "build.docs", "build standard library documentation") o("compiler-docs", "build.compiler-docs", "build compiler documentation") o("optimize-tests", "rust.optimize-tests", "build tests with optimizations") o("verbose-tests", "rust.verbose-tests", "enable verbose output when running tests") -o("ccache", "llvm.ccache", "invoke gcc/clang via ccache to reuse object files between builds") +o( + "ccache", + "llvm.ccache", + "invoke gcc/clang via ccache to reuse object files between builds", +) o("sccache", None, "invoke gcc/clang via sccache to reuse object files between builds") o("local-rust", None, "use an installed rustc rather than downloading a snapshot") v("local-rust-root", None, "set prefix for local rust binary") -o("local-rebuild", "build.local-rebuild", "assume local-rust matches the current version, for rebuilds; implies local-rust, and is implied if local-rust already matches the current version") -o("llvm-static-stdcpp", "llvm.static-libstdcpp", "statically link to libstdc++ for LLVM") -o("llvm-link-shared", "llvm.link-shared", "prefer shared linking to LLVM (llvm-config --link-shared)") +o( + "local-rebuild", + "build.local-rebuild", + "assume local-rust matches the current version, for rebuilds; implies local-rust, and is implied if local-rust already matches the current version", +) +o( + "llvm-static-stdcpp", + "llvm.static-libstdcpp", + "statically link to libstdc++ for 
LLVM", +) +o( + "llvm-link-shared", + "llvm.link-shared", + "prefer shared linking to LLVM (llvm-config --link-shared)", +) o("rpath", "rust.rpath", "build rpaths into rustc itself") o("codegen-tests", "rust.codegen-tests", "run the tests/codegen tests") -o("ninja", "llvm.ninja", "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)") +o( + "ninja", + "llvm.ninja", + "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)", +) o("locked-deps", "build.locked-deps", "force Cargo.lock to be up to date") o("vendor", "build.vendor", "enable usage of vendored Rust crates") -o("sanitizers", "build.sanitizers", "build the sanitizer runtimes (asan, dfsan, lsan, msan, tsan, hwasan)") -o("dist-src", "rust.dist-src", "when building tarballs enables building a source tarball") -o("cargo-native-static", "build.cargo-native-static", "static native libraries in cargo") +o( + "sanitizers", + "build.sanitizers", + "build the sanitizer runtimes (asan, dfsan, lsan, msan, tsan, hwasan)", +) +o( + "dist-src", + "rust.dist-src", + "when building tarballs enables building a source tarball", +) +o( + "cargo-native-static", + "build.cargo-native-static", + "static native libraries in cargo", +) o("profiler", "build.profiler", "build the profiler runtime") o("full-tools", None, "enable all tools") o("lld", "rust.lld", "build lld") @@ -59,7 +96,11 @@ def v(*args): o("clang", "llvm.clang", "build clang") o("use-libcxx", "llvm.use-libcxx", "build LLVM with libc++") o("control-flow-guard", "rust.control-flow-guard", "Enable Control Flow Guard") -o("patch-binaries-for-nix", "build.patch-binaries-for-nix", "whether patch binaries for usage with Nix toolchains") +o( + "patch-binaries-for-nix", + "build.patch-binaries-for-nix", + "whether patch binaries for usage with Nix toolchains", +) o("new-symbol-mangling", "rust.new-symbol-mangling", "use symbol-mangling-version v0") v("llvm-cflags", "llvm.cflags", "build LLVM with 
these extra compiler flags") @@ -76,16 +117,48 @@ def v(*args): o("llvm-offload", "llvm.offload", "build LLVM with gpu offload support") o("llvm-plugins", "llvm.plugins", "build LLVM with plugin interface") o("debug-assertions", "rust.debug-assertions", "build with debugging assertions") -o("debug-assertions-std", "rust.debug-assertions-std", "build the standard library with debugging assertions") +o( + "debug-assertions-std", + "rust.debug-assertions-std", + "build the standard library with debugging assertions", +) o("overflow-checks", "rust.overflow-checks", "build with overflow checks") -o("overflow-checks-std", "rust.overflow-checks-std", "build the standard library with overflow checks") -o("llvm-release-debuginfo", "llvm.release-debuginfo", "build LLVM with debugger metadata") +o( + "overflow-checks-std", + "rust.overflow-checks-std", + "build the standard library with overflow checks", +) +o( + "llvm-release-debuginfo", + "llvm.release-debuginfo", + "build LLVM with debugger metadata", +) v("debuginfo-level", "rust.debuginfo-level", "debuginfo level for Rust code") -v("debuginfo-level-rustc", "rust.debuginfo-level-rustc", "debuginfo level for the compiler") -v("debuginfo-level-std", "rust.debuginfo-level-std", "debuginfo level for the standard library") -v("debuginfo-level-tools", "rust.debuginfo-level-tools", "debuginfo level for the tools") -v("debuginfo-level-tests", "rust.debuginfo-level-tests", "debuginfo level for the test suites run with compiletest") -v("save-toolstates", "rust.save-toolstates", "save build and test status of external tools into this file") +v( + "debuginfo-level-rustc", + "rust.debuginfo-level-rustc", + "debuginfo level for the compiler", +) +v( + "debuginfo-level-std", + "rust.debuginfo-level-std", + "debuginfo level for the standard library", +) +v( + "debuginfo-level-tools", + "rust.debuginfo-level-tools", + "debuginfo level for the tools", +) +v( + "debuginfo-level-tests", + "rust.debuginfo-level-tests", + "debuginfo level for 
the test suites run with compiletest", +) +v( + "save-toolstates", + "rust.save-toolstates", + "save build and test status of external tools into this file", +) v("prefix", "install.prefix", "set installation prefix") v("localstatedir", "install.localstatedir", "local state directory") @@ -102,50 +175,117 @@ def v(*args): v("llvm-filecheck", None, "set path to LLVM's FileCheck utility") v("python", "build.python", "set path to python") v("android-ndk", "build.android-ndk", "set path to Android NDK") -v("musl-root", "target.x86_64-unknown-linux-musl.musl-root", - "MUSL root installation directory (deprecated)") -v("musl-root-x86_64", "target.x86_64-unknown-linux-musl.musl-root", - "x86_64-unknown-linux-musl install directory") -v("musl-root-i586", "target.i586-unknown-linux-musl.musl-root", - "i586-unknown-linux-musl install directory") -v("musl-root-i686", "target.i686-unknown-linux-musl.musl-root", - "i686-unknown-linux-musl install directory") -v("musl-root-arm", "target.arm-unknown-linux-musleabi.musl-root", - "arm-unknown-linux-musleabi install directory") -v("musl-root-armhf", "target.arm-unknown-linux-musleabihf.musl-root", - "arm-unknown-linux-musleabihf install directory") -v("musl-root-armv5te", "target.armv5te-unknown-linux-musleabi.musl-root", - "armv5te-unknown-linux-musleabi install directory") -v("musl-root-armv7", "target.armv7-unknown-linux-musleabi.musl-root", - "armv7-unknown-linux-musleabi install directory") -v("musl-root-armv7hf", "target.armv7-unknown-linux-musleabihf.musl-root", - "armv7-unknown-linux-musleabihf install directory") -v("musl-root-aarch64", "target.aarch64-unknown-linux-musl.musl-root", - "aarch64-unknown-linux-musl install directory") -v("musl-root-mips", "target.mips-unknown-linux-musl.musl-root", - "mips-unknown-linux-musl install directory") -v("musl-root-mipsel", "target.mipsel-unknown-linux-musl.musl-root", - "mipsel-unknown-linux-musl install directory") -v("musl-root-mips64", 
"target.mips64-unknown-linux-muslabi64.musl-root", - "mips64-unknown-linux-muslabi64 install directory") -v("musl-root-mips64el", "target.mips64el-unknown-linux-muslabi64.musl-root", - "mips64el-unknown-linux-muslabi64 install directory") -v("musl-root-riscv32gc", "target.riscv32gc-unknown-linux-musl.musl-root", - "riscv32gc-unknown-linux-musl install directory") -v("musl-root-riscv64gc", "target.riscv64gc-unknown-linux-musl.musl-root", - "riscv64gc-unknown-linux-musl install directory") -v("musl-root-loongarch64", "target.loongarch64-unknown-linux-musl.musl-root", - "loongarch64-unknown-linux-musl install directory") -v("qemu-armhf-rootfs", "target.arm-unknown-linux-gnueabihf.qemu-rootfs", - "rootfs in qemu testing, you probably don't want to use this") -v("qemu-aarch64-rootfs", "target.aarch64-unknown-linux-gnu.qemu-rootfs", - "rootfs in qemu testing, you probably don't want to use this") -v("qemu-riscv64-rootfs", "target.riscv64gc-unknown-linux-gnu.qemu-rootfs", - "rootfs in qemu testing, you probably don't want to use this") -v("experimental-targets", "llvm.experimental-targets", - "experimental LLVM targets to build") +v( + "musl-root", + "target.x86_64-unknown-linux-musl.musl-root", + "MUSL root installation directory (deprecated)", +) +v( + "musl-root-x86_64", + "target.x86_64-unknown-linux-musl.musl-root", + "x86_64-unknown-linux-musl install directory", +) +v( + "musl-root-i586", + "target.i586-unknown-linux-musl.musl-root", + "i586-unknown-linux-musl install directory", +) +v( + "musl-root-i686", + "target.i686-unknown-linux-musl.musl-root", + "i686-unknown-linux-musl install directory", +) +v( + "musl-root-arm", + "target.arm-unknown-linux-musleabi.musl-root", + "arm-unknown-linux-musleabi install directory", +) +v( + "musl-root-armhf", + "target.arm-unknown-linux-musleabihf.musl-root", + "arm-unknown-linux-musleabihf install directory", +) +v( + "musl-root-armv5te", + "target.armv5te-unknown-linux-musleabi.musl-root", + "armv5te-unknown-linux-musleabi 
install directory", +) +v( + "musl-root-armv7", + "target.armv7-unknown-linux-musleabi.musl-root", + "armv7-unknown-linux-musleabi install directory", +) +v( + "musl-root-armv7hf", + "target.armv7-unknown-linux-musleabihf.musl-root", + "armv7-unknown-linux-musleabihf install directory", +) +v( + "musl-root-aarch64", + "target.aarch64-unknown-linux-musl.musl-root", + "aarch64-unknown-linux-musl install directory", +) +v( + "musl-root-mips", + "target.mips-unknown-linux-musl.musl-root", + "mips-unknown-linux-musl install directory", +) +v( + "musl-root-mipsel", + "target.mipsel-unknown-linux-musl.musl-root", + "mipsel-unknown-linux-musl install directory", +) +v( + "musl-root-mips64", + "target.mips64-unknown-linux-muslabi64.musl-root", + "mips64-unknown-linux-muslabi64 install directory", +) +v( + "musl-root-mips64el", + "target.mips64el-unknown-linux-muslabi64.musl-root", + "mips64el-unknown-linux-muslabi64 install directory", +) +v( + "musl-root-riscv32gc", + "target.riscv32gc-unknown-linux-musl.musl-root", + "riscv32gc-unknown-linux-musl install directory", +) +v( + "musl-root-riscv64gc", + "target.riscv64gc-unknown-linux-musl.musl-root", + "riscv64gc-unknown-linux-musl install directory", +) +v( + "musl-root-loongarch64", + "target.loongarch64-unknown-linux-musl.musl-root", + "loongarch64-unknown-linux-musl install directory", +) +v( + "qemu-armhf-rootfs", + "target.arm-unknown-linux-gnueabihf.qemu-rootfs", + "rootfs in qemu testing, you probably don't want to use this", +) +v( + "qemu-aarch64-rootfs", + "target.aarch64-unknown-linux-gnu.qemu-rootfs", + "rootfs in qemu testing, you probably don't want to use this", +) +v( + "qemu-riscv64-rootfs", + "target.riscv64gc-unknown-linux-gnu.qemu-rootfs", + "rootfs in qemu testing, you probably don't want to use this", +) +v( + "experimental-targets", + "llvm.experimental-targets", + "experimental LLVM targets to build", +) v("release-channel", "rust.channel", "the name of the release channel to build") 
-v("release-description", "rust.description", "optional descriptive string for version output") +v( + "release-description", + "rust.description", + "optional descriptive string for version output", +) v("dist-compression-formats", None, "List of compression formats to use") # Used on systems where "cc" is unavailable @@ -154,7 +294,11 @@ def v(*args): # Many of these are saved below during the "writing configuration" step # (others are conditionally saved). o("manage-submodules", "build.submodules", "let the build manage the git submodules") -o("full-bootstrap", "build.full-bootstrap", "build three compilers instead of two (not recommended except for testing reproducible builds)") +o( + "full-bootstrap", + "build.full-bootstrap", + "build three compilers instead of two (not recommended except for testing reproducible builds)", +) o("extended", "build.extended", "build an extended rust tool set") v("bootstrap-cache-path", None, "use provided path for the bootstrap cache") @@ -165,8 +309,16 @@ def v(*args): v("target", None, "List of GNUs ./configure syntax LLVM target triples") # Options specific to this configure script -o("option-checking", None, "complain about unrecognized options in this configure script") -o("verbose-configure", None, "don't truncate options when printing them in this configure script") +o( + "option-checking", + None, + "complain about unrecognized options in this configure script", +) +o( + "verbose-configure", + None, + "don't truncate options when printing them in this configure script", +) v("set", None, "set arbitrary key/value pairs in TOML configuration") @@ -178,39 +330,42 @@ def err(msg): print("\nconfigure: ERROR: " + msg + "\n") sys.exit(1) + def is_value_list(key): for option in options: - if option.name == key and option.desc.startswith('List of'): + if option.name == key and option.desc.startswith("List of"): return True return False -if '--help' in sys.argv or '-h' in sys.argv: - print('Usage: ./configure [options]') - 
print('') - print('Options') + +if "--help" in sys.argv or "-h" in sys.argv: + print("Usage: ./configure [options]") + print("") + print("Options") for option in options: - if 'android' in option.name: + if "android" in option.name: # no one needs to know about these obscure options continue if option.value: - print('\t{:30} {}'.format('--{}=VAL'.format(option.name), option.desc)) + print("\t{:30} {}".format("--{}=VAL".format(option.name), option.desc)) else: - print('\t--enable-{:25} OR --disable-{}'.format(option.name, option.name)) - print('\t\t' + option.desc) - print('') - print('This configure script is a thin configuration shim over the true') - print('configuration system, `config.toml`. You can explore the comments') - print('in `config.example.toml` next to this configure script to see') - print('more information about what each option is. Additionally you can') - print('pass `--set` as an argument to set arbitrary key/value pairs') - print('in the TOML configuration if desired') - print('') - print('Also note that all options which take `--enable` can similarly') - print('be passed with `--disable-foo` to forcibly disable the option') + print("\t--enable-{:25} OR --disable-{}".format(option.name, option.name)) + print("\t\t" + option.desc) + print("") + print("This configure script is a thin configuration shim over the true") + print("configuration system, `config.toml`. You can explore the comments") + print("in `config.example.toml` next to this configure script to see") + print("more information about what each option is. 
Additionally you can") + print("pass `--set` as an argument to set arbitrary key/value pairs") + print("in the TOML configuration if desired") + print("") + print("Also note that all options which take `--enable` can similarly") + print("be passed with `--disable-foo` to forcibly disable the option") sys.exit(0) VERBOSE = False + # Parse all command line arguments into one of these three lists, handling # boolean and value-based options separately def parse_args(args): @@ -222,7 +377,7 @@ def parse_args(args): while i < len(args): arg = args[i] i += 1 - if not arg.startswith('--'): + if not arg.startswith("--"): unknown_args.append(arg) continue @@ -230,7 +385,7 @@ def parse_args(args): for option in options: value = None if option.value: - keyval = arg[2:].split('=', 1) + keyval = arg[2:].split("=", 1) key = keyval[0] if option.name != key: continue @@ -244,9 +399,9 @@ def parse_args(args): need_value_args.append(arg) continue else: - if arg[2:] == 'enable-' + option.name: + if arg[2:] == "enable-" + option.name: value = True - elif arg[2:] == 'disable-' + option.name: + elif arg[2:] == "disable-" + option.name: value = False else: continue @@ -263,8 +418,9 @@ def parse_args(args): # NOTE: here and a few other places, we use [-1] to apply the *last* value # passed. But if option-checking is enabled, then the known_args loop will # also assert that options are only passed once. 
- option_checking = ('option-checking' not in known_args - or known_args['option-checking'][-1][1]) + option_checking = ( + "option-checking" not in known_args or known_args["option-checking"][-1][1] + ) if option_checking: if len(unknown_args) > 0: err("Option '" + unknown_args[0] + "' is not recognized") @@ -272,18 +428,18 @@ def parse_args(args): err("Option '{0}' needs a value ({0}=val)".format(need_value_args[0])) global VERBOSE - VERBOSE = 'verbose-configure' in known_args + VERBOSE = "verbose-configure" in known_args config = {} - set('build.configure-args', args, config) + set("build.configure-args", args, config) apply_args(known_args, option_checking, config) return parse_example_config(known_args, config) def build(known_args): - if 'build' in known_args: - return known_args['build'][-1][1] + if "build" in known_args: + return known_args["build"][-1][1] return bootstrap.default_build_triple(verbose=False) @@ -291,7 +447,7 @@ def set(key, value, config): if isinstance(value, list): # Remove empty values, which value.split(',') tends to generate and # replace single quotes for double quotes to ensure correct parsing. 
- value = [v.replace('\'', '"') for v in value if v] + value = [v.replace("'", '"') for v in value if v] s = "{:20} := {}".format(key, value) if len(s) < 70 or VERBOSE: @@ -310,7 +466,7 @@ def set(key, value, config): for i, part in enumerate(parts): if i == len(parts) - 1: if is_value_list(part) and isinstance(value, str): - value = value.split(',') + value = value.split(",") arr[part] = value else: if part not in arr: @@ -321,9 +477,9 @@ def set(key, value, config): def apply_args(known_args, option_checking, config): for key in known_args: # The `set` option is special and can be passed a bunch of times - if key == 'set': + if key == "set": for _option, value in known_args[key]: - keyval = value.split('=', 1) + keyval = value.split("=", 1) if len(keyval) == 1 or keyval[1] == "true": value = True elif keyval[1] == "false": @@ -348,50 +504,55 @@ def apply_args(known_args, option_checking, config): # that here. build_triple = build(known_args) - if option.name == 'sccache': - set('llvm.ccache', 'sccache', config) - elif option.name == 'local-rust': - for path in os.environ['PATH'].split(os.pathsep): - if os.path.exists(path + '/rustc'): - set('build.rustc', path + '/rustc', config) + if option.name == "sccache": + set("llvm.ccache", "sccache", config) + elif option.name == "local-rust": + for path in os.environ["PATH"].split(os.pathsep): + if os.path.exists(path + "/rustc"): + set("build.rustc", path + "/rustc", config) break - for path in os.environ['PATH'].split(os.pathsep): - if os.path.exists(path + '/cargo'): - set('build.cargo', path + '/cargo', config) + for path in os.environ["PATH"].split(os.pathsep): + if os.path.exists(path + "/cargo"): + set("build.cargo", path + "/cargo", config) break - elif option.name == 'local-rust-root': - set('build.rustc', value + '/bin/rustc', config) - set('build.cargo', value + '/bin/cargo', config) - elif option.name == 'llvm-root': - set('target.{}.llvm-config'.format(build_triple), value + '/bin/llvm-config', config) - 
elif option.name == 'llvm-config': - set('target.{}.llvm-config'.format(build_triple), value, config) - elif option.name == 'llvm-filecheck': - set('target.{}.llvm-filecheck'.format(build_triple), value, config) - elif option.name == 'tools': - set('build.tools', value.split(','), config) - elif option.name == 'bootstrap-cache-path': - set('build.bootstrap-cache-path', value, config) - elif option.name == 'codegen-backends': - set('rust.codegen-backends', value.split(','), config) - elif option.name == 'host': - set('build.host', value.split(','), config) - elif option.name == 'target': - set('build.target', value.split(','), config) - elif option.name == 'full-tools': - set('rust.codegen-backends', ['llvm'], config) - set('rust.lld', True, config) - set('rust.llvm-tools', True, config) - set('rust.llvm-bitcode-linker', True, config) - set('build.extended', True, config) - elif option.name in ['option-checking', 'verbose-configure']: + elif option.name == "local-rust-root": + set("build.rustc", value + "/bin/rustc", config) + set("build.cargo", value + "/bin/cargo", config) + elif option.name == "llvm-root": + set( + "target.{}.llvm-config".format(build_triple), + value + "/bin/llvm-config", + config, + ) + elif option.name == "llvm-config": + set("target.{}.llvm-config".format(build_triple), value, config) + elif option.name == "llvm-filecheck": + set("target.{}.llvm-filecheck".format(build_triple), value, config) + elif option.name == "tools": + set("build.tools", value.split(","), config) + elif option.name == "bootstrap-cache-path": + set("build.bootstrap-cache-path", value, config) + elif option.name == "codegen-backends": + set("rust.codegen-backends", value.split(","), config) + elif option.name == "host": + set("build.host", value.split(","), config) + elif option.name == "target": + set("build.target", value.split(","), config) + elif option.name == "full-tools": + set("rust.codegen-backends", ["llvm"], config) + set("rust.lld", True, config) + 
set("rust.llvm-tools", True, config) + set("rust.llvm-bitcode-linker", True, config) + set("build.extended", True, config) + elif option.name in ["option-checking", "verbose-configure"]: # this was handled above pass - elif option.name == 'dist-compression-formats': - set('dist.compression-formats', value.split(','), config) + elif option.name == "dist-compression-formats": + set("dist.compression-formats", value.split(","), config) else: raise RuntimeError("unhandled option {}".format(option.name)) + # "Parse" the `config.example.toml` file into the various sections, and we'll # use this as a template of a `config.toml` to write out which preserves # all the various comments and whatnot. @@ -406,20 +567,22 @@ def parse_example_config(known_args, config): targets = {} top_level_keys = [] - with open(rust_dir + '/config.example.toml') as example_config: + with open(rust_dir + "/config.example.toml") as example_config: example_lines = example_config.read().split("\n") for line in example_lines: if cur_section is None: - if line.count('=') == 1: - top_level_key = line.split('=')[0] - top_level_key = top_level_key.strip(' #') + if line.count("=") == 1: + top_level_key = line.split("=")[0] + top_level_key = top_level_key.strip(" #") top_level_keys.append(top_level_key) - if line.startswith('['): + if line.startswith("["): cur_section = line[1:-1] - if cur_section.startswith('target'): - cur_section = 'target' - elif '.' in cur_section: - raise RuntimeError("don't know how to deal with section: {}".format(cur_section)) + if cur_section.startswith("target"): + cur_section = "target" + elif "." 
in cur_section: + raise RuntimeError( + "don't know how to deal with section: {}".format(cur_section) + ) sections[cur_section] = [line] section_order.append(cur_section) else: @@ -428,22 +591,25 @@ def parse_example_config(known_args, config): # Fill out the `targets` array by giving all configured targets a copy of the # `target` section we just loaded from the example config configured_targets = [build(known_args)] - if 'build' in config: - if 'host' in config['build']: - configured_targets += config['build']['host'] - if 'target' in config['build']: - configured_targets += config['build']['target'] - if 'target' in config: - for target in config['target']: + if "build" in config: + if "host" in config["build"]: + configured_targets += config["build"]["host"] + if "target" in config["build"]: + configured_targets += config["build"]["target"] + if "target" in config: + for target in config["target"]: configured_targets.append(target) for target in configured_targets: - targets[target] = sections['target'][:] + targets[target] = sections["target"][:] # For `.` to be valid TOML, it needs to be quoted. But `bootstrap.py` doesn't use a proper TOML parser and fails to parse the target. # Avoid using quotes unless it's necessary. - targets[target][0] = targets[target][0].replace("x86_64-unknown-linux-gnu", "'{}'".format(target) if "." in target else target) + targets[target][0] = targets[target][0].replace( + "x86_64-unknown-linux-gnu", + "'{}'".format(target) if "." 
in target else target, + ) - if 'profile' not in config: - set('profile', 'dist', config) + if "profile" not in config: + set("profile", "dist", config) configure_file(sections, top_level_keys, targets, config) return section_order, sections, targets @@ -467,7 +633,7 @@ def to_toml(value): else: return "false" elif isinstance(value, list): - return '[' + ', '.join(map(to_toml, value)) + ']' + return "[" + ", ".join(map(to_toml, value)) + "]" elif isinstance(value, str): # Don't put quotes around numeric values if is_number(value): @@ -475,9 +641,18 @@ def to_toml(value): else: return "'" + value + "'" elif isinstance(value, dict): - return "{" + ", ".join(map(lambda a: "{} = {}".format(to_toml(a[0]), to_toml(a[1])), value.items())) + "}" + return ( + "{" + + ", ".join( + map( + lambda a: "{} = {}".format(to_toml(a[0]), to_toml(a[1])), + value.items(), + ) + ) + + "}" + ) else: - raise RuntimeError('no toml') + raise RuntimeError("no toml") def configure_section(lines, config): @@ -485,7 +660,7 @@ def configure_section(lines, config): value = config[key] found = False for i, line in enumerate(lines): - if not line.startswith('#' + key + ' = '): + if not line.startswith("#" + key + " = "): continue found = True lines[i] = "{} = {}".format(key, to_toml(value)) @@ -501,7 +676,9 @@ def configure_section(lines, config): def configure_top_level_key(lines, top_level_key, value): for i, line in enumerate(lines): - if line.startswith('#' + top_level_key + ' = ') or line.startswith(top_level_key + ' = '): + if line.startswith("#" + top_level_key + " = ") or line.startswith( + top_level_key + " = " + ): lines[i] = "{} = {}".format(top_level_key, to_toml(value)) return @@ -512,11 +689,13 @@ def configure_top_level_key(lines, top_level_key, value): def configure_file(sections, top_level_keys, targets, config): for section_key, section_config in config.items(): if section_key not in sections and section_key not in top_level_keys: - raise RuntimeError("config key {} not in 
sections or top_level_keys".format(section_key)) + raise RuntimeError( + "config key {} not in sections or top_level_keys".format(section_key) + ) if section_key in top_level_keys: configure_top_level_key(sections[None], section_key, section_config) - elif section_key == 'target': + elif section_key == "target": for target in section_config: configure_section(targets[target], section_config[target]) else: @@ -536,18 +715,19 @@ def write_uncommented(target, f): block = [] is_comment = True continue - is_comment = is_comment and line.startswith('#') + is_comment = is_comment and line.startswith("#") return f def write_config_toml(writer, section_order, targets, sections): for section in section_order: - if section == 'target': + if section == "target": for target in targets: writer = write_uncommented(targets[target], writer) else: writer = write_uncommented(sections[section], writer) + def quit_if_file_exists(file): if os.path.isfile(file): msg = "Existing '{}' detected. Exiting".format(file) @@ -559,9 +739,10 @@ def quit_if_file_exists(file): err(msg) + if __name__ == "__main__": # If 'config.toml' already exists, exit the script at this point - quit_if_file_exists('config.toml') + quit_if_file_exists("config.toml") if "GITHUB_ACTIONS" in os.environ: print("::group::Configure the build") @@ -575,13 +756,13 @@ def quit_if_file_exists(file): # order that we read it in. 
p("") p("writing `config.toml` in current directory") - with bootstrap.output('config.toml') as f: + with bootstrap.output("config.toml") as f: write_config_toml(f, section_order, targets, sections) - with bootstrap.output('Makefile') as f: - contents = os.path.join(rust_dir, 'src', 'bootstrap', 'mk', 'Makefile.in') + with bootstrap.output("Makefile") as f: + contents = os.path.join(rust_dir, "src", "bootstrap", "mk", "Makefile.in") contents = open(contents).read() - contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + '/') + contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + "/") contents = contents.replace("$(CFG_PYTHON)", sys.executable) f.write(contents) diff --git a/src/ci/cpu-usage-over-time.py b/src/ci/cpu-usage-over-time.py index adfd895ead04e..3d9dc86734fcc 100755 --- a/src/ci/cpu-usage-over-time.py +++ b/src/ci/cpu-usage-over-time.py @@ -40,12 +40,13 @@ # Python 3.3 changed the value of `sys.platform` on Linux from "linux2" to just # "linux". We check here with `.startswith` to keep compatibility with older # Python versions (especially Python 2.7). 
-if sys.platform.startswith('linux'): +if sys.platform.startswith("linux"): + class State: def __init__(self): - with open('/proc/stat', 'r') as file: + with open("/proc/stat", "r") as file: data = file.readline().split() - if data[0] != 'cpu': + if data[0] != "cpu": raise Exception('did not start with "cpu"') self.user = int(data[1]) self.nice = int(data[2]) @@ -69,10 +70,21 @@ def idle_since(self, prev): steal = self.steal - prev.steal guest = self.guest - prev.guest guest_nice = self.guest_nice - prev.guest_nice - total = user + nice + system + idle + iowait + irq + softirq + steal + guest + guest_nice + total = ( + user + + nice + + system + + idle + + iowait + + irq + + softirq + + steal + + guest + + guest_nice + ) return float(idle) / float(total) * 100 -elif sys.platform == 'win32': +elif sys.platform == "win32": from ctypes.wintypes import DWORD from ctypes import Structure, windll, WinError, GetLastError, byref @@ -104,9 +116,10 @@ def idle_since(self, prev): kernel = self.kernel - prev.kernel return float(idle) / float(user + kernel) * 100 -elif sys.platform == 'darwin': +elif sys.platform == "darwin": from ctypes import * - libc = cdll.LoadLibrary('/usr/lib/libc.dylib') + + libc = cdll.LoadLibrary("/usr/lib/libc.dylib") class host_cpu_load_info_data_t(Structure): _fields_ = [("cpu_ticks", c_uint * 4)] @@ -116,7 +129,7 @@ class host_cpu_load_info_data_t(Structure): c_uint, c_int, POINTER(host_cpu_load_info_data_t), - POINTER(c_int) + POINTER(c_int), ] host_statistics.restype = c_int @@ -124,13 +137,14 @@ class host_cpu_load_info_data_t(Structure): CPU_STATE_SYSTEM = 1 CPU_STATE_IDLE = 2 CPU_STATE_NICE = 3 + class State: def __init__(self): stats = host_cpu_load_info_data_t() - count = c_int(4) # HOST_CPU_LOAD_INFO_COUNT + count = c_int(4) # HOST_CPU_LOAD_INFO_COUNT err = libc.host_statistics( libc.mach_host_self(), - c_int(3), # HOST_CPU_LOAD_INFO + c_int(3), # HOST_CPU_LOAD_INFO byref(stats), byref(count), ) @@ -148,7 +162,7 @@ def idle_since(self, 
prev): return float(idle) / float(user + system + idle + nice) * 100.0 else: - print('unknown platform', sys.platform) + print("unknown platform", sys.platform) sys.exit(1) cur_state = State() diff --git a/src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile b/src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile index e2b66c2cff1d8..e273672060732 100644 --- a/src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile +++ b/src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile @@ -27,5 +27,5 @@ RUN echo "[rust]" > /config/nopt-std-config.toml RUN echo "optimize = false" >> /config/nopt-std-config.toml ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu --disable-optimize-tests -ENV SCRIPT python3 ../x.py test --stage 0 --config /config/nopt-std-config.toml library/std \ - && python3 ../x.py --stage 2 test +ARG SCRIPT_ARG +ENV SCRIPT=${SCRIPT_ARG} diff --git a/src/ci/docker/host-x86_64/i686-gnu/Dockerfile b/src/ci/docker/host-x86_64/i686-gnu/Dockerfile index 61811c41904c4..dec25461bb4e8 100644 --- a/src/ci/docker/host-x86_64/i686-gnu/Dockerfile +++ b/src/ci/docker/host-x86_64/i686-gnu/Dockerfile @@ -24,10 +24,5 @@ COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu -# Skip some tests that are unlikely to be platform specific, to speed up -# this slow job. 
-ENV SCRIPT python3 ../x.py --stage 2 test \ - --skip src/bootstrap \ - --skip tests/rustdoc-js \ - --skip src/tools/error_index_generator \ - --skip src/tools/linkchecker +ARG SCRIPT_ARG +ENV SCRIPT=${SCRIPT_ARG} diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py index 3577643ca550a..4f877389fbcfe 100755 --- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py +++ b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py @@ -8,78 +8,79 @@ from pathlib import Path -TARGET_AARCH64 = 'aarch64-unknown-uefi' -TARGET_I686 = 'i686-unknown-uefi' -TARGET_X86_64 = 'x86_64-unknown-uefi' +TARGET_AARCH64 = "aarch64-unknown-uefi" +TARGET_I686 = "i686-unknown-uefi" +TARGET_X86_64 = "x86_64-unknown-uefi" + def run(*cmd, capture=False, check=True, env=None, timeout=None): """Print and run a command, optionally capturing the output.""" cmd = [str(p) for p in cmd] - print(' '.join(cmd)) - return subprocess.run(cmd, - capture_output=capture, - check=check, - env=env, - text=True, - timeout=timeout) + print(" ".join(cmd)) + return subprocess.run( + cmd, capture_output=capture, check=check, env=env, text=True, timeout=timeout + ) + def build_and_run(tmp_dir, target): if target == TARGET_AARCH64: - boot_file_name = 'bootaa64.efi' - ovmf_dir = Path('/usr/share/AAVMF') - ovmf_code = 'AAVMF_CODE.fd' - ovmf_vars = 'AAVMF_VARS.fd' - qemu = 'qemu-system-aarch64' - machine = 'virt' - cpu = 'cortex-a72' + boot_file_name = "bootaa64.efi" + ovmf_dir = Path("/usr/share/AAVMF") + ovmf_code = "AAVMF_CODE.fd" + ovmf_vars = "AAVMF_VARS.fd" + qemu = "qemu-system-aarch64" + machine = "virt" + cpu = "cortex-a72" elif target == TARGET_I686: - boot_file_name = 'bootia32.efi' - ovmf_dir = Path('/usr/share/OVMF') - ovmf_code = 'OVMF32_CODE_4M.secboot.fd' - ovmf_vars = 'OVMF32_VARS_4M.fd' + boot_file_name = "bootia32.efi" + ovmf_dir = Path("/usr/share/OVMF") + ovmf_code = "OVMF32_CODE_4M.secboot.fd" + 
ovmf_vars = "OVMF32_VARS_4M.fd" # The i686 target intentionally uses 64-bit qemu; the important # difference is that the OVMF code provides a 32-bit environment. - qemu = 'qemu-system-x86_64' - machine = 'q35' - cpu = 'qemu64' + qemu = "qemu-system-x86_64" + machine = "q35" + cpu = "qemu64" elif target == TARGET_X86_64: - boot_file_name = 'bootx64.efi' - ovmf_dir = Path('/usr/share/OVMF') - ovmf_code = 'OVMF_CODE.fd' - ovmf_vars = 'OVMF_VARS.fd' - qemu = 'qemu-system-x86_64' - machine = 'q35' - cpu = 'qemu64' + boot_file_name = "bootx64.efi" + ovmf_dir = Path("/usr/share/OVMF") + ovmf_code = "OVMF_CODE.fd" + ovmf_vars = "OVMF_VARS.fd" + qemu = "qemu-system-x86_64" + machine = "q35" + cpu = "qemu64" else: - raise KeyError('invalid target') + raise KeyError("invalid target") - host_artifacts = Path('/checkout/obj/build/x86_64-unknown-linux-gnu') - stage0 = host_artifacts / 'stage0/bin' - stage2 = host_artifacts / 'stage2/bin' + host_artifacts = Path("/checkout/obj/build/x86_64-unknown-linux-gnu") + stage0 = host_artifacts / "stage0/bin" + stage2 = host_artifacts / "stage2/bin" env = dict(os.environ) - env['PATH'] = '{}:{}:{}'.format(stage2, stage0, env['PATH']) + env["PATH"] = "{}:{}:{}".format(stage2, stage0, env["PATH"]) # Copy the test create into `tmp_dir`. - test_crate = Path(tmp_dir) / 'uefi_qemu_test' - shutil.copytree('/uefi_qemu_test', test_crate) + test_crate = Path(tmp_dir) / "uefi_qemu_test" + shutil.copytree("/uefi_qemu_test", test_crate) # Build the UEFI executable. - run('cargo', - 'build', - '--manifest-path', - test_crate / 'Cargo.toml', - '--target', + run( + "cargo", + "build", + "--manifest-path", + test_crate / "Cargo.toml", + "--target", target, - env=env) + env=env, + ) # Create a mock EFI System Partition in a subdirectory. - esp = test_crate / 'esp' - boot = esp / 'efi/boot' + esp = test_crate / "esp" + boot = esp / "efi/boot" os.makedirs(boot, exist_ok=True) # Copy the executable into the ESP. 
- src_exe_path = test_crate / 'target' / target / 'debug/uefi_qemu_test.efi' + src_exe_path = test_crate / "target" / target / "debug/uefi_qemu_test.efi" shutil.copy(src_exe_path, boot / boot_file_name) print(src_exe_path, boot / boot_file_name) @@ -89,37 +90,39 @@ def build_and_run(tmp_dir, target): # Make a writable copy of the vars file. aarch64 doesn't boot # correctly with read-only vars. - ovmf_rw_vars = Path(tmp_dir) / 'vars.fd' + ovmf_rw_vars = Path(tmp_dir) / "vars.fd" shutil.copy(ovmf_vars, ovmf_rw_vars) # Run the executable in QEMU and capture the output. - output = run(qemu, - '-machine', - machine, - '-cpu', - cpu, - '-display', - 'none', - '-serial', - 'stdio', - '-drive', - f'if=pflash,format=raw,readonly=on,file={ovmf_code}', - '-drive', - f'if=pflash,format=raw,readonly=off,file={ovmf_rw_vars}', - '-drive', - f'format=raw,file=fat:rw:{esp}', - capture=True, - check=True, - # Set a timeout to kill the VM in case something goes wrong. - timeout=60).stdout - - if 'Hello World!' in output: - print('VM produced expected output') + output = run( + qemu, + "-machine", + machine, + "-cpu", + cpu, + "-display", + "none", + "-serial", + "stdio", + "-drive", + f"if=pflash,format=raw,readonly=on,file={ovmf_code}", + "-drive", + f"if=pflash,format=raw,readonly=off,file={ovmf_rw_vars}", + "-drive", + f"format=raw,file=fat:rw:{esp}", + capture=True, + check=True, + # Set a timeout to kill the VM in case something goes wrong. + timeout=60, + ).stdout + + if "Hello World!" 
in output: + print("VM produced expected output") else: - print('unexpected VM output:') - print('---start---') + print("unexpected VM output:") + print("---start---") print(output) - print('---end---') + print("---end---") sys.exit(1) diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version b/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version index 6b2d58c8ef350..7211b157c6945 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version +++ b/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version @@ -1 +1 @@ -0.18.1 \ No newline at end of file +0.18.2 \ No newline at end of file diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh index d554186df4cfe..a0adf60b6b2c9 100755 --- a/src/ci/docker/run.sh +++ b/src/ci/docker/run.sh @@ -105,6 +105,23 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then # It seems that it cannot be the same as $IMAGE_TAG, otherwise it overwrites the cache CACHE_IMAGE_TAG=${REGISTRY}/${REGISTRY_USERNAME}/rust-ci-cache:${cksum} + # Docker build arguments. + build_args=( + "build" + "--rm" + "-t" "rust-ci" + "-f" "$dockerfile" + "$context" + ) + + # If the environment variable DOCKER_SCRIPT is defined, + # set the build argument SCRIPT_ARG to DOCKER_SCRIPT. + # In this way, we run the script defined in CI, + # instead of the one defined in the Dockerfile. + if [ -n "${DOCKER_SCRIPT+x}" ]; then + build_args+=("--build-arg" "SCRIPT_ARG=${DOCKER_SCRIPT}") + fi + # On non-CI jobs, we try to download a pre-built image from the rust-lang-ci # ghcr.io registry. If it is not possible, we fall back to building the image # locally. @@ -115,7 +132,7 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then docker tag "${IMAGE_TAG}" rust-ci else echo "Building local Docker image" - retry docker build --rm -t rust-ci -f "$dockerfile" "$context" + retry docker "${build_args[@]}" fi # On PR CI jobs, we don't have permissions to write to the registry cache, # but we can still read from it. 
@@ -127,13 +144,9 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then # Build the image using registry caching backend retry docker \ buildx \ - build \ - --rm \ - -t rust-ci \ - -f "$dockerfile" \ + "${build_args[@]}" \ --cache-from type=registry,ref=${CACHE_IMAGE_TAG} \ - --output=type=docker \ - "$context" + --output=type=docker # On auto/try builds, we can also write to the cache. else # Log into the Docker registry, so that we can read/write cache and the final image @@ -147,14 +160,10 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then # Build the image using registry caching backend retry docker \ buildx \ - build \ - --rm \ - -t rust-ci \ - -f "$dockerfile" \ + "${build_args[@]}" \ --cache-from type=registry,ref=${CACHE_IMAGE_TAG} \ --cache-to type=registry,ref=${CACHE_IMAGE_TAG},compression=zstd \ - --output=type=docker \ - "$context" + --output=type=docker # Print images for debugging purposes docker images diff --git a/src/ci/docker/scripts/android-sdk-manager.py b/src/ci/docker/scripts/android-sdk-manager.py index 66cba58427bad..6356f15a8865c 100755 --- a/src/ci/docker/scripts/android-sdk-manager.py +++ b/src/ci/docker/scripts/android-sdk-manager.py @@ -35,6 +35,7 @@ MIRROR_BUCKET_REGION = "us-west-1" MIRROR_BASE_DIR = "rustc/android/" + class Package: def __init__(self, path, url, sha1, deps=None): if deps is None: @@ -53,18 +54,25 @@ def download(self, base_url): sha1 = hashlib.sha1(f.read()).hexdigest() if sha1 != self.sha1: raise RuntimeError( - "hash mismatch for package " + self.path + ": " + - sha1 + " vs " + self.sha1 + " (known good)" + "hash mismatch for package " + + self.path + + ": " + + sha1 + + " vs " + + self.sha1 + + " (known good)" ) return file def __repr__(self): - return " Optional[WorkflowRunType]: try_build = ctx.ref in ( "refs/heads/try", "refs/heads/try-perf", - "refs/heads/automation/bors/try" + "refs/heads/automation/bors/try", ) # Unrolled branch from a rollup for testing perf @@ -135,11 +136,15 @@ def calculate_jobs(run_type: 
WorkflowRunType, job_data: Dict[str, Any]) -> List[ continue jobs.append(job[0]) if unknown_jobs: - raise Exception(f"Custom job(s) `{unknown_jobs}` not found in auto jobs") + raise Exception( + f"Custom job(s) `{unknown_jobs}` not found in auto jobs" + ) return add_base_env(name_jobs(jobs, "try"), job_data["envs"]["try"]) elif isinstance(run_type, AutoRunType): - return add_base_env(name_jobs(job_data["auto"], "auto"), job_data["envs"]["auto"]) + return add_base_env( + name_jobs(job_data["auto"], "auto"), job_data["envs"]["auto"] + ) return [] @@ -161,7 +166,7 @@ def get_github_ctx() -> GitHubCtx: event_name=event_name, ref=os.environ["GITHUB_REF"], repository=os.environ["GITHUB_REPOSITORY"], - commit_message=commit_message + commit_message=commit_message, ) diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml index 2ea37c168dd3c..288b133f0da1e 100644 --- a/src/ci/github-actions/jobs.yml +++ b/src/ci/github-actions/jobs.yml @@ -58,6 +58,22 @@ envs: NO_DEBUG_ASSERTIONS: 1 NO_OVERFLOW_CHECKS: 1 + # Different set of tests to run tests in parallel in multiple jobs. + stage_2_test_set1: &stage_2_test_set1 + DOCKER_SCRIPT: >- + python3 ../x.py --stage 2 test + --skip compiler + --skip src + + stage_2_test_set2: &stage_2_test_set2 + DOCKER_SCRIPT: >- + python3 ../x.py --stage 2 test + --skip tests + --skip coverage-map + --skip coverage-run + --skip library + --skip tidyselftest + production: &production DEPLOY_BUCKET: rust-lang-ci2 @@ -212,11 +228,42 @@ auto: - image: dist-x86_64-netbsd <<: *job-linux-4c - - image: i686-gnu - <<: *job-linux-8c + # The i686-gnu job is split into multiple jobs to run tests in parallel. + # i686-gnu-1 skips tests that run in i686-gnu-2. 
+ - image: i686-gnu-1 + env: + IMAGE: i686-gnu + <<: *stage_2_test_set1 + <<: *job-linux-4c - - image: i686-gnu-nopt - <<: *job-linux-8c + # Skip tests that run in i686-gnu-1 + - image: i686-gnu-2 + env: + IMAGE: i686-gnu + <<: *stage_2_test_set2 + <<: *job-linux-4c + + # The i686-gnu-nopt job is split into multiple jobs to run tests in parallel. + # i686-gnu-nopt-1 skips tests that run in i686-gnu-nopt-2 + - image: i686-gnu-nopt-1 + env: + IMAGE: i686-gnu-nopt + <<: *stage_2_test_set1 + <<: *job-linux-4c + + # Skip tests that run in i686-gnu-nopt-1 + - image: i686-gnu-nopt-2 + env: + IMAGE: i686-gnu-nopt + DOCKER_SCRIPT: >- + python3 ../x.py test --stage 0 --config /config/nopt-std-config.toml library/std && + python3 ../x.py --stage 2 test + --skip tests + --skip coverage-map + --skip coverage-run + --skip library + --skip tidyselftest + <<: *job-linux-4c - image: mingw-check <<: *job-linux-4c diff --git a/src/ci/scripts/upload-build-metrics.py b/src/ci/scripts/upload-build-metrics.py index a95e0949d700a..23061884a39e6 100644 --- a/src/ci/scripts/upload-build-metrics.py +++ b/src/ci/scripts/upload-build-metrics.py @@ -19,6 +19,7 @@ `path-to-CPU-usage-CSV` is a path to a CSV generated by the `src/ci/cpu-usage-over-time.py` script. 
""" + import argparse import csv import os @@ -31,7 +32,7 @@ def load_cpu_usage(path: Path) -> List[float]: usage = [] with open(path) as f: - reader = csv.reader(f, delimiter=',') + reader = csv.reader(f, delimiter=",") for row in reader: # The log might contain incomplete rows or some Python exception if len(row) == 2: @@ -50,25 +51,21 @@ def upload_datadog_measure(name: str, value: float): print(f"Metric {name}: {value:.4f}") datadog_cmd = "datadog-ci" - if os.getenv("GITHUB_ACTIONS") is not None and sys.platform.lower().startswith("win"): + if os.getenv("GITHUB_ACTIONS") is not None and sys.platform.lower().startswith( + "win" + ): # Due to weird interaction of MSYS2 and Python, we need to use an absolute path, # and also specify the ".cmd" at the end. See https://github.com/rust-lang/rust/pull/125771. datadog_cmd = "C:\\npm\\prefix\\datadog-ci.cmd" - subprocess.run([ - datadog_cmd, - "measure", - "--level", "job", - "--measures", f"{name}:{value}" - ], - check=False + subprocess.run( + [datadog_cmd, "measure", "--level", "job", "--measures", f"{name}:{value}"], + check=False, ) if __name__ == "__main__": - parser = argparse.ArgumentParser( - prog="DataDog metric uploader" - ) + parser = argparse.ArgumentParser(prog="DataDog metric uploader") parser.add_argument("cpu-usage-history-csv") args = parser.parse_args() diff --git a/src/etc/dec2flt_table.py b/src/etc/dec2flt_table.py index 9264a8439bb71..791186de9c190 100755 --- a/src/etc/dec2flt_table.py +++ b/src/etc/dec2flt_table.py @@ -13,6 +13,7 @@ Adapted from Daniel Lemire's fast_float ``table_generation.py``, available here: . """ + from __future__ import print_function from math import ceil, floor, log from collections import deque @@ -34,6 +35,7 @@ // the final binary. 
""" + def main(): min_exp = minimum_exponent(10) max_exp = maximum_exponent(10) @@ -41,10 +43,10 @@ def main(): print(HEADER.strip()) print() - print('pub const SMALLEST_POWER_OF_FIVE: i32 = {};'.format(min_exp)) - print('pub const LARGEST_POWER_OF_FIVE: i32 = {};'.format(max_exp)) - print('pub const N_POWERS_OF_FIVE: usize = ', end='') - print('(LARGEST_POWER_OF_FIVE - SMALLEST_POWER_OF_FIVE + 1) as usize;') + print("pub const SMALLEST_POWER_OF_FIVE: i32 = {};".format(min_exp)) + print("pub const LARGEST_POWER_OF_FIVE: i32 = {};".format(max_exp)) + print("pub const N_POWERS_OF_FIVE: usize = ", end="") + print("(LARGEST_POWER_OF_FIVE - SMALLEST_POWER_OF_FIVE + 1) as usize;") print() print_proper_powers(min_exp, max_exp, bias) @@ -54,7 +56,7 @@ def minimum_exponent(base): def maximum_exponent(base): - return floor(log(1.7976931348623157e+308, base)) + return floor(log(1.7976931348623157e308, base)) def print_proper_powers(min_exp, max_exp, bias): @@ -64,46 +66,46 @@ def print_proper_powers(min_exp, max_exp, bias): # 2^(2b)/(5^−q) with b=64 + int(math.ceil(log2(5^−q))) powers = [] for q in range(min_exp, 0): - power5 = 5 ** -q + power5 = 5**-q z = 0 while (1 << z) < power5: z += 1 if q >= -27: b = z + 127 - c = 2 ** b // power5 + 1 + c = 2**b // power5 + 1 powers.append((c, q)) else: b = 2 * z + 2 * 64 - c = 2 ** b // power5 + 1 + c = 2**b // power5 + 1 # truncate - while c >= (1<<128): + while c >= (1 << 128): c //= 2 powers.append((c, q)) # Add positive exponents for q in range(0, max_exp + 1): - power5 = 5 ** q + power5 = 5**q # move the most significant bit in position - while power5 < (1<<127): + while power5 < (1 << 127): power5 *= 2 # *truncate* - while power5 >= (1<<128): + while power5 >= (1 << 128): power5 //= 2 powers.append((power5, q)) # Print the powers. 
print(STATIC_WARNING.strip()) - print('#[rustfmt::skip]') - typ = '[(u64, u64); N_POWERS_OF_FIVE]' - print('pub static POWER_OF_FIVE_128: {} = ['.format(typ)) + print("#[rustfmt::skip]") + typ = "[(u64, u64); N_POWERS_OF_FIVE]" + print("pub static POWER_OF_FIVE_128: {} = [".format(typ)) for c, exp in powers: - hi = '0x{:x}'.format(c // (1 << 64)) - lo = '0x{:x}'.format(c % (1 << 64)) - value = ' ({}, {}), '.format(hi, lo) - comment = '// {}^{}'.format(5, exp) - print(value.ljust(46, ' ') + comment) - print('];') + hi = "0x{:x}".format(c // (1 << 64)) + lo = "0x{:x}".format(c % (1 << 64)) + value = " ({}, {}), ".format(hi, lo) + comment = "// {}^{}".format(5, exp) + print(value.ljust(46, " ") + comment) + print("];") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/src/etc/gdb_load_rust_pretty_printers.py b/src/etc/gdb_load_rust_pretty_printers.py index e05039ce47447..73d1e79f9617d 100644 --- a/src/etc/gdb_load_rust_pretty_printers.py +++ b/src/etc/gdb_load_rust_pretty_printers.py @@ -1,6 +1,7 @@ # Add this folder to the python sys path; GDB Python-interpreter will now find modules in this path import sys from os import path + self_dir = path.dirname(path.realpath(__file__)) sys.path.append(self_dir) diff --git a/src/etc/gdb_lookup.py b/src/etc/gdb_lookup.py index f3ac9c1097806..d368f7ed1ec5c 100644 --- a/src/etc/gdb_lookup.py +++ b/src/etc/gdb_lookup.py @@ -6,8 +6,11 @@ from rust_types import * -_gdb_version_matched = re.search('([0-9]+)\\.([0-9]+)', gdb.VERSION) -gdb_version = [int(num) for num in _gdb_version_matched.groups()] if _gdb_version_matched else [] +_gdb_version_matched = re.search("([0-9]+)\\.([0-9]+)", gdb.VERSION) +gdb_version = ( + [int(num) for num in _gdb_version_matched.groups()] if _gdb_version_matched else [] +) + def register_printers(objfile): objfile.pretty_printers.append(printer) diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index e8f9dee07d3e9..34bb5c39909e4 100644 --- 
a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -21,7 +21,7 @@ def unwrap_unique_or_non_null(unique_or_nonnull): # GDB 14 has a tag class that indicates that extension methods are ok # to call. Use of this tag only requires that printers hide local # attributes and methods by prefixing them with "_". -if hasattr(gdb, 'ValuePrinter'): +if hasattr(gdb, "ValuePrinter"): printer_base = gdb.ValuePrinter else: printer_base = object @@ -98,7 +98,7 @@ def display_hint(): def _enumerate_array_elements(element_ptrs): - for (i, element_ptr) in enumerate(element_ptrs): + for i, element_ptr in enumerate(element_ptrs): key = "[{}]".format(i) element = element_ptr.dereference() @@ -173,7 +173,8 @@ def to_string(self): def children(self): return _enumerate_array_elements( - (self._data_ptr + ((self._head + index) % self._cap)) for index in xrange(self._size) + (self._data_ptr + ((self._head + index) % self._cap)) + for index in xrange(self._size) ) @staticmethod @@ -270,7 +271,9 @@ def children_of_btree_map(map): # Yields each key/value pair in the node and in any child nodes. def children_of_node(node_ptr, height): def cast_to_internal(node): - internal_type_name = node.type.target().name.replace("LeafNode", "InternalNode", 1) + internal_type_name = node.type.target().name.replace( + "LeafNode", "InternalNode", 1 + ) internal_type = gdb.lookup_type(internal_type_name) return node.cast(internal_type.pointer()) @@ -293,8 +296,16 @@ def cast_to_internal(node): # Avoid "Cannot perform pointer math on incomplete type" on zero-sized arrays. 
key_type_size = keys.type.sizeof val_type_size = vals.type.sizeof - key = keys[i]["value"]["value"] if key_type_size > 0 else gdb.parse_and_eval("()") - val = vals[i]["value"]["value"] if val_type_size > 0 else gdb.parse_and_eval("()") + key = ( + keys[i]["value"]["value"] + if key_type_size > 0 + else gdb.parse_and_eval("()") + ) + val = ( + vals[i]["value"]["value"] + if val_type_size > 0 + else gdb.parse_and_eval("()") + ) yield key, val if map["length"] > 0: @@ -352,7 +363,7 @@ def __init__(self, valobj, show_values=True): self._hashes = self._table["hashes"] self._hash_uint_type = self._hashes.type self._hash_uint_size = self._hashes.type.sizeof - self._modulo = 2 ** self._hash_uint_size + self._modulo = 2**self._hash_uint_size self._data_ptr = self._hashes[ZERO_FIELD]["pointer"] self._capacity_mask = int(self._table["capacity_mask"]) @@ -382,8 +393,14 @@ def children(self): hashes = self._hash_uint_size * self._capacity align = self._pair_type_size - len_rounded_up = (((((hashes + align) % self._modulo - 1) % self._modulo) & ~( - (align - 1) % self._modulo)) % self._modulo - hashes) % self._modulo + len_rounded_up = ( + ( + (((hashes + align) % self._modulo - 1) % self._modulo) + & ~((align - 1) % self._modulo) + ) + % self._modulo + - hashes + ) % self._modulo pairs_offset = hashes + len_rounded_up pairs_start = gdb.Value(start + pairs_offset).cast(self._pair_type.pointer()) diff --git a/src/etc/generate-deriving-span-tests.py b/src/etc/generate-deriving-span-tests.py index d61693460bc1f..2e810d7df97ba 100755 --- a/src/etc/generate-deriving-span-tests.py +++ b/src/etc/generate-deriving-span-tests.py @@ -12,7 +12,8 @@ import stat TEST_DIR = os.path.abspath( - os.path.join(os.path.dirname(__file__), '../test/ui/derives/')) + os.path.join(os.path.dirname(__file__), "../test/ui/derives/") +) TEMPLATE = """\ // This file was auto-generated using 'src/etc/generate-deriving-span-tests.py' @@ -56,28 +57,33 @@ def create_test_case(type, trait, super_traits, 
error_count): - string = [ENUM_STRING, ENUM_STRUCT_VARIANT_STRING, STRUCT_STRING, STRUCT_TUPLE_STRING][type] - all_traits = ','.join([trait] + super_traits) - super_traits = ','.join(super_traits) - error_deriving = '#[derive(%s)]' % super_traits if super_traits else '' - - errors = '\n'.join('//~%s ERROR' % ('^' * n) for n in range(error_count)) + string = [ + ENUM_STRING, + ENUM_STRUCT_VARIANT_STRING, + STRUCT_STRING, + STRUCT_TUPLE_STRING, + ][type] + all_traits = ",".join([trait] + super_traits) + super_traits = ",".join(super_traits) + error_deriving = "#[derive(%s)]" % super_traits if super_traits else "" + + errors = "\n".join("//~%s ERROR" % ("^" * n) for n in range(error_count)) code = string.format(traits=all_traits, errors=errors) return TEMPLATE.format(error_deriving=error_deriving, code=code) def write_file(name, string): - test_file = os.path.join(TEST_DIR, 'derives-span-%s.rs' % name) + test_file = os.path.join(TEST_DIR, "derives-span-%s.rs" % name) # set write permission if file exists, so it can be changed if os.path.exists(test_file): os.chmod(test_file, stat.S_IWUSR) - with open(test_file, 'w') as f: + with open(test_file, "w") as f: f.write(string) # mark file read-only - os.chmod(test_file, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH) + os.chmod(test_file, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH) ENUM = 1 @@ -85,29 +91,31 @@ def write_file(name, string): ALL = STRUCT | ENUM traits = { - 'Default': (STRUCT, [], 1), - 'FromPrimitive': (0, [], 0), # only works for C-like enums - - 'Decodable': (0, [], 0), # FIXME: quoting gives horrible spans - 'Encodable': (0, [], 0), # FIXME: quoting gives horrible spans + "Default": (STRUCT, [], 1), + "FromPrimitive": (0, [], 0), # only works for C-like enums + "Decodable": (0, [], 0), # FIXME: quoting gives horrible spans + "Encodable": (0, [], 0), # FIXME: quoting gives horrible spans } -for (trait, supers, errs) in [('Clone', [], 1), - ('PartialEq', [], 2), - ('PartialOrd', ['PartialEq'], 1), - ('Eq', 
['PartialEq'], 1), - ('Ord', ['Eq', 'PartialOrd', 'PartialEq'], 1), - ('Debug', [], 1), - ('Hash', [], 1)]: +for trait, supers, errs in [ + ("Clone", [], 1), + ("PartialEq", [], 2), + ("PartialOrd", ["PartialEq"], 1), + ("Eq", ["PartialEq"], 1), + ("Ord", ["Eq", "PartialOrd", "PartialEq"], 1), + ("Debug", [], 1), + ("Hash", [], 1), +]: traits[trait] = (ALL, supers, errs) -for (trait, (types, super_traits, error_count)) in traits.items(): +for trait, (types, super_traits, error_count) in traits.items(): + def mk(ty, t=trait, st=super_traits, ec=error_count): return create_test_case(ty, t, st, ec) if types & ENUM: - write_file(trait + '-enum', mk(ENUM_TUPLE)) - write_file(trait + '-enum-struct-variant', mk(ENUM_STRUCT)) + write_file(trait + "-enum", mk(ENUM_TUPLE)) + write_file(trait + "-enum-struct-variant", mk(ENUM_STRUCT)) if types & STRUCT: - write_file(trait + '-struct', mk(STRUCT_FIELDS)) - write_file(trait + '-tuple-struct', mk(STRUCT_TUPLE)) + write_file(trait + "-struct", mk(STRUCT_FIELDS)) + write_file(trait + "-tuple-struct", mk(STRUCT_TUPLE)) diff --git a/src/etc/generate-keyword-tests.py b/src/etc/generate-keyword-tests.py index 77c3d2758c6dc..28f932acd9d5f 100755 --- a/src/etc/generate-keyword-tests.py +++ b/src/etc/generate-keyword-tests.py @@ -22,18 +22,16 @@ } """ -test_dir = os.path.abspath( - os.path.join(os.path.dirname(__file__), '../test/ui/parser') -) +test_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../test/ui/parser")) for kw in sys.argv[1:]: - test_file = os.path.join(test_dir, 'keyword-%s-as-identifier.rs' % kw) + test_file = os.path.join(test_dir, "keyword-%s-as-identifier.rs" % kw) # set write permission if file exists, so it can be changed if os.path.exists(test_file): os.chmod(test_file, stat.S_IWUSR) - with open(test_file, 'wt') as f: + with open(test_file, "wt") as f: f.write(template % (kw, kw, kw)) # mark file read-only diff --git a/src/etc/htmldocck.py b/src/etc/htmldocck.py index 851b01a7458f2..d6b594aca71a2 
100755 --- a/src/etc/htmldocck.py +++ b/src/etc/htmldocck.py @@ -127,6 +127,7 @@ import re import shlex from collections import namedtuple + try: from html.parser import HTMLParser except ImportError: @@ -142,12 +143,28 @@ from htmlentitydefs import name2codepoint # "void elements" (no closing tag) from the HTML Standard section 12.1.2 -VOID_ELEMENTS = {'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen', - 'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr'} +VOID_ELEMENTS = { + "area", + "base", + "br", + "col", + "embed", + "hr", + "img", + "input", + "keygen", + "link", + "menuitem", + "meta", + "param", + "source", + "track", + "wbr", +} # Python 2 -> 3 compatibility try: - unichr # noqa: B018 FIXME: py2 + unichr # noqa: B018 FIXME: py2 except NameError: unichr = chr @@ -158,18 +175,20 @@ rust_test_path = None bless = None + class CustomHTMLParser(HTMLParser): """simplified HTML parser. this is possible because we are dealing with very regular HTML from rustdoc; we only have to deal with i) void elements and ii) empty attributes.""" + def __init__(self, target=None): HTMLParser.__init__(self) self.__builder = target or ET.TreeBuilder() def handle_starttag(self, tag, attrs): - attrs = {k: v or '' for k, v in attrs} + attrs = {k: v or "" for k, v in attrs} self.__builder.start(tag, attrs) if tag in VOID_ELEMENTS: self.__builder.end(tag) @@ -178,7 +197,7 @@ def handle_endtag(self, tag): self.__builder.end(tag) def handle_startendtag(self, tag, attrs): - attrs = {k: v or '' for k, v in attrs} + attrs = {k: v or "" for k, v in attrs} self.__builder.start(tag, attrs) self.__builder.end(tag) @@ -189,7 +208,7 @@ def handle_entityref(self, name): self.__builder.data(unichr(name2codepoint[name])) def handle_charref(self, name): - code = int(name[1:], 16) if name.startswith(('x', 'X')) else int(name, 10) + code = int(name[1:], 16) if name.startswith(("x", "X")) else int(name, 10) self.__builder.data(unichr(code)) def close(self): @@ -197,7 
+216,7 @@ def close(self): return self.__builder.close() -Command = namedtuple('Command', 'negated cmd args lineno context') +Command = namedtuple("Command", "negated cmd args lineno context") class FailedCheck(Exception): @@ -216,17 +235,17 @@ def concat_multi_lines(f): concatenated.""" lastline = None # set to the last line when the last line has a backslash firstlineno = None - catenated = '' + catenated = "" for lineno, line in enumerate(f): - line = line.rstrip('\r\n') + line = line.rstrip("\r\n") # strip the common prefix from the current line if needed if lastline is not None: common_prefix = os.path.commonprefix([line, lastline]) - line = line[len(common_prefix):].lstrip() + line = line[len(common_prefix) :].lstrip() firstlineno = firstlineno or lineno - if line.endswith('\\'): + if line.endswith("\\"): if lastline is None: lastline = line[:-1] catenated += line[:-1] @@ -234,10 +253,10 @@ def concat_multi_lines(f): yield firstlineno, catenated + line lastline = None firstlineno = None - catenated = '' + catenated = "" if lastline is not None: - print_err(lineno, line, 'Trailing backslash at the end of the file') + print_err(lineno, line, "Trailing backslash at the end of the file") def get_known_directive_names(): @@ -253,12 +272,12 @@ def filter_line(line): "tools/compiletest/src/directive-list.rs", ), "r", - encoding="utf8" + encoding="utf8", ) as fd: content = fd.read() return [ - line.strip().replace('",', '').replace('"', '') - for line in content.split('\n') + line.strip().replace('",', "").replace('"', "") + for line in content.split("\n") if filter_line(line) ] @@ -269,35 +288,42 @@ def filter_line(line): # See . 
KNOWN_DIRECTIVE_NAMES = get_known_directive_names() -LINE_PATTERN = re.compile(r''' +LINE_PATTERN = re.compile( + r""" //@\s+ (?P!?)(?P[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*) (?P.*)$ -''', re.X | re.UNICODE) +""", + re.X | re.UNICODE, +) def get_commands(template): - with io.open(template, encoding='utf-8') as f: + with io.open(template, encoding="utf-8") as f: for lineno, line in concat_multi_lines(f): m = LINE_PATTERN.search(line) if not m: continue - cmd = m.group('cmd') - negated = (m.group('negated') == '!') + cmd = m.group("cmd") + negated = m.group("negated") == "!" if not negated and cmd in KNOWN_DIRECTIVE_NAMES: continue - args = m.group('args') + args = m.group("args") if args and not args[:1].isspace(): - print_err(lineno, line, 'Invalid template syntax') + print_err(lineno, line, "Invalid template syntax") continue try: args = shlex.split(args) except UnicodeEncodeError: - args = [arg.decode('utf-8') for arg in shlex.split(args.encode('utf-8'))] + args = [ + arg.decode("utf-8") for arg in shlex.split(args.encode("utf-8")) + ] except Exception as exc: raise Exception("line {}: {}".format(lineno + 1, exc)) from None - yield Command(negated=negated, cmd=cmd, args=args, lineno=lineno+1, context=line) + yield Command( + negated=negated, cmd=cmd, args=args, lineno=lineno + 1, context=line + ) def _flatten(node, acc): @@ -312,22 +338,24 @@ def _flatten(node, acc): def flatten(node): acc = [] _flatten(node, acc) - return ''.join(acc) + return "".join(acc) def make_xml(text): - xml = ET.XML('%s' % text) + xml = ET.XML("%s" % text) return xml def normalize_xpath(path): path = path.replace("{{channel}}", channel) - if path.startswith('//'): - return '.' + path # avoid warnings - elif path.startswith('.//'): + if path.startswith("//"): + return "." 
+ path # avoid warnings + elif path.startswith(".//"): return path else: - raise InvalidCheck('Non-absolute XPath is not supported due to implementation issues') + raise InvalidCheck( + "Non-absolute XPath is not supported due to implementation issues" + ) class CachedFiles(object): @@ -338,12 +366,12 @@ def __init__(self, root): self.last_path = None def resolve_path(self, path): - if path != '-': + if path != "-": path = os.path.normpath(path) self.last_path = path return path elif self.last_path is None: - raise InvalidCheck('Tried to use the previous path in the first command') + raise InvalidCheck("Tried to use the previous path in the first command") else: return self.last_path @@ -356,10 +384,10 @@ def get_file(self, path): return self.files[path] abspath = self.get_absolute_path(path) - if not(os.path.exists(abspath) and os.path.isfile(abspath)): - raise FailedCheck('File does not exist {!r}'.format(path)) + if not (os.path.exists(abspath) and os.path.isfile(abspath)): + raise FailedCheck("File does not exist {!r}".format(path)) - with io.open(abspath, encoding='utf-8') as f: + with io.open(abspath, encoding="utf-8") as f: data = f.read() self.files[path] = data return data @@ -370,15 +398,15 @@ def get_tree(self, path): return self.trees[path] abspath = self.get_absolute_path(path) - if not(os.path.exists(abspath) and os.path.isfile(abspath)): - raise FailedCheck('File does not exist {!r}'.format(path)) + if not (os.path.exists(abspath) and os.path.isfile(abspath)): + raise FailedCheck("File does not exist {!r}".format(path)) - with io.open(abspath, encoding='utf-8') as f: + with io.open(abspath, encoding="utf-8") as f: try: tree = ET.fromstringlist(f.readlines(), CustomHTMLParser()) except Exception as e: - raise RuntimeError( # noqa: B904 FIXME: py2 - 'Cannot parse an HTML file {!r}: {}'.format(path, e) + raise RuntimeError( # noqa: B904 FIXME: py2 + "Cannot parse an HTML file {!r}: {}".format(path, e) ) self.trees[path] = tree return self.trees[path] @@ 
-386,8 +414,8 @@ def get_tree(self, path): def get_dir(self, path): path = self.resolve_path(path) abspath = self.get_absolute_path(path) - if not(os.path.exists(abspath) and os.path.isdir(abspath)): - raise FailedCheck('Directory does not exist {!r}'.format(path)) + if not (os.path.exists(abspath) and os.path.isdir(abspath)): + raise FailedCheck("Directory does not exist {!r}".format(path)) def check_string(data, pat, regexp): @@ -397,8 +425,8 @@ def check_string(data, pat, regexp): elif regexp: return re.search(pat, data, flags=re.UNICODE) is not None else: - data = ' '.join(data.split()) - pat = ' '.join(pat.split()) + data = " ".join(data.split()) + pat = " ".join(pat.split()) return pat in data @@ -444,19 +472,19 @@ def get_tree_count(tree, path): def check_snapshot(snapshot_name, actual_tree, normalize_to_text): - assert rust_test_path.endswith('.rs') - snapshot_path = '{}.{}.{}'.format(rust_test_path[:-3], snapshot_name, 'html') + assert rust_test_path.endswith(".rs") + snapshot_path = "{}.{}.{}".format(rust_test_path[:-3], snapshot_name, "html") try: - with open(snapshot_path, 'r') as snapshot_file: + with open(snapshot_path, "r") as snapshot_file: expected_str = snapshot_file.read().replace("{{channel}}", channel) except FileNotFoundError: if bless: expected_str = None else: - raise FailedCheck('No saved snapshot value') # noqa: B904 FIXME: py2 + raise FailedCheck("No saved snapshot value") # noqa: B904 FIXME: py2 if not normalize_to_text: - actual_str = ET.tostring(actual_tree).decode('utf-8') + actual_str = ET.tostring(actual_tree).decode("utf-8") else: actual_str = flatten(actual_tree) @@ -464,64 +492,66 @@ def check_snapshot(snapshot_name, actual_tree, normalize_to_text): # 1. Is --bless # 2. Are actual and expected tree different # 3. 
Are actual and expected text different - if not expected_str \ - or (not normalize_to_text and \ - not compare_tree(make_xml(actual_str), make_xml(expected_str), stderr)) \ - or (normalize_to_text and actual_str != expected_str): - + if ( + not expected_str + or ( + not normalize_to_text + and not compare_tree(make_xml(actual_str), make_xml(expected_str), stderr) + ) + or (normalize_to_text and actual_str != expected_str) + ): if bless: - with open(snapshot_path, 'w') as snapshot_file: + with open(snapshot_path, "w") as snapshot_file: actual_str = actual_str.replace(channel, "{{channel}}") snapshot_file.write(actual_str) else: - print('--- expected ---\n') + print("--- expected ---\n") print(expected_str) - print('\n\n--- actual ---\n') + print("\n\n--- actual ---\n") print(actual_str) print() - raise FailedCheck('Actual snapshot value is different than expected') + raise FailedCheck("Actual snapshot value is different than expected") # Adapted from https://github.com/formencode/formencode/blob/3a1ba9de2fdd494dd945510a4568a3afeddb0b2e/formencode/doctest_xml_compare.py#L72-L120 def compare_tree(x1, x2, reporter=None): if x1.tag != x2.tag: if reporter: - reporter('Tags do not match: %s and %s' % (x1.tag, x2.tag)) + reporter("Tags do not match: %s and %s" % (x1.tag, x2.tag)) return False for name, value in x1.attrib.items(): if x2.attrib.get(name) != value: if reporter: - reporter('Attributes do not match: %s=%r, %s=%r' - % (name, value, name, x2.attrib.get(name))) + reporter( + "Attributes do not match: %s=%r, %s=%r" + % (name, value, name, x2.attrib.get(name)) + ) return False for name in x2.attrib: if name not in x1.attrib: if reporter: - reporter('x2 has an attribute x1 is missing: %s' - % name) + reporter("x2 has an attribute x1 is missing: %s" % name) return False if not text_compare(x1.text, x2.text): if reporter: - reporter('text: %r != %r' % (x1.text, x2.text)) + reporter("text: %r != %r" % (x1.text, x2.text)) return False if not text_compare(x1.tail, 
x2.tail): if reporter: - reporter('tail: %r != %r' % (x1.tail, x2.tail)) + reporter("tail: %r != %r" % (x1.tail, x2.tail)) return False cl1 = list(x1) cl2 = list(x2) if len(cl1) != len(cl2): if reporter: - reporter('children length differs, %i != %i' - % (len(cl1), len(cl2))) + reporter("children length differs, %i != %i" % (len(cl1), len(cl2))) return False i = 0 for c1, c2 in zip(cl1, cl2): i += 1 if not compare_tree(c1, c2, reporter=reporter): if reporter: - reporter('children %i do not match: %s' - % (i, c1.tag)) + reporter("children %i do not match: %s" % (i, c1.tag)) return False return True @@ -529,14 +559,14 @@ def compare_tree(x1, x2, reporter=None): def text_compare(t1, t2): if not t1 and not t2: return True - if t1 == '*' or t2 == '*': + if t1 == "*" or t2 == "*": return True - return (t1 or '').strip() == (t2 or '').strip() + return (t1 or "").strip() == (t2 or "").strip() def stderr(*args): if sys.version_info.major < 3: - file = codecs.getwriter('utf-8')(sys.stderr) + file = codecs.getwriter("utf-8")(sys.stderr) else: file = sys.stderr @@ -556,21 +586,25 @@ def print_err(lineno, context, err, message=None): def get_nb_matching_elements(cache, c, regexp, stop_at_first): tree = cache.get_tree(c.args[0]) - pat, sep, attr = c.args[1].partition('/@') + pat, sep, attr = c.args[1].partition("/@") if sep: # attribute tree = cache.get_tree(c.args[0]) return check_tree_attr(tree, pat, attr, c.args[2], False) else: # normalized text pat = c.args[1] - if pat.endswith('/text()'): + if pat.endswith("/text()"): pat = pat[:-7] - return check_tree_text(cache.get_tree(c.args[0]), pat, c.args[2], regexp, stop_at_first) + return check_tree_text( + cache.get_tree(c.args[0]), pat, c.args[2], regexp, stop_at_first + ) def check_files_in_folder(c, cache, folder, files): files = files.strip() - if not files.startswith('[') or not files.endswith(']'): - raise InvalidCheck("Expected list as second argument of {} (ie '[]')".format(c.cmd)) + if not files.startswith("[") or not 
files.endswith("]"): + raise InvalidCheck( + "Expected list as second argument of {} (ie '[]')".format(c.cmd) + ) folder = cache.get_absolute_path(folder) @@ -592,12 +626,18 @@ def check_files_in_folder(c, cache, folder, files): error = 0 if len(files_set) != 0: - print_err(c.lineno, c.context, "Entries not found in folder `{}`: `{}`".format( - folder, files_set)) + print_err( + c.lineno, + c.context, + "Entries not found in folder `{}`: `{}`".format(folder, files_set), + ) error += 1 if len(folder_set) != 0: - print_err(c.lineno, c.context, "Extra entries in folder `{}`: `{}`".format( - folder, folder_set)) + print_err( + c.lineno, + c.context, + "Extra entries in folder `{}`: `{}`".format(folder, folder_set), + ) error += 1 return error == 0 @@ -608,11 +648,11 @@ def check_files_in_folder(c, cache, folder, files): def check_command(c, cache): try: cerr = "" - if c.cmd in ['has', 'hasraw', 'matches', 'matchesraw']: # string test - regexp = c.cmd.startswith('matches') + if c.cmd in ["has", "hasraw", "matches", "matchesraw"]: # string test + regexp = c.cmd.startswith("matches") # has = file existence - if len(c.args) == 1 and not regexp and 'raw' not in c.cmd: + if len(c.args) == 1 and not regexp and "raw" not in c.cmd: try: cache.get_file(c.args[0]) ret = True @@ -620,24 +660,24 @@ def check_command(c, cache): cerr = str(err) ret = False # hasraw/matchesraw = string test - elif len(c.args) == 2 and 'raw' in c.cmd: + elif len(c.args) == 2 and "raw" in c.cmd: cerr = "`PATTERN` did not match" ret = check_string(cache.get_file(c.args[0]), c.args[1], regexp) # has/matches = XML tree test - elif len(c.args) == 3 and 'raw' not in c.cmd: + elif len(c.args) == 3 and "raw" not in c.cmd: cerr = "`XPATH PATTERN` did not match" ret = get_nb_matching_elements(cache, c, regexp, True) != 0 else: - raise InvalidCheck('Invalid number of {} arguments'.format(c.cmd)) + raise InvalidCheck("Invalid number of {} arguments".format(c.cmd)) - elif c.cmd == 'files': # check files in given 
folder - if len(c.args) != 2: # files + elif c.cmd == "files": # check files in given folder + if len(c.args) != 2: # files raise InvalidCheck("Invalid number of {} arguments".format(c.cmd)) elif c.negated: raise InvalidCheck("{} doesn't support negative check".format(c.cmd)) ret = check_files_in_folder(c, cache, c.args[0], c.args[1]) - elif c.cmd == 'count': # count test + elif c.cmd == "count": # count test if len(c.args) == 3: # count = count test expected = int(c.args[2]) found = get_tree_count(cache.get_tree(c.args[0]), c.args[1]) @@ -649,15 +689,15 @@ def check_command(c, cache): cerr = "Expected {} occurrences but found {}".format(expected, found) ret = found == expected else: - raise InvalidCheck('Invalid number of {} arguments'.format(c.cmd)) + raise InvalidCheck("Invalid number of {} arguments".format(c.cmd)) - elif c.cmd == 'snapshot': # snapshot test + elif c.cmd == "snapshot": # snapshot test if len(c.args) == 3: # snapshot [snapshot_name, html_path, pattern] = c.args tree = cache.get_tree(html_path) xpath = normalize_xpath(pattern) normalize_to_text = False - if xpath.endswith('/text()'): + if xpath.endswith("/text()"): xpath = xpath[:-7] normalize_to_text = True @@ -671,13 +711,15 @@ def check_command(c, cache): cerr = str(err) ret = False elif len(subtrees) == 0: - raise FailedCheck('XPATH did not match') + raise FailedCheck("XPATH did not match") else: - raise FailedCheck('Expected 1 match, but found {}'.format(len(subtrees))) + raise FailedCheck( + "Expected 1 match, but found {}".format(len(subtrees)) + ) else: - raise InvalidCheck('Invalid number of {} arguments'.format(c.cmd)) + raise InvalidCheck("Invalid number of {} arguments".format(c.cmd)) - elif c.cmd == 'has-dir': # has-dir test + elif c.cmd == "has-dir": # has-dir test if len(c.args) == 1: # has-dir = has-dir test try: cache.get_dir(c.args[0]) @@ -686,22 +728,22 @@ def check_command(c, cache): cerr = str(err) ret = False else: - raise InvalidCheck('Invalid number of {} 
arguments'.format(c.cmd)) + raise InvalidCheck("Invalid number of {} arguments".format(c.cmd)) - elif c.cmd == 'valid-html': - raise InvalidCheck('Unimplemented valid-html') + elif c.cmd == "valid-html": + raise InvalidCheck("Unimplemented valid-html") - elif c.cmd == 'valid-links': - raise InvalidCheck('Unimplemented valid-links') + elif c.cmd == "valid-links": + raise InvalidCheck("Unimplemented valid-links") else: - raise InvalidCheck('Unrecognized {}'.format(c.cmd)) + raise InvalidCheck("Unrecognized {}".format(c.cmd)) if ret == c.negated: raise FailedCheck(cerr) except FailedCheck as err: - message = '{}{} check failed'.format('!' if c.negated else '', c.cmd) + message = "{}{} check failed".format("!" if c.negated else "", c.cmd) print_err(c.lineno, c.context, str(err), message) except InvalidCheck as err: print_err(c.lineno, c.context, str(err)) @@ -713,18 +755,18 @@ def check(target, commands): check_command(c, cache) -if __name__ == '__main__': +if __name__ == "__main__": if len(sys.argv) not in [3, 4]: - stderr('Usage: {}