code coverage foundation for hash and num_counters #73488

Merged: 10 commits, Jun 24, 2020
src/librustc_codegen_llvm/intrinsic.rs (14 additions, 21 deletions)

@@ -23,7 +23,6 @@ use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt};
 use rustc_middle::ty::{self, Ty};
 use rustc_middle::{bug, span_bug};
 use rustc_span::Span;
-use rustc_span::Symbol;
 use rustc_target::abi::{self, HasDataLayout, LayoutOf, Primitive};
 use rustc_target::spec::PanicStrategy;

@@ -141,26 +140,20 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
                 self.call(llfn, &[], None)
             }
             "count_code_region" => {
-                if let ty::InstanceDef::Item(fn_def_id) = caller_instance.def {
-                    let caller_fn_path = tcx.def_path_str(fn_def_id);
-                    debug!(
-                        "count_code_region to llvm.instrprof.increment(fn_name={})",
-                        caller_fn_path
-                    );
-
-                    // FIXME(richkadel): (1) Replace raw function name with mangled function name;
-                    // (2) Replace hardcoded `1234` in `hash` with a computed hash (as discussed in)
-                    // the MCP (compiler-team/issues/278); and replace the hardcoded `1` for
-                    // `num_counters` with the actual number of counters per function (when the
-                    // changes are made to inject more than one counter per function).
-                    let (fn_name, _len_val) = self.const_str(Symbol::intern(&caller_fn_path));
-                    let index = args[0].immediate();
-                    let hash = self.const_u64(1234);
-                    let num_counters = self.const_u32(1);
-                    self.instrprof_increment(fn_name, hash, num_counters, index)
-                } else {
-                    bug!("intrinsic count_code_region: no src.instance");
-                }
+                // FIXME(richkadel): The current implementation assumes the MIR for the given
+                // caller_instance represents a single function. Validate and/or correct if inlining
+                // and/or monomorphization invalidates these assumptions.
+                let coverage_data = tcx.coverage_data(caller_instance.def_id());
+                let mangled_fn = tcx.symbol_name(caller_instance);
+                let (mangled_fn_name, _len_val) = self.const_str(mangled_fn.name);
+                let hash = self.const_u64(coverage_data.hash);
+                let num_counters = self.const_u32(coverage_data.num_counters);
+                let index = args[0].immediate();
+                debug!(
+                    "count_code_region to LLVM intrinsic instrprof.increment(fn_name={}, hash={:?}, num_counters={:?}, index={:?})",
+                    mangled_fn.name, hash, num_counters, index
+                );
+                self.instrprof_increment(mangled_fn_name, hash, num_counters, index)
             }
             "va_start" => self.va_start(args[0].immediate()),
             "va_end" => self.va_end(args[0].immediate()),
src/librustc_metadata/locator.rs (2 additions, 0 deletions)

@@ -488,6 +488,8 @@ impl<'a> CrateLocator<'a> {
             && self.triple != TargetTriple::from_triple(config::host_triple())
         {
             err.note(&format!("the `{}` target may not be installed", self.triple));
+        } else if self.crate_name == sym::profiler_builtins {
+            err.note(&"the compiler may have been built without the profiler runtime");
         }
         err.span_label(self.span, "can't find crate");
         err
src/librustc_middle/hir/map/mod.rs (1 addition, 1 deletion)

@@ -56,7 +56,7 @@ fn fn_sig<'hir>(node: Node<'hir>) -> Option<&'hir FnSig<'hir>> {
     }
 }
 
-fn associated_body<'hir>(node: Node<'hir>) -> Option<BodyId> {
+pub fn associated_body<'hir>(node: Node<'hir>) -> Option<BodyId> {
     match node {
         Node::Item(Item {
             kind: ItemKind::Const(_, body) | ItemKind::Static(.., body) | ItemKind::Fn(.., body),
src/librustc_middle/ich/hcx.rs (31 additions, 2 deletions)

@@ -67,13 +67,15 @@ impl<'a> StableHashingContext<'a> {
     /// Don't use it for anything else or you'll run the risk of
     /// leaking data out of the tracking system.
     #[inline]
-    pub fn new(
+    fn new_with_or_without_spans(
         sess: &'a Session,
         krate: &'a hir::Crate<'a>,
         definitions: &'a Definitions,
         cstore: &'a dyn CrateStore,
+        always_ignore_spans: bool,
     ) -> Self {
-        let hash_spans_initial = !sess.opts.debugging_opts.incremental_ignore_spans;
+        let hash_spans_initial =
+            !always_ignore_spans && !sess.opts.debugging_opts.incremental_ignore_spans;
 
         StableHashingContext {
             sess,
@@ -88,6 +90,33 @@ impl<'a> StableHashingContext<'a> {
         }
     }
 
+    #[inline]
+    pub fn new(
+        sess: &'a Session,
+        krate: &'a hir::Crate<'a>,
+        definitions: &'a Definitions,
+        cstore: &'a dyn CrateStore,
+    ) -> Self {
+        Self::new_with_or_without_spans(
+            sess,
+            krate,
+            definitions,
+            cstore,
+            /*always_ignore_spans=*/ false,
+        )
+    }
+
+    #[inline]
+    pub fn ignore_spans(
+        sess: &'a Session,
+        krate: &'a hir::Crate<'a>,
+        definitions: &'a Definitions,
+        cstore: &'a dyn CrateStore,
+    ) -> Self {
+        let always_ignore_spans = true;
+        Self::new_with_or_without_spans(sess, krate, definitions, cstore, always_ignore_spans)
+    }
+
     #[inline]
     pub fn sess(&self) -> &'a Session {
         self.sess
src/librustc_middle/mir/mod.rs (17 additions, 2 deletions)

@@ -164,8 +164,8 @@ pub struct Body<'tcx> {
     /// The user may be writing e.g. `&[(SOME_CELL, 42)][i].1` and this would get promoted, because
     /// we'd statically know that no thing with interior mutability will ever be available to the
     /// user without some serious unsafe code. Now this means that our promoted is actually
-    /// `&[(SOME_CELL, 42)]` and the MIR using it will do the `&promoted[i].1` projection because the
-    /// index may be a runtime value. Such a promoted value is illegal because it has reachable
+    /// `&[(SOME_CELL, 42)]` and the MIR using it will do the `&promoted[i].1` projection because
+    /// the index may be a runtime value. Such a promoted value is illegal because it has reachable
     /// interior mutability. This flag just makes this situation very obvious where the previous
     /// implementation without the flag hid this situation silently.
     /// FIXME(oli-obk): rewrite the promoted during promotion to eliminate the cell components.
@@ -2919,3 +2919,18 @@ impl Location {
         }
     }
 }
+
+/// Coverage data associated with each function (MIR) instrumented with coverage counters, when
+/// compiled with `-Zinstrument_coverage`. The query `tcx.coverage_data(DefId)` computes these
+/// values on demand (during code generation). This query is only valid after executing the MIR pass
+/// `InstrumentCoverage`.
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
+pub struct CoverageData {
+    /// A hash value that can be used by the consumer of the coverage profile data to detect
+    /// changes to the instrumented source of the associated MIR body (typically, for an
+    /// individual function).
+    pub hash: u64,
+
+    /// The total number of coverage region counters added to the MIR `Body`.
+    pub num_counters: u32,
+}
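
Not part of the diff: the `InstrumentCoverage` MIR pass that produces these values lives outside this section, but a rough sketch of how the two fields might be derived is shown below. The allocator type and method names are illustrative assumptions, not the PR's actual implementation.

```rust
use rustc_middle::mir::CoverageData;

// Illustrative sketch only: each injected `count_code_region(index)` call takes
// the next index from an allocator like this, and the final count becomes
// `num_counters`. `fn_source_hash` is assumed to be a span-independent stable
// hash of the function's source (see the hashing sketch at the end of this page).
struct CounterAllocator {
    next_index: u32,
}

impl CounterAllocator {
    fn new() -> Self {
        CounterAllocator { next_index: 0 }
    }

    /// Hands out the index used by the next injected counter.
    fn next_counter(&mut self) -> u32 {
        let index = self.next_index;
        self.next_index += 1;
        index
    }

    /// After instrumentation, package the results for the `coverage_data` query.
    fn into_coverage_data(self, fn_source_hash: u64) -> CoverageData {
        CoverageData { hash: fn_source_hash, num_counters: self.next_index }
    }
}
```

The only point of the sketch is that `num_counters` falls out of counting the injected counters, while `hash` is expected to come from hashing the function's source with spans ignored.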
src/librustc_middle/query/mod.rs (6 additions, 0 deletions)

@@ -214,6 +214,12 @@ rustc_queries! {
         cache_on_disk_if { key.is_local() }
     }
 
+    query coverage_data(key: DefId) -> mir::CoverageData {
+        desc { |tcx| "retrieving coverage data from MIR for `{}`", tcx.def_path_str(key) }
+        storage(ArenaCacheSelector<'tcx>)
+        cache_on_disk_if { key.is_local() }
+    }
+
     query promoted_mir(key: DefId) -> IndexVec<mir::Promoted, mir::Body<'tcx>> {
         desc { |tcx| "optimizing promoted MIR for `{}`", tcx.def_path_str(key) }
         storage(ArenaCacheSelector<'tcx>)
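
The provider for this query is defined elsewhere in the PR and is not shown here. As a hedged sketch only, it would be wired up through the usual `Providers` registration; `compute_coverage_data` is a hypothetical placeholder, not the real provider.

```rust
use rustc_hir::def_id::DefId;
use rustc_middle::mir::CoverageData;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::TyCtxt;

// Illustrative wiring only. With `storage(ArenaCacheSelector<'tcx>)`, the
// provider returns the owned value and callers of `tcx.coverage_data(def_id)`
// receive an arena-allocated reference to it.
pub fn provide(providers: &mut Providers<'_>) {
    providers.coverage_data = compute_coverage_data;
}

// Hypothetical placeholder: the real provider derives these values from the
// MIR instrumented by the `InstrumentCoverage` pass.
fn compute_coverage_data(_tcx: TyCtxt<'_>, _def_id: DefId) -> CoverageData {
    CoverageData { hash: 0, num_counters: 0 }
}
```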
src/librustc_middle/ty/context.rs (7 additions, 0 deletions)

@@ -1284,6 +1284,13 @@ impl<'tcx> TyCtxt<'tcx> {
         StableHashingContext::new(self.sess, krate, self.definitions, &*self.cstore)
     }
 
+    #[inline(always)]
+    pub fn create_no_span_stable_hashing_context(self) -> StableHashingContext<'tcx> {
+        let krate = self.gcx.untracked_crate;
+
+        StableHashingContext::ignore_spans(self.sess, krate, self.definitions, &*self.cstore)
+    }
+
     // This method makes sure that we have a DepNode and a Fingerprint for
     // every upstream crate. It needs to be called once right after the tcx is
     // created.
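
One consumer of the new no-span context (not shown in this section) is the computation of the coverage `hash` itself. A minimal sketch, assuming the standard `StableHasher`/`HashStable` machinery and a hypothetical helper name:

```rust
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_middle::ty::TyCtxt;

// Hypothetical helper: hash a function's HIR body while ignoring spans, so the
// resulting value (usable as `CoverageData::hash`) stays stable across pure
// formatting changes to the source.
fn hash_hir_body_ignoring_spans<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &'tcx rustc_hir::Body<'tcx>,
) -> u64 {
    // Span hashing is forced off here, independent of `-Zincremental-ignore-spans`.
    let mut hcx = tcx.create_no_span_stable_hashing_context();
    let mut stable_hasher = StableHasher::new();
    body.hash_stable(&mut hcx, &mut stable_hasher);
    stable_hasher.finish()
}
```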