Improve attribution of backedge-triggered invalidation (#46756)
SnoopCompile attempts to attribute invalidations to specific causes,
but until now it has not generally been able to handle what it calls
"delayed" invalidations, which arise when a MethodInstance backedge
turns out to be no longer valid while a precompiled package is being
loaded. This commit dumps more data to the reporting stream and should
allow SnoopCompile to assemble the full chain of causes.

This also invalidates the backedges of the MethodInstances that fail
to validate their external edges.
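
For context on how the new records are consumed: the reporting stream is the array returned by ccall(:jl_debug_method_invalidation, Any, (Cint,), 1), the same call the new test below uses. The sketch that follows is not SnoopCompile's code; it is a minimal illustration of how a consumer might walk the entries this patch emits, with the entry layout read off the pushes in the diff and StaleB used only as a placeholder package name.

# Minimal sketch of a consumer of the invalidation log (assumptions noted above).
invalidations = ccall(:jl_debug_method_invalidation, Any, (Cint,), 1)  # start logging
try
    @eval using StaleB   # placeholder: loading a precompiled package verifies its edges
finally
    ccall(:jl_debug_method_invalidation, Any, (Cint,), 0)              # stop logging
end

for (j, entry) in pairs(invalidations)
    if entry == "insert_backedges_callee"
        # Record written by jl_verify_edges for a target whose edge failed to validate.
        sig     = invalidations[j-1]   # callee MethodInstance, or the call signature
        idx     = invalidations[j+1]   # Int32 index into the external-targets table
        buildid = invalidations[j+2]   # UInt64 build id of the cached package
        matches = invalidations[j+3]   # methods matching now that were not among the
                                       # cached edges (false if the lookup itself failed)
        @info "stale external edge" sig idx buildid matches
    elseif entry == "insert_backedges"
        # Record written by jl_insert_backedges for a caller whose cache was dropped.
        caller  = invalidations[j-1]   # caller MethodInstance
        idx     = invalidations[j+1]   # index of the failing target (join on this)
        buildid = invalidations[j+2]
        @info "invalidated caller" caller idx buildid
    end
end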
timholy authored Sep 24, 2022
1 parent 24cb92d commit b43bc62
Showing 2 changed files with 72 additions and 11 deletions.
43 changes: 33 additions & 10 deletions src/dump.c
@@ -2541,8 +2541,8 @@ static void jl_verify_edges(jl_array_t *targets, jl_array_t **pvalids)
size_t i, l = jl_array_len(targets) / 3;
jl_array_t *valids = jl_alloc_array_1d(jl_array_uint8_type, l);
memset(jl_array_data(valids), 1, l);
jl_value_t *loctag = NULL;
JL_GC_PUSH1(&loctag);
jl_value_t *loctag = NULL, *matches = NULL;
JL_GC_PUSH2(&loctag, &matches);
*pvalids = valids;
for (i = 0; i < l; i++) {
jl_value_t *invokesig = jl_array_ptr_ref(targets, i * 3);
@@ -2562,7 +2562,7 @@ static void jl_verify_edges(jl_array_t *targets, jl_array_t **pvalids)
size_t max_valid = ~(size_t)0;
int ambig = 0;
// TODO: possibly need to included ambiguities too (for the optimizer correctness)?
jl_value_t *matches = jl_matching_methods((jl_tupletype_t*)sig, jl_nothing, -1, 0, jl_atomic_load_acquire(&jl_world_counter), &min_valid, &max_valid, &ambig);
matches = jl_matching_methods((jl_tupletype_t*)sig, jl_nothing, -1, 0, jl_atomic_load_acquire(&jl_world_counter), &min_valid, &max_valid, &ambig);
if (matches == jl_false || jl_array_len(matches) != jl_array_len(expected)) {
valid = 0;
}
@@ -2586,9 +2586,27 @@ static void jl_verify_edges(jl_array_t *targets, jl_array_t **pvalids)
}
jl_array_uint8_set(valids, i, valid);
if (!valid && _jl_debug_method_invalidation) {
jl_array_ptr_1d_push(_jl_debug_method_invalidation, (jl_value_t*)callee);
jl_array_ptr_1d_push(_jl_debug_method_invalidation, callee ? (jl_value_t*)callee : sig);
loctag = jl_cstr_to_string("insert_backedges_callee");
jl_array_ptr_1d_push(_jl_debug_method_invalidation, loctag);
loctag = jl_box_int32((int32_t)i);
jl_array_ptr_1d_push(_jl_debug_method_invalidation, loctag);
loctag = jl_box_uint64(jl_worklist_key(serializer_worklist));
jl_array_ptr_1d_push(_jl_debug_method_invalidation, loctag);
if (matches != jl_false) {
// setdiff!(matches, expected)
size_t j, k, ins = 0;
for (j = 0; j < jl_array_len(matches); j++) {
int found = 0;
jl_method_t *match = ((jl_method_match_t*)jl_array_ptr_ref(matches, j))->method;
for (k = 0; !found && k < jl_array_len(expected); k++)
found |= jl_egal((jl_value_t*)match, jl_array_ptr_ref(expected, k));
if (!found)
jl_array_ptr_set(matches, ins++, match);
}
jl_array_del_end((jl_array_t*)matches, jl_array_len(matches) - ins);
}
jl_array_ptr_1d_push(_jl_debug_method_invalidation, matches);
}
}
JL_GC_POP();
@@ -2601,9 +2619,10 @@ static void jl_insert_backedges(jl_array_t *edges, jl_array_t *ext_targets)
{
// foreach(enable, ((edges[2i-1] => ext_targets[edges[2i] .* 3]) for i in 1:length(edges)÷2 if all(valids[edges[2i]])))
size_t i, l = jl_array_len(edges);
size_t world = jl_atomic_load_acquire(&jl_world_counter);
jl_array_t *valids = NULL;
jl_value_t *loctag = NULL;
JL_GC_PUSH2(&valids, &loctag);
jl_value_t *targetidx = NULL;
JL_GC_PUSH2(&valids, &targetidx);
jl_verify_edges(ext_targets, &valids);
for (i = 0; i < l; i += 2) {
jl_method_instance_t *caller = (jl_method_instance_t*)jl_array_ptr_ref(edges, i);
@@ -2612,10 +2631,12 @@ static void jl_insert_backedges(jl_array_t *edges, jl_array_t *ext_targets)
assert(jl_isa((jl_value_t*)idxs_array, jl_array_int32_type));
int32_t *idxs = (int32_t*)jl_array_data(idxs_array);
int valid = 1;
size_t j;
size_t j, idxbad = -1;
for (j = 0; valid && j < jl_array_len(idxs_array); j++) {
int32_t idx = idxs[j];
valid = jl_array_uint8_ref(valids, idx);
if (!valid)
idxbad = idx;
}
if (valid) {
// if this callee is still valid, add all the backedges
@@ -2652,10 +2673,12 @@ static void jl_insert_backedges(jl_array_t *edges, jl_array_t *ext_targets)
ptrhash_remove(&new_code_instance_validate, codeinst); // should be left invalid
codeinst = jl_atomic_load_relaxed(&codeinst->next);
}
invalidate_backedges(&remove_code_instance_from_validation, caller, world, "insert_backedges");
if (_jl_debug_method_invalidation) {
jl_array_ptr_1d_push(_jl_debug_method_invalidation, (jl_value_t*)caller);
loctag = jl_cstr_to_string("insert_backedges");
jl_array_ptr_1d_push(_jl_debug_method_invalidation, loctag);
targetidx = jl_box_int32((int32_t)idxbad);
jl_array_ptr_1d_push(_jl_debug_method_invalidation, targetidx);
targetidx = jl_box_uint64(jl_worklist_key(serializer_worklist));
jl_array_ptr_1d_push(_jl_debug_method_invalidation, targetidx);
}
}
}
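
One detail of the jl_verify_edges change above that is easy to miss: the loop under the // setdiff!(matches, expected) comment compacts matches in place, so the log ends up recording only the methods that match the callee signature now but were not recorded as edges when the package was cached. Below is a rough Julia sketch of that filter, assuming matches holds Core.MethodMatch objects and expected holds Methods; it is an illustration, not code from the commit.

# Rough Julia equivalent of the in-place setdiff performed in jl_verify_edges
# (illustrative only; the function name and element types are assumptions).
function unexpected_matches!(matches::Vector{Any}, expected::Vector{Any})
    ins = 0
    for j in eachindex(matches)
        m = (matches[j]::Core.MethodMatch).method
        # keep m only if it is not one of the edges recorded at precompile time
        if !any(e -> e === m, expected)
            ins += 1
            matches[ins] = m          # overwrite in place, like jl_array_ptr_set
        end
    end
    resize!(matches, ins)             # drop the tail, like jl_array_del_end
    return matches                    # now holds only the offending Methods
end

After this pass the matches entry pushed to the log contains Methods rather than MethodMatch objects, which is what the consumer sketch earlier assumes.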
40 changes: 39 additions & 1 deletion test/precompile.jl
@@ -817,6 +817,10 @@ precompile_test_harness("code caching") do dir
build_stale(37)
stale('c')
## Reporting tests (unrelated to the above)
nbits(::Int8) = 8
nbits(::Int16) = 16
end
"""
)
@@ -835,6 +839,11 @@ precompile_test_harness("code caching") do dir
# force precompilation
useA()
## Reporting tests
call_nbits(x::Integer) = $StaleA.nbits(x)
map_nbits() = map(call_nbits, Integer[Int8(1), Int16(1)])
map_nbits()
end
"""
)
@@ -856,9 +865,12 @@ precompile_test_harness("code caching") do dir
Base.compilecache(Base.PkgId(string(pkg)))
end
@eval using $StaleA
MA = getfield(@__MODULE__, StaleA)
Base.eval(MA, :(nbits(::UInt8) = 8))
@eval using $StaleC
invalidations = ccall(:jl_debug_method_invalidation, Any, (Cint,), 1)
@eval using $StaleB
MA = getfield(@__MODULE__, StaleA)
ccall(:jl_debug_method_invalidation, Any, (Cint,), 0)
MB = getfield(@__MODULE__, StaleB)
MC = getfield(@__MODULE__, StaleC)
world = Base.get_world_counter()
@@ -883,6 +895,32 @@ precompile_test_harness("code caching") do dir
m = only(methods(MC.call_buildstale))
mi = m.specializations[1]
@test hasvalid(mi, world) # was compiled with the new method

# Reporting test
@test all(i -> isassigned(invalidations, i), eachindex(invalidations))
idxs = findall(==("insert_backedges"), invalidations)
m = only(methods(MB.call_nbits))
idxsbits = filter(idxs) do i
mi = invalidations[i-1]
mi.def == m
end
idx = only(idxsbits)
for mi in m.specializations
mi === nothing && continue
hv = hasvalid(mi, world)
@test mi.specTypes.parameters[end] === Integer ? !hv : hv
end

tagbad = invalidations[idx+1]
buildid = invalidations[idx+2]
@test isa(buildid, UInt64)
j = findfirst(==(tagbad), invalidations)
@test invalidations[j+1] == buildid
@test isa(invalidations[j-2], Type)
@test invalidations[j-1] == "insert_backedges_callee"

m = only(methods(MB.map_nbits))
@test !hasvalid(m.specializations[1], world+1) # insert_backedges invalidations also trigger their backedges
end

precompile_test_harness("invoke") do dir
