Tagged CodeInstance
vchuravy committed Nov 19, 2023
1 parent f5d189f commit 05aa560
Showing 15 changed files with 72 additions and 112 deletions.
6 changes: 3 additions & 3 deletions base/boot.jl
@@ -479,13 +479,13 @@ eval(Core, quote
end)

function CodeInstance(
-mi::MethodInstance, @nospecialize(rettype), @nospecialize(inferred_const),
+mi::MethodInstance, owner, @nospecialize(rettype), @nospecialize(inferred_const),
@nospecialize(inferred), const_flags::Int32, min_world::UInt, max_world::UInt,
ipo_effects::UInt32, effects::UInt32, @nospecialize(analysis_results),
relocatability::UInt8)
return ccall(:jl_new_codeinst, Ref{CodeInstance},
-(Any, Any, Any, Any, Int32, UInt, UInt, UInt32, UInt32, Any, UInt8),
-mi, rettype, inferred_const, inferred, const_flags, min_world, max_world,
+(Any, Any, Any, Any, Any, Int32, UInt, UInt, UInt32, UInt32, Any, UInt8),
+mi, owner, rettype, inferred_const, inferred, const_flags, min_world, max_world,
ipo_effects, effects, analysis_results,
relocatability)
end
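(For orientation: a minimal sketch, not part of this commit, of a call to the updated constructor. It assumes `mi` is an existing `Core.MethodInstance`; `MyCacheToken` is a hypothetical owner value, and the native pipeline simply passes `nothing` as the owner.)

# Sketch only: exercising the new `owner` slot of the Core.CodeInstance constructor.
struct MyCacheToken end          # hypothetical token identifying a non-native cache

ci = Core.CodeInstance(
    mi,                          # ::Core.MethodInstance, assumed to exist
    MyCacheToken(),              # owner; `nothing` would select the native cache
    Any,                         # rettype
    nothing,                     # inferred_const
    nothing,                     # inferred
    Int32(0),                    # const_flags
    UInt(1), typemax(UInt),      # min_world, max_world
    UInt32(0), UInt32(0),        # ipo_effects, effects
    nothing,                     # analysis_results
    0x00)                        # relocatability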
15 changes: 10 additions & 5 deletions base/compiler/cicache.jl
@@ -7,14 +7,17 @@ Internally, each `MethodInstance` keep a unique global cache of code instances
that have been created for the given method instance, stratified by world age
ranges. This struct abstracts over access to this cache.
"""
-struct InternalCodeCache end
+struct InternalCodeCache
+owner::Any # Needs to be a token/objectid
+end

function setindex!(cache::InternalCodeCache, ci::CodeInstance, mi::MethodInstance)
+@assert ci.owner === cache.owner
ccall(:jl_mi_cache_insert, Cvoid, (Any, Any), mi, ci)
return cache
end

-const GLOBAL_CI_CACHE = InternalCodeCache()
+const GLOBAL_CI_CACHE = InternalCodeCache(nothing)

struct WorldRange
min_world::UInt
@@ -49,15 +52,17 @@ WorldView(wvc::WorldView, wr::WorldRange) = WorldView(wvc.cache, wr)
WorldView(wvc::WorldView, args...) = WorldView(wvc.cache, args...)

function haskey(wvc::WorldView{InternalCodeCache}, mi::MethodInstance)
-return ccall(:jl_rettype_inferred, Any, (Any, UInt, UInt), mi, first(wvc.worlds), last(wvc.worlds)) !== nothing
+return ccall(:jl_rettype_inferred, Any, (Any, Any, UInt, UInt), wvc.cache.owner, mi, first(wvc.worlds), last(wvc.worlds)) !== nothing
end

function get(wvc::WorldView{InternalCodeCache}, mi::MethodInstance, default)
-r = ccall(:jl_rettype_inferred, Any, (Any, UInt, UInt), mi, first(wvc.worlds), last(wvc.worlds))
+r = ccall(:jl_rettype_inferred, Any, (Any, Any, UInt, UInt), wvc.cache.owner, mi, first(wvc.worlds), last(wvc.worlds))
if r === nothing
return default
end
-return r::CodeInstance
+ci = r::CodeInstance
+@assert ci.owner === wvc.cache.owner
+return ci
end

function getindex(wvc::WorldView{InternalCodeCache}, mi::MethodInstance)
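(Usage sketch, not part of this commit: a non-default cache would carry its own owner token, and lookups through a `WorldView` then only return `CodeInstance`s tagged with that token. `MyInterpTag` and `mi` below are assumptions for illustration.)

# Sketch only: keying a separate code cache by an owner token.
struct MyInterpTag end                                    # hypothetical owner token

const MY_CI_CACHE = Core.Compiler.InternalCodeCache(MyInterpTag())

w   = Base.get_world_counter()
wvc = Core.Compiler.WorldView(MY_CI_CACHE, Core.Compiler.WorldRange(w, w))
# Only CodeInstances whose `owner` field is egal to MyInterpTag() are visible here:
ci  = Core.Compiler.get(wvc, mi, nothing)                 # `mi` assumed to be a MethodInstance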
2 changes: 1 addition & 1 deletion base/compiler/typeinfer.jl
@@ -326,7 +326,7 @@ function CodeInstance(interp::AbstractInterpreter, result::InferenceResult,
end
end
# relocatability = isa(inferred_result, String) ? inferred_result[end] : UInt8(0)
-return CodeInstance(result.linfo,
+return CodeInstance(result.linfo, nothing,
widenconst(result_type), rettype_const, inferred_result,
const_flags, first(valid_worlds), last(valid_worlds),
# TODO: Actually do something with non-IPO effects
19 changes: 0 additions & 19 deletions base/compiler/utilities.jl
@@ -321,25 +321,6 @@ function iterate(iter::BackedgeIterator, i::Int=1)
return BackedgePair(item, backedges[i+1]::MethodInstance), i+2 # `invoke` calls
end

-"""
-    add_invalidation_callback!(callback, mi::MethodInstance)
-Register `callback` to be triggered upon the invalidation of `mi`.
-`callback` should a function taking two arguments, `callback(replaced::MethodInstance, max_world::UInt32)`,
-and it will be recursively invoked on `MethodInstance`s within the invalidation graph.
-"""
-function add_invalidation_callback!(@nospecialize(callback), mi::MethodInstance)
-if !isdefined(mi, :callbacks)
-callbacks = mi.callbacks = Any[callback]
-else
-callbacks = mi.callbacks::Vector{Any}
-if !any(@nospecialize(cb)->cb===callback, callbacks)
-push!(callbacks, callback)
-end
-end
-return callbacks
-end

#########
# types #
#########
1 change: 1 addition & 0 deletions src/builtins.c
@@ -2089,6 +2089,7 @@ jl_fptr_args_t jl_get_builtin_fptr(jl_datatype_t *dt)
jl_typemap_entry_t *entry = (jl_typemap_entry_t*)jl_atomic_load_relaxed(&dt->name->mt->defs);
jl_method_instance_t *mi = jl_atomic_load_relaxed(&entry->func.method->unspecialized);
jl_code_instance_t *ci = jl_atomic_load_relaxed(&mi->cache);
+assert(ci->owner == jl_nothing);
return jl_atomic_load_relaxed(&ci->specptr.fptr1);
}

92 changes: 34 additions & 58 deletions src/gf.c
@@ -285,8 +285,8 @@ JL_DLLEXPORT jl_value_t *jl_methtable_lookup(jl_methtable_t *mt, jl_value_t *typ
// ----- MethodInstance specialization instantiation ----- //

JL_DLLEXPORT jl_code_instance_t* jl_new_codeinst(
-jl_method_instance_t *mi, jl_value_t *rettype,
-jl_value_t *inferred_const, jl_value_t *inferred,
+jl_method_instance_t *mi, jl_value_t *owner,
+jl_value_t *rettype, jl_value_t *inferred_const, jl_value_t *inferred,
int32_t const_flags, size_t min_world, size_t max_world,
uint32_t ipo_effects, uint32_t effects, jl_value_t *analysis_results,
uint8_t relocatability);
@@ -322,7 +322,7 @@ jl_datatype_t *jl_mk_builtin_func(jl_datatype_t *dt, const char *name, jl_fptr_a
jl_atomic_store_relaxed(&m->unspecialized, mi);
jl_gc_wb(m, mi);

-jl_code_instance_t *codeinst = jl_new_codeinst(mi,
+jl_code_instance_t *codeinst = jl_new_codeinst(mi, jl_nothing,
(jl_value_t*)jl_any_type, jl_nothing, jl_nothing,
0, 1, ~(size_t)0, 0, 0, jl_nothing, 0);
jl_mi_cache_insert(mi, codeinst);
@@ -436,11 +436,11 @@ JL_DLLEXPORT jl_value_t *jl_call_in_typeinf_world(jl_value_t **args, int nargs)
return ret;
}

-JL_DLLEXPORT jl_value_t *jl_rettype_inferred(jl_method_instance_t *mi, size_t min_world, size_t max_world) JL_NOTSAFEPOINT
+STATIC_INLINE jl_value_t *_jl_rettype_inferred(jl_value_t *owner, jl_method_instance_t *mi, size_t min_world, size_t max_world) JL_NOTSAFEPOINT
{
jl_code_instance_t *codeinst = jl_atomic_load_relaxed(&mi->cache);
while (codeinst) {
-if (codeinst->min_world <= min_world && max_world <= codeinst->max_world) {
+if (codeinst->min_world <= min_world && max_world <= codeinst->max_world && jl_egal(codeinst->owner, owner)) {
jl_value_t *code = jl_atomic_load_relaxed(&codeinst->inferred);
if (code && (code == jl_nothing || jl_ir_flag_inferred(code)))
return (jl_value_t*)codeinst;
@@ -449,24 +449,37 @@ JL_DLLEXPORT jl_value_t *jl_rettype_inferred(jl_method_instance_t *mi, size_t mi
}
return (jl_value_t*)jl_nothing;
}
-JL_DLLEXPORT jl_value_t *(*const jl_rettype_inferred_addr)(jl_method_instance_t *mi, size_t min_world, size_t max_world) JL_NOTSAFEPOINT = jl_rettype_inferred;

+JL_DLLEXPORT jl_value_t *jl_rettype_inferred(jl_value_t *owner, jl_method_instance_t *mi, size_t min_world, size_t max_world) JL_NOTSAFEPOINT
+{
+return (jl_value_t*)_jl_rettype_inferred(owner, mi, min_world, max_world);
+}
+
+JL_DLLEXPORT jl_value_t *jl_rettype_inferred_native(jl_method_instance_t *mi, size_t min_world, size_t max_world) JL_NOTSAFEPOINT
+{
+return (jl_value_t*)_jl_rettype_inferred(jl_nothing, mi, min_world, max_world);
+}
+
+JL_DLLEXPORT jl_value_t *(*const jl_rettype_inferred_addr)(jl_method_instance_t *mi, size_t min_world, size_t max_world) JL_NOTSAFEPOINT = jl_rettype_inferred_native;


JL_DLLEXPORT jl_code_instance_t *jl_get_method_inferred(
jl_method_instance_t *mi JL_PROPAGATES_ROOT, jl_value_t *rettype,
size_t min_world, size_t max_world)
{
+jl_value_t *owner = jl_nothing; // TODO: owner should be arg
jl_code_instance_t *codeinst = jl_atomic_load_relaxed(&mi->cache);
while (codeinst) {
if (codeinst->min_world == min_world &&
codeinst->max_world == max_world &&
+jl_egal(codeinst->owner, owner) &&
jl_egal(codeinst->rettype, rettype)) {
return codeinst;
}
codeinst = jl_atomic_load_relaxed(&codeinst->next);
}
codeinst = jl_new_codeinst(
-mi, rettype, NULL, NULL,
+mi, owner, rettype, NULL, NULL,
0, min_world, max_world, 0, 0, jl_nothing, 0);
jl_mi_cache_insert(mi, codeinst);
return codeinst;
@@ -483,8 +496,8 @@ JL_DLLEXPORT jl_code_instance_t *jl_get_codeinst_for_src(
}

JL_DLLEXPORT jl_code_instance_t *jl_new_codeinst(
-jl_method_instance_t *mi, jl_value_t *rettype,
-jl_value_t *inferred_const, jl_value_t *inferred,
+jl_method_instance_t *mi, jl_value_t *owner,
+jl_value_t *rettype, jl_value_t *inferred_const, jl_value_t *inferred,
int32_t const_flags, size_t min_world, size_t max_world,
uint32_t ipo_effects, uint32_t effects, jl_value_t *analysis_results,
uint8_t relocatability
@@ -495,6 +508,7 @@ JL_DLLEXPORT jl_code_instance_t *jl_new_codeinst(
jl_code_instance_t *codeinst = (jl_code_instance_t*)jl_gc_alloc(ct->ptls, sizeof(jl_code_instance_t),
jl_code_instance_type);
codeinst->def = mi;
+codeinst->owner = owner;
codeinst->min_world = min_world;
codeinst->max_world = max_world;
codeinst->rettype = rettype;
@@ -546,7 +560,7 @@ static int get_method_unspec_list(jl_typemap_entry_t *def, void *closure)
if (!jl_is_svec(specializations)) {
jl_method_instance_t *mi = (jl_method_instance_t*)specializations;
assert(jl_is_method_instance(mi));
-if (jl_rettype_inferred(mi, world, world) == jl_nothing)
+if (jl_rettype_inferred_native(mi, world, world) == jl_nothing)
jl_array_ptr_1d_push((jl_array_t*)closure, (jl_value_t*)mi);
return 1;
}
@@ -556,7 +570,7 @@ static int get_method_unspec_list(jl_typemap_entry_t *def, void *closure)
jl_method_instance_t *mi = (jl_method_instance_t*)jl_svecref(specializations, i);
if ((jl_value_t*)mi != jl_nothing) {
assert(jl_is_method_instance(mi));
-if (jl_rettype_inferred(mi, world, world) == jl_nothing)
+if (jl_rettype_inferred_native(mi, world, world) == jl_nothing)
jl_array_ptr_1d_push((jl_array_t*)closure, (jl_value_t*)mi);
}
}
@@ -676,7 +690,7 @@ JL_DLLEXPORT void jl_set_typeinf_func(jl_value_t *f)
size_t i, l;
for (i = 0, l = jl_array_nrows(unspec); i < l; i++) {
jl_method_instance_t *mi = (jl_method_instance_t*)jl_array_ptr_ref(unspec, i);
-if (jl_rettype_inferred(mi, world, world) == jl_nothing)
+if (jl_rettype_inferred_native(mi, world, world) == jl_nothing)
jl_type_infer(mi, world, 1);
}
JL_GC_POP();
Expand Down Expand Up @@ -1612,41 +1626,6 @@ JL_DLLEXPORT jl_value_t *jl_debug_method_invalidation(int state)
return jl_nothing;
}

-// call external callbacks registered with this method_instance
-static void invalidate_external(jl_method_instance_t *mi, size_t max_world) {
-jl_array_t *callbacks = mi->callbacks;
-if (callbacks) {
-// AbstractInterpreter allows for MethodInstances to be present in non-local caches
-// inform those caches about the invalidation.
-JL_TRY {
-size_t i, l = jl_array_nrows(callbacks);
-jl_value_t **args;
-JL_GC_PUSHARGS(args, 3);
-// these arguments are constant per call
-args[1] = (jl_value_t*)mi;
-args[2] = jl_box_uint32(max_world);
-
-jl_task_t *ct = jl_current_task;
-size_t last_age = ct->world_age;
-ct->world_age = jl_get_world_counter();
-
-jl_value_t **cbs = (jl_value_t**)jl_array_ptr_data(callbacks);
-for (i = 0; i < l; i++) {
-args[0] = cbs[i];
-jl_apply(args, 3);
-}
-ct->world_age = last_age;
-JL_GC_POP();
-}
-JL_CATCH {
-jl_printf((JL_STREAM*)STDERR_FILENO, "error in invalidation callback: ");
-jl_static_show((JL_STREAM*)STDERR_FILENO, jl_current_exception());
-jl_printf((JL_STREAM*)STDERR_FILENO, "\n");
-jlbacktrace(); // written to STDERR_FILENO
-}
-}
-}

static void _invalidate_backedges(jl_method_instance_t *replaced_mi, size_t max_world, int depth);

// recursively invalidate cached methods that had an edge to a replaced method
@@ -1675,7 +1654,6 @@ static void invalidate_method_instance(jl_method_instance_t *replaced, size_t ma
codeinst = jl_atomic_load_relaxed(&codeinst->next);
}
JL_GC_PUSH1(&replaced);
-invalidate_external(replaced, max_world);
// recurse to all backedges to update their valid range also
_invalidate_backedges(replaced, max_world, depth + 1);
JL_GC_POP();
@@ -1816,7 +1794,6 @@ static int invalidate_mt_cache(jl_typemap_entry_t *oldentry, void *closure0)
}
}
if (intersects) {
-// TODO call invalidate_external here?
if (_jl_debug_method_invalidation) {
jl_array_ptr_1d_push(_jl_debug_method_invalidation, (jl_value_t*)mi);
jl_value_t *loctag = jl_cstr_to_string("invalidate_mt_cache");
@@ -1898,7 +1875,6 @@ static void jl_method_table_invalidate(jl_methtable_t *mt, jl_typemap_entry_t *m
jl_method_instance_t *mi = (jl_method_instance_t*)jl_svecref(specializations, i);
if ((jl_value_t*)mi != jl_nothing) {
invalidated = 1;
-invalidate_external(mi, max_world);
invalidate_backedges(mi, max_world, "jl_method_table_disable");
}
}
@@ -2151,7 +2127,6 @@ JL_DLLEXPORT void jl_method_table_insert(jl_methtable_t *mt, jl_method_t *method
jl_array_del_end(backedges, nb - insb);
}
jl_array_ptr_1d_push(oldmi, (jl_value_t*)mi);
-invalidate_external(mi, max_world);
if (_jl_debug_method_invalidation && invalidated) {
jl_array_ptr_1d_push(_jl_debug_method_invalidation, (jl_value_t*)mi);
loctag = jl_cstr_to_string("jl_method_table_insert");
@@ -2454,7 +2429,7 @@ jl_code_instance_t *jl_compile_method_internal(jl_method_instance_t *mi, size_t
jl_code_instance_t *unspec = jl_atomic_load_relaxed(&unspecmi->cache);
jl_callptr_t unspec_invoke = NULL;
if (unspec && (unspec_invoke = jl_atomic_load_acquire(&unspec->invoke))) {
-jl_code_instance_t *codeinst = jl_new_codeinst(mi,
+jl_code_instance_t *codeinst = jl_new_codeinst(mi, jl_nothing,
(jl_value_t*)jl_any_type, NULL, NULL,
0, 1, ~(size_t)0, 0, 0, jl_nothing, 0);
void *unspec_fptr = jl_atomic_load_relaxed(&unspec->specptr.fptr);
@@ -2481,7 +2456,7 @@ jl_code_instance_t *jl_compile_method_internal(jl_method_instance_t *mi, size_t
compile_option == JL_OPTIONS_COMPILE_MIN) {
jl_code_info_t *src = jl_code_for_interpreter(mi, world);
if (!jl_code_requires_compiler(src, 0)) {
-jl_code_instance_t *codeinst = jl_new_codeinst(mi,
+jl_code_instance_t *codeinst = jl_new_codeinst(mi, jl_nothing,
(jl_value_t*)jl_any_type, NULL, NULL,
0, 1, ~(size_t)0, 0, 0, jl_nothing, 0);
jl_atomic_store_release(&codeinst->invoke, jl_fptr_interpret_call);
@@ -2516,7 +2491,8 @@ jl_code_instance_t *jl_compile_method_internal(jl_method_instance_t *mi, size_t
// only these care about the exact specTypes, otherwise we can use it directly
return ucache;
}
-codeinst = jl_new_codeinst(mi, (jl_value_t*)jl_any_type, NULL, NULL,
+codeinst = jl_new_codeinst(mi, jl_nothing,
+(jl_value_t*)jl_any_type, NULL, NULL,
0, 1, ~(size_t)0, 0, 0, jl_nothing, 0);
void *unspec_fptr = jl_atomic_load_relaxed(&ucache->specptr.fptr);
if (unspec_fptr) {
@@ -2766,10 +2742,10 @@ static jl_method_instance_t *jl_get_compile_hint_specialization(jl_tupletype_t *

static void _generate_from_hint(jl_method_instance_t *mi, size_t world)
{
-jl_value_t *codeinst = jl_rettype_inferred(mi, world, world);
+jl_value_t *codeinst = jl_rettype_inferred_native(mi, world, world);
if (codeinst == jl_nothing) {
(void)jl_type_infer(mi, world, 1);
-codeinst = jl_rettype_inferred(mi, world, world);
+codeinst = jl_rettype_inferred_native(mi, world, world);
}
if (codeinst != jl_nothing) {
if (jl_atomic_load_relaxed(&((jl_code_instance_t*)codeinst)->invoke) == jl_fptr_const_return)
@@ -2808,10 +2784,10 @@ JL_DLLEXPORT void jl_compile_method_instance(jl_method_instance_t *mi, jl_tuplet
jl_method_instance_t *mi2 = jl_specializations_get_linfo(mi->def.method, (jl_value_t*)types2, tpenv2);
JL_GC_POP();
jl_atomic_store_relaxed(&mi2->precompiled, 1);
-if (jl_rettype_inferred(mi2, world, world) == jl_nothing)
+if (jl_rettype_inferred_native(mi2, world, world) == jl_nothing)
(void)jl_type_infer(mi2, world, 1);
if (jl_typeinf_func && mi->def.method->primary_world <= tworld) {
-if (jl_rettype_inferred(mi2, tworld, tworld) == jl_nothing)
+if (jl_rettype_inferred_native(mi2, tworld, tworld) == jl_nothing)
(void)jl_type_infer(mi2, tworld, 1);
}
}
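(Reference sketch, not in the diff: how the two lookup entry points above can be reached from Julia after this change. `owner_token`, `mi`, and `world` are assumed to exist.)

# Native cache: owner is `nothing`, looked up via the *_native entry point.
ci_native = ccall(:jl_rettype_inferred_native, Any, (Any, UInt, UInt),
                  mi, world, world)
# A cache tagged with a token passes it as the leading argument instead:
ci_owned  = ccall(:jl_rettype_inferred, Any, (Any, Any, UInt, UInt),
                  owner_token, mi, world, world)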