Skip to content

Commit

Permalink
[vm/finalize] Split code clean up from class finalization into separate step.
Browse files Browse the repository at this point in the history

While most of the class finalization can be done without stopping mutators, code clean up has to be done with all mutators stopped. So that part was split into a separate step, invoked during allocation stub creation.

Bug: #36097
Change-Id: I86ca9bf58aaad2ae1884d777a1cc06a22d1ce65c
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/154060
Commit-Queue: Alexander Aprelev <aam@google.com>
Reviewed-by: Martin Kustermann <kustermann@google.com>
Reviewed-by: Ryan Macnak <rmacnak@google.com>
  • Loading branch information
aam authored and commit-bot@chromium.org committed Jul 13, 2020
1 parent ba459f9 commit b387ebc
Show file tree
Hide file tree
Showing 9 changed files with 116 additions and 38 deletions.
74 changes: 55 additions & 19 deletions runtime/vm/class_finalizer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -59,8 +59,8 @@ static void RemoveCHAOptimizedCode(
}
}

void AddSuperType(const AbstractType& type,
GrowableArray<intptr_t>* finalized_super_classes) {
static void AddSuperType(const AbstractType& type,
GrowableArray<intptr_t>* finalized_super_classes) {
ASSERT(type.HasTypeClass());
ASSERT(!type.IsDynamicType());
if (type.IsObjectType()) {
Expand Down Expand Up @@ -1162,33 +1162,69 @@ void ClassFinalizer::FinalizeClass(const Class& cls) {
// Mark as loaded and finalized.
cls.Finalize();
FinalizeMemberTypes(cls);
// Run additional checks after all types are finalized.
if (FLAG_use_cha_deopt) {
GrowableArray<intptr_t> cids;
CollectFinalizedSuperClasses(cls, &cids);
CollectImmediateSuperInterfaces(cls, &cids);
RemoveCHAOptimizedCode(cls, cids);

if (cls.is_enum_class()) {
AllocateEnumValues(cls);
}

// The rest of finalization for a non-top-level class has to be done with
// stopped mutators. It will be done by AllocateFinalizeClass before a new
// instance of the class is created in GetAllocationStubForClass.
if (cls.IsTopLevel()) {
cls.set_is_allocate_finalized();
}
}

ErrorPtr ClassFinalizer::AllocateFinalizeClass(const Class& cls) {
ASSERT(cls.is_finalized());
if (cls.is_allocate_finalized()) {
return Error::null();
}

Thread* thread = Thread::Current();
HANDLESCOPE(thread);

if (FLAG_trace_class_finalization) {
THR_Print("Allocate finalize %s\n", cls.ToCString());
}

#if defined(SUPPORT_TIMELINE)
TimelineBeginEndScope tbes(thread, Timeline::GetCompilerStream(),
"AllocateFinalizeClass");
if (tbes.enabled()) {
tbes.SetNumArguments(1);
tbes.CopyArgument(0, "class", cls.ToCString());
}
#endif // defined(SUPPORT_TIMELINE)

// Run additional checks after all types are finalized.
if (FLAG_use_cha_deopt && !cls.IsTopLevel()) {
{
GrowableArray<intptr_t> cids;
CollectFinalizedSuperClasses(cls, &cids);
CollectImmediateSuperInterfaces(cls, &cids);
RemoveCHAOptimizedCode(cls, cids);
}

Zone* zone = thread->zone();
ClassTable* class_table = thread->isolate()->class_table();
auto& interface_class = Class::Handle(zone);

// We scan every interface this [cls] implements and invalidate all CHA code
// which depends on knowing the implementors of that interface.
GrowableArray<intptr_t> cids;
InterfaceFinder finder(zone, class_table, &cids);
finder.FindAllInterfaces(cls);
for (intptr_t j = 0; j < cids.length(); ++j) {
interface_class = class_table->At(cids[j]);
interface_class.DisableCHAImplementorUsers();
// We scan every interface this [cls] implements and invalidate all CHA
// code which depends on knowing the implementors of that interface.
{
GrowableArray<intptr_t> cids;
InterfaceFinder finder(zone, class_table, &cids);
finder.FindAllInterfaces(cls);
for (intptr_t j = 0; j < cids.length(); ++j) {
interface_class = class_table->At(cids[j]);
interface_class.DisableCHAImplementorUsers();
}
}
}

if (cls.is_enum_class()) {
AllocateEnumValues(cls);
}
cls.set_is_allocate_finalized();
return Error::null();
}

ErrorPtr ClassFinalizer::LoadClassMembers(const Class& cls) {
Expand Down
5 changes: 4 additions & 1 deletion runtime/vm/class_finalizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,11 @@ class ClassFinalizer : public AllStatic {
// Register class in the lists of direct subclasses and direct implementors.
static void RegisterClassInHierarchy(Zone* zone, const Class& cls);

// Finalize the class including its fields and functions.
// Ensures members of the class are loaded, class layout is finalized and size
// registered in class table.
static void FinalizeClass(const Class& cls);
// Makes class instantiatable and usable by generated code.
static ErrorPtr AllocateFinalizeClass(const Class& cls);

// Completes loading of the class, this populates the function
// and fields of the class.
Expand Down
2 changes: 0 additions & 2 deletions runtime/vm/compiler/aot/precompiler.cc
Original file line number Diff line number Diff line change
Expand Up @@ -2404,8 +2404,6 @@ static void GenerateNecessaryAllocationStubs(FlowGraph* flow_graph) {
}

// Return false if bailed out.
// If optimized_result_code is not NULL then it is caller's responsibility
// to install code.
bool PrecompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) {
ASSERT(CompilerState::Current().is_aot());
if (optimized() && !parsed_function()->function().IsOptimizable()) {
Expand Down
2 changes: 0 additions & 2 deletions runtime/vm/compiler/jit/compiler.cc
Original file line number Diff line number Diff line change
Expand Up @@ -502,8 +502,6 @@ void CompileParsedFunctionHelper::CheckIfBackgroundCompilerIsBeingStopped(
}

// Return null if bailed out.
// If optimized_result_code is not NULL then it is caller's responsibility
// to install code.
CodePtr CompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) {
ASSERT(!FLAG_precompiled_mode);
const Function& function = parsed_function()->function();
Expand Down
2 changes: 1 addition & 1 deletion runtime/vm/dart_entry.cc
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ ObjectPtr DartEntry::InvokeFunction(const Function& function,
#if !defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
#else
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_use_bare_instructions) {
Thread* thread = Thread::Current();
thread->set_global_object_pool(
thread->isolate()->object_store()->global_object_pool());
Expand Down
52 changes: 42 additions & 10 deletions runtime/vm/object.cc
Original file line number Diff line number Diff line change
Expand Up @@ -737,7 +737,7 @@ void Object::Init(Isolate* isolate) {
cls.set_next_field_offset(host_next_field_offset, target_next_field_offset);
cls.set_id(Class::kClassId);
cls.set_state_bits(0);
cls.set_is_finalized();
cls.set_is_allocate_finalized();
cls.set_is_declaration_loaded();
cls.set_is_type_finalized();
cls.set_type_arguments_field_offset_in_words(Class::kNoTypeArguments,
Expand All @@ -756,7 +756,7 @@ void Object::Init(Isolate* isolate) {
// Allocate and initialize Never class.
cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate);
cls.set_num_type_arguments(0);
cls.set_is_finalized();
cls.set_is_allocate_finalized();
cls.set_is_declaration_loaded();
cls.set_is_type_finalized();
isolate->object_store()->set_never_class(cls);
Expand All @@ -766,7 +766,7 @@ void Object::Init(Isolate* isolate) {
Class::New<FreeListElement::FakeInstance,
RTN::FreeListElement::FakeInstance>(kFreeListElement, isolate);
cls.set_num_type_arguments(0);
cls.set_is_finalized();
cls.set_is_allocate_finalized();
cls.set_is_declaration_loaded();
cls.set_is_type_finalized();

Expand All @@ -775,7 +775,7 @@ void Object::Init(Isolate* isolate) {
RTN::ForwardingCorpse::FakeInstance>(kForwardingCorpse,
isolate);
cls.set_num_type_arguments(0);
cls.set_is_finalized();
cls.set_is_allocate_finalized();
cls.set_is_declaration_loaded();
cls.set_is_type_finalized();

Expand Down Expand Up @@ -1061,20 +1061,20 @@ void Object::Init(Isolate* isolate) {
cls = Class::New<Instance, RTN::Instance>(kDynamicCid, isolate);
cls.set_is_abstract();
cls.set_num_type_arguments(0);
cls.set_is_finalized();
cls.set_is_allocate_finalized();
cls.set_is_declaration_loaded();
cls.set_is_type_finalized();
dynamic_class_ = cls.raw();

cls = Class::New<Instance, RTN::Instance>(kVoidCid, isolate);
cls.set_num_type_arguments(0);
cls.set_is_finalized();
cls.set_is_allocate_finalized();
cls.set_is_declaration_loaded();
cls.set_is_type_finalized();
void_class_ = cls.raw();

cls = Class::New<Type, RTN::Type>(isolate);
cls.set_is_finalized();
cls.set_is_allocate_finalized();
cls.set_is_declaration_loaded();
cls.set_is_type_finalized();

Expand Down Expand Up @@ -1850,7 +1850,7 @@ ErrorPtr Object::Init(Isolate* isolate,

cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate);
cls.set_num_type_arguments(0);
cls.set_is_finalized();
cls.set_is_allocate_finalized();
cls.set_is_declaration_loaded();
cls.set_is_type_finalized();
cls.set_name(Symbols::Never());
Expand Down Expand Up @@ -2847,7 +2847,7 @@ ClassPtr Class::New(Isolate* isolate, bool register_class) {
// possible in this case.
result.set_is_declaration_loaded();
result.set_is_type_finalized();
result.set_is_finalized();
result.set_is_allocate_finalized();
} else if (FakeObject::kClassId != kClosureCid) {
// VM backed classes are almost ready: run checks and resolve class
// references, but do not recompute size.
Expand Down Expand Up @@ -4222,6 +4222,32 @@ ErrorPtr Class::EnsureIsFinalized(Thread* thread) const {
return error.raw();
}

// Ensures that code outdated by this finalized class has been cleaned up and
// that a new instance of this class is ready to be allocated.
//
// Must run on the mutator thread (a background compilation that reaches this
// point is aborted and retried). Returns Error::null() on success, or the
// error produced while finalizing the class.
ErrorPtr Class::EnsureIsAllocateFinalized(Thread* thread) const {
  ASSERT(!IsNull());
  // Validate the thread pointer before any dereference below.
  ASSERT(thread != NULL);
  // Fast path: allocation finalization already ran for this class.
  if (is_allocate_finalized()) {
    return Error::null();
  }
  if (Compiler::IsBackgroundCompilation()) {
    Compiler::AbortBackgroundCompilation(
        DeoptId::kNone, "Class allocate finalization while compiling");
  }
  ASSERT(thread->IsMutatorThread());
  // Regular finalization is a precondition for allocation finalization.
  Error& error = Error::Handle(thread->zone(), EnsureIsFinalized(thread));
  if (!error.IsNull()) {
    ASSERT(thread == Thread::Current());
    if (thread->long_jump_base() != NULL) {
      Report::LongJump(error);
      UNREACHABLE();
    }
    // NOTE(review): with no long-jump base installed, the finalization error
    // is overwritten by the result below — presumably intentional upstream;
    // confirm before changing.
  }
  error ^= ClassFinalizer::AllocateFinalizeClass(*this);
  return error.raw();
}

void Class::SetFields(const Array& value) const {
ASSERT(!value.IsNull());
#if defined(DEBUG)
Expand Down Expand Up @@ -4415,7 +4441,7 @@ ClassPtr Class::NewNativeWrapper(const Library& library,
compiler::target::RoundedAllocationSize(target_instance_size));
cls.set_next_field_offset(host_instance_size, target_instance_size);
cls.set_num_native_fields(field_count);
cls.set_is_finalized();
cls.set_is_allocate_finalized();
cls.set_is_declaration_loaded();
cls.set_is_type_finalized();
cls.set_is_synthesized_class();
Expand Down Expand Up @@ -4804,6 +4830,12 @@ void Class::set_is_finalized() const {
raw_ptr()->state_bits_));
}

// Marks this class as allocate-finalized: CHA-invalidation is done and
// instances may now be allocated.
void Class::set_is_allocate_finalized() const {
  // This state transition must happen at most once per class.
  ASSERT(!is_allocate_finalized());
  const auto new_bits = ClassFinalizedBits::update(
      ClassLayout::kAllocateFinalized, raw_ptr()->state_bits_);
  set_state_bits(new_bits);
}

void Class::set_is_prefinalized() const {
ASSERT(!is_finalized());
set_state_bits(ClassFinalizedBits::update(ClassLayout::kPreFinalized,
Expand Down
11 changes: 10 additions & 1 deletion runtime/vm/object.h
Original file line number Diff line number Diff line change
Expand Up @@ -1371,10 +1371,18 @@ class Class : public Object {

bool is_finalized() const {
return ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
ClassLayout::kFinalized;
ClassLayout::kFinalized ||
ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
ClassLayout::kAllocateFinalized;
}
void set_is_finalized() const;

bool is_allocate_finalized() const {
return ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
ClassLayout::kAllocateFinalized;
}
void set_is_allocate_finalized() const;

bool is_prefinalized() const {
return ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
ClassLayout::kPreFinalized;
Expand Down Expand Up @@ -1528,6 +1536,7 @@ class Class : public Object {
void EnsureDeclarationLoaded() const;

ErrorPtr EnsureIsFinalized(Thread* thread) const;
ErrorPtr EnsureIsAllocateFinalized(Thread* thread) const;

// Allocate a class used for VM internal objects.
template <class FakeObject, class TargetFakeObject>
Expand Down
3 changes: 2 additions & 1 deletion runtime/vm/raw_object.h
Original file line number Diff line number Diff line change
Expand Up @@ -704,7 +704,8 @@ class ClassLayout : public ObjectLayout {
enum ClassFinalizedState {
kAllocated = 0, // Initial state.
kPreFinalized, // VM classes: size precomputed, but no checks done.
kFinalized, // Class parsed, finalized and ready for use.
kFinalized, // Class parsed, code compiled, not ready for allocation.
kAllocateFinalized, // CHA invalidated, class is ready for allocation.
};
enum ClassLoadingState {
// Class object is created, but it is not filled up.
Expand Down
3 changes: 2 additions & 1 deletion runtime/vm/stub_code.cc
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,8 @@ CodePtr StubCode::GetAllocationStubForClass(const Class& cls) {
Thread* thread = Thread::Current();
auto object_store = thread->isolate()->object_store();
Zone* zone = thread->zone();
const Error& error = Error::Handle(zone, cls.EnsureIsFinalized(thread));
const Error& error =
Error::Handle(zone, cls.EnsureIsAllocateFinalized(thread));
ASSERT(error.IsNull());
if (cls.id() == kArrayCid) {
return object_store->allocate_array_stub();
Expand Down

0 comments on commit b387ebc

Please sign in to comment.