diff --git a/runtime/vm/canonical_tables.h b/runtime/vm/canonical_tables.h
index 310444e5e2f..be90131758b 100644
--- a/runtime/vm/canonical_tables.h
+++ b/runtime/vm/canonical_tables.h
@@ -342,6 +342,51 @@ class MetadataMapTraits {
 };
 typedef UnorderedHashMap<MetadataMapTraits> MetadataMap;
 
+class DispatcherKey {
+ public:
+  DispatcherKey(const String& name,
+                const Array& args_desc,
+                UntaggedFunction::Kind kind)
+      : name_(name), args_desc_(args_desc), kind_(kind) {}
+  bool Equals(const Function& other) const {
+    return (name_.ptr() == other.name()) &&
+           (args_desc_.ptr() == other.saved_args_desc()) &&
+           (kind_ == other.kind());
+  }
+  uword Hash() const { return CombineHashes(name_.Hash(), kind_); }
+
+ private:
+  const String& name_;
+  const Array& args_desc_;
+  UntaggedFunction::Kind kind_;
+};
+
+class DispatcherTraits {
+ public:
+  static const char* Name() { return "DispatcherTraits"; }
+  static bool ReportStats() { return false; }
+
+  // Called when growing the table.
+  static bool IsMatch(const Object& a, const Object& b) {
+    const Function& a_func = Function::Cast(a);
+    const Function& b_func = Function::Cast(b);
+    return (a_func.name() == b_func.name()) &&
+           (a_func.kind() == b_func.kind()) &&
+           (a_func.saved_args_desc() == b_func.saved_args_desc());
+  }
+  static bool IsMatch(const DispatcherKey& key, const Object& obj) {
+    return key.Equals(Function::Cast(obj));
+  }
+  static uword Hash(const Object& key) {
+    const Function& func = Function::Cast(key);
+    return CombineHashes(String::Hash(func.name()), func.kind());
+  }
+  static uword Hash(const DispatcherKey& key) { return key.Hash(); }
+  static ObjectPtr NewKey(const DispatcherKey& key) { UNREACHABLE(); }
+};
+
+typedef UnorderedHashSet<DispatcherTraits> DispatcherSet;
+
 class CanonicalInstanceKey {
  public:
   explicit CanonicalInstanceKey(const Instance& key);
diff --git a/runtime/vm/compiler/aot/precompiler.cc b/runtime/vm/compiler/aot/precompiler.cc
index c6a80e04465..4b946311145 100644
--- a/runtime/vm/compiler/aot/precompiler.cc
+++ b/runtime/vm/compiler/aot/precompiler.cc
@@ -1945,7 +1945,6 @@ void Precompiler::TraceForRetainedFunctions() {
   Library& lib = Library::Handle(Z);
   Class& cls = Class::Handle(Z);
   Array& functions = Array::Handle(Z);
-  String& name = String::Handle(Z);
   Function& function = Function::Handle(Z);
   Function& function2 = Function::Handle(Z);
   Array& fields = Array::Handle(Z);
@@ -1997,17 +1996,16 @@ void Precompiler::TraceForRetainedFunctions() {
         }
       }
 
-      {
-        functions = cls.invocation_dispatcher_cache();
-        InvocationDispatcherTable dispatchers(functions);
-        for (auto dispatcher : dispatchers) {
-          name = dispatcher.Get<Class::kInvocationDispatcherName>();
-          if (name.IsNull()) break;  // Reached last entry.
-          function = dispatcher.Get<Class::kInvocationDispatcherFunction>();
+      if (cls.invocation_dispatcher_cache() != Array::empty_array().ptr()) {
+        DispatcherSet dispatchers(cls.invocation_dispatcher_cache());
+        DispatcherSet::Iterator it(&dispatchers);
+        while (it.MoveNext()) {
+          function ^= dispatchers.GetKey(it.Current());
           if (possibly_retained_functions_.ContainsKey(function)) {
             AddTypesOf(function);
           }
         }
+        dispatchers.Release();
       }
     }
   }
@@ -2253,9 +2251,6 @@ void Precompiler::DropFunctions() {
   };
 
   SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
-  auto& dispatchers_array = Array::Handle(Z);
-  auto& name = String::Handle(Z);
-  auto& desc = Array::Handle(Z);
   for (intptr_t i = 0; i < libraries_.Length(); i++) {
     lib ^= libraries_.At(i);
     HANDLESCOPE(T);
@@ -2283,30 +2278,32 @@ void Precompiler::DropFunctions() {
       }
 
       retained_functions = GrowableObjectArray::New();
-      {
-        dispatchers_array = cls.invocation_dispatcher_cache();
-        InvocationDispatcherTable dispatchers(dispatchers_array);
-        for (auto dispatcher : dispatchers) {
-          name = dispatcher.Get<Class::kInvocationDispatcherName>();
-          if (name.IsNull()) break;  // Reached last entry.
-          desc = dispatcher.Get<Class::kInvocationDispatcherArgsDesc>();
-          function = dispatcher.Get<Class::kInvocationDispatcherFunction>();
+      if (cls.invocation_dispatcher_cache() != Array::empty_array().ptr()) {
+        DispatcherSet dispatchers(Z, cls.invocation_dispatcher_cache());
+        DispatcherSet::Iterator it(&dispatchers);
+        while (it.MoveNext()) {
+          function ^= dispatchers.GetKey(it.Current());
           if (functions_to_retain_.ContainsKey(function)) {
-            retained_functions.Add(name);
-            retained_functions.Add(desc);
             trim_function(function);
             retained_functions.Add(function);
           } else {
            drop_function(function);
          }
         }
+        dispatchers.Release();
       }
 
-      if (retained_functions.Length() > 0) {
-        functions = Array::MakeFixedLength(retained_functions);
+      if (retained_functions.Length() == 0) {
+        cls.set_invocation_dispatcher_cache(Array::empty_array());
       } else {
-        functions = Object::empty_array().ptr();
+        DispatcherSet retained_dispatchers(
+            Z, HashTables::New<DispatcherSet>(retained_functions.Length(),
+                                              Heap::kOld));
+        for (intptr_t j = 0; j < retained_functions.Length(); j++) {
+          function ^= retained_functions.At(j);
+          retained_dispatchers.Insert(function);
+        }
+        cls.set_invocation_dispatcher_cache(retained_dispatchers.Release());
       }
-      cls.set_invocation_dispatcher_cache(functions);
     }
   }
diff --git a/runtime/vm/compiler/aot/precompiler.h b/runtime/vm/compiler/aot/precompiler.h
index 50df9e7d594..243d246f5e4 100644
--- a/runtime/vm/compiler/aot/precompiler.h
+++ b/runtime/vm/compiler/aot/precompiler.h
@@ -82,32 +82,6 @@ struct FunctionKeyTraits {
 
 typedef UnorderedHashSet<FunctionKeyTraits> FunctionSet;
 
-class FieldKeyValueTrait {
- public:
-  // Typedefs needed for the DirectChainedHashMap template.
-  typedef const Field* Key;
-  typedef const Field* Value;
-  typedef const Field* Pair;
-
-  static Key KeyOf(Pair kv) { return kv; }
-
-  static Value ValueOf(Pair kv) { return kv; }
-
-  static inline uword Hash(Key key) {
-    const TokenPosition token_pos = key->token_pos();
-    if (token_pos.IsReal()) {
-      return token_pos.Hash();
-    }
-    return key->kernel_offset();
-  }
-
-  static inline bool IsKeyEqual(Pair pair, Key key) {
-    return pair->ptr() == key->ptr();
-  }
-};
-
-typedef DirectChainedHashMap<FieldKeyValueTrait> FieldSet;
-
 class ClassKeyValueTrait {
  public:
   // Typedefs needed for the DirectChainedHashMap template.
diff --git a/runtime/vm/compiler/backend/inliner.cc b/runtime/vm/compiler/backend/inliner.cc
index 22198161e3e..f1823129811 100644
--- a/runtime/vm/compiler/backend/inliner.cc
+++ b/runtime/vm/compiler/backend/inliner.cc
@@ -1446,11 +1446,11 @@ class CallSiteInliner : public ValueObject {
 
     // When inlined, we add the guarded fields of the callee to the caller's
     // list of guarded fields.
-    const ZoneGrowableArray<const Field*>& callee_guarded_fields =
-        *callee_graph->parsed_function().guarded_fields();
-    for (intptr_t i = 0; i < callee_guarded_fields.length(); ++i) {
-      caller_graph()->parsed_function().AddToGuardedFields(
-          callee_guarded_fields[i]);
+    const FieldSet* callee_guarded_fields =
+        callee_graph->parsed_function().guarded_fields();
+    FieldSet::Iterator it = callee_guarded_fields->GetIterator();
+    while (const Field** field = it.Next()) {
+      caller_graph()->parsed_function().AddToGuardedFields(*field);
     }
 
     {
diff --git a/runtime/vm/compiler/backend/slot_test.cc b/runtime/vm/compiler/backend/slot_test.cc
index 7c1bf8d7bba..9ba035f987f 100644
--- a/runtime/vm/compiler/backend/slot_test.cc
+++ b/runtime/vm/compiler/backend/slot_test.cc
@@ -82,9 +82,8 @@ TEST_CASE(SlotFromGuardedField) {
     EXPECT_EQ(kSmiCid, slot1.nullable_cid());
 
     // Check that the field was added (once) to the list of guarded fields.
-    EXPECT_EQ(1, parsed_function->guarded_fields()->length());
-    EXPECT_EQ(parsed_function->guarded_fields()->At(0)->ptr(),
-              field_clone_1.ptr());
+    EXPECT_EQ(1, parsed_function->guarded_fields()->Length());
+    EXPECT(parsed_function->guarded_fields()->HasKey(&field_clone_1));
 
     // Change the guarded state of the field to "unknown" - emulating concurrent
     // modification of the guarded state in mutator) and create a new clone of
@@ -99,9 +98,8 @@ TEST_CASE(SlotFromGuardedField) {
         new (zone) ParsedFunction(thread, dummy_function);
     const Slot& slot3 = Slot::Get(field_clone_3, parsed_function2);
     EXPECT_EQ(&slot1, &slot3);
-    EXPECT_EQ(1, parsed_function2->guarded_fields()->length());
-    EXPECT_EQ(parsed_function2->guarded_fields()->At(0)->ptr(),
-              field_clone_1.ptr());
+    EXPECT_EQ(1, parsed_function2->guarded_fields()->Length());
+    EXPECT(parsed_function2->guarded_fields()->HasKey(&field_clone_1));
   }
 }
 
 }  // namespace dart
diff --git a/runtime/vm/compiler/jit/compiler.cc b/runtime/vm/compiler/jit/compiler.cc
index bfcc52cc1e5..393c3704972 100644
--- a/runtime/vm/compiler/jit/compiler.cc
+++ b/runtime/vm/compiler/jit/compiler.cc
@@ -389,19 +389,19 @@ CodePtr CompileParsedFunctionHelper::FinalizeCompilation(
   const bool trace_compiler =
       FLAG_trace_compiler || FLAG_trace_optimizing_compiler;
   bool code_is_valid = true;
-  if (!flow_graph->parsed_function().guarded_fields()->is_empty()) {
-    const ZoneGrowableArray<const Field*>& guarded_fields =
-        *flow_graph->parsed_function().guarded_fields();
+  if (flow_graph->parsed_function().guarded_fields()->Length() != 0) {
+    const FieldSet* guarded_fields =
+        flow_graph->parsed_function().guarded_fields();
     Field& original = Field::Handle();
-    for (intptr_t i = 0; i < guarded_fields.length(); i++) {
-      const Field& field = *guarded_fields[i];
-      ASSERT(!field.IsOriginal());
-      original = field.Original();
-      if (!field.IsConsistentWith(original)) {
+    FieldSet::Iterator it = guarded_fields->GetIterator();
+    while (const Field** field = it.Next()) {
+      ASSERT(!(*field)->IsOriginal());
+      original = (*field)->Original();
+      if (!(*field)->IsConsistentWith(original)) {
         code_is_valid = false;
         if (trace_compiler) {
           THR_Print("--> FAIL: Field %s guarded state changed.",
-                    field.ToCString());
+                    (*field)->ToCString());
         }
         break;
       }
     }
@@ -444,11 +444,12 @@ CodePtr CompileParsedFunctionHelper::FinalizeCompilation(
     // to ensure that the code will be deoptimized if they are violated.
     thread()->compiler_state().cha().RegisterDependencies(code);
 
-    const ZoneGrowableArray<const Field*>& guarded_fields =
-        *flow_graph->parsed_function().guarded_fields();
+    const FieldSet* guarded_fields =
+        flow_graph->parsed_function().guarded_fields();
     Field& field = Field::Handle();
-    for (intptr_t i = 0; i < guarded_fields.length(); i++) {
-      field = guarded_fields[i]->Original();
+    FieldSet::Iterator it = guarded_fields->GetIterator();
+    while (const Field** guarded_field = it.Next()) {
+      field = (*guarded_field)->Original();
       field.RegisterDependentCode(code);
     }
   }
diff --git a/runtime/vm/compiler/stub_code_compiler_arm.cc b/runtime/vm/compiler/stub_code_compiler_arm.cc
index 0975fb73be1..f0b6b15b774 100644
--- a/runtime/vm/compiler/stub_code_compiler_arm.cc
+++ b/runtime/vm/compiler/stub_code_compiler_arm.cc
@@ -1920,8 +1920,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
   classid_t cls_id = target::Class::GetId(cls);
   ASSERT(cls_id != kIllegalCid);
 
-  RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
-
   // The generated code is different if the class is parameterized.
   const bool is_cls_parameterized = target::Class::NumTypeArguments(cls) > 0;
   ASSERT(!is_cls_parameterized || target::Class::TypeArgumentsFieldOffset(
@@ -1929,7 +1927,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
 
   const intptr_t instance_size = target::Class::GetInstanceSize(cls);
   ASSERT(instance_size > 0);
-  RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
 
   const uword tags =
       target::MakeTagWordForNewSpaceObject(cls_id, instance_size);
@@ -1941,6 +1938,9 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
   if (!FLAG_use_slow_path && FLAG_inline_alloc &&
       !target::Class::TraceAllocation(cls) &&
       target::SizeFitsInSizeTag(instance_size)) {
+    RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
+    RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
+
     if (is_cls_parameterized) {
       if (!IsSameObject(NullObject(),
                         CastHandle<Object>(allocat_object_parametrized))) {
diff --git a/runtime/vm/compiler/stub_code_compiler_arm64.cc b/runtime/vm/compiler/stub_code_compiler_arm64.cc
index de774dbcbd5..1981afeb0a9 100644
--- a/runtime/vm/compiler/stub_code_compiler_arm64.cc
+++ b/runtime/vm/compiler/stub_code_compiler_arm64.cc
@@ -2221,8 +2221,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
   classid_t cls_id = target::Class::GetId(cls);
   ASSERT(cls_id != kIllegalCid);
 
-  RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
-
   // The generated code is different if the class is parameterized.
   const bool is_cls_parameterized = target::Class::NumTypeArguments(cls) > 0;
   ASSERT(!is_cls_parameterized || target::Class::TypeArgumentsFieldOffset(
@@ -2230,7 +2228,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
 
   const intptr_t instance_size = target::Class::GetInstanceSize(cls);
   ASSERT(instance_size > 0);
-  RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
 
   const uword tags =
       target::MakeTagWordForNewSpaceObject(cls_id, instance_size);
@@ -2243,6 +2240,9 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
   if (!FLAG_use_slow_path && FLAG_inline_alloc &&
       !target::Class::TraceAllocation(cls) &&
       target::SizeFitsInSizeTag(instance_size)) {
+    RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
+    RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
+
     if (is_cls_parameterized) {
       if (!IsSameObject(NullObject(),
                         CastHandle<Object>(allocat_object_parametrized))) {
diff --git a/runtime/vm/compiler/stub_code_compiler_riscv.cc b/runtime/vm/compiler/stub_code_compiler_riscv.cc
index 489eb7dc373..66a58721129 100644
--- a/runtime/vm/compiler/stub_code_compiler_riscv.cc
+++ b/runtime/vm/compiler/stub_code_compiler_riscv.cc
@@ -2023,8 +2023,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
   classid_t cls_id = target::Class::GetId(cls);
   ASSERT(cls_id != kIllegalCid);
 
-  RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
-
   // The generated code is different if the class is parameterized.
   const bool is_cls_parameterized = target::Class::NumTypeArguments(cls) > 0;
   ASSERT(!is_cls_parameterized || target::Class::TypeArgumentsFieldOffset(
@@ -2032,7 +2030,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
 
   const intptr_t instance_size = target::Class::GetInstanceSize(cls);
   ASSERT(instance_size > 0);
-  RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
 
   const uword tags =
       target::MakeTagWordForNewSpaceObject(cls_id, instance_size);
@@ -2046,6 +2043,8 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
   if (!FLAG_use_slow_path && FLAG_inline_alloc &&
       !target::Class::TraceAllocation(cls) &&
       target::SizeFitsInSizeTag(instance_size)) {
+    RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
+    RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
     if (is_cls_parameterized) {
       if (!IsSameObject(NullObject(),
                         CastHandle<Object>(allocat_object_parametrized))) {
diff --git a/runtime/vm/compiler/stub_code_compiler_x64.cc b/runtime/vm/compiler/stub_code_compiler_x64.cc
index 321e1613d83..89ee207bb44 100644
--- a/runtime/vm/compiler/stub_code_compiler_x64.cc
+++ b/runtime/vm/compiler/stub_code_compiler_x64.cc
@@ -2126,8 +2126,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
   classid_t cls_id = target::Class::GetId(cls);
   ASSERT(cls_id != kIllegalCid);
 
-  RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
-
   const intptr_t cls_type_arg_field_offset =
       target::Class::TypeArgumentsFieldOffset(cls);
 
@@ -2138,9 +2136,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
 
   const intptr_t instance_size = target::Class::GetInstanceSize(cls);
   ASSERT(instance_size > 0);
-  // User-defined classes should always be allocatable in new space.
-  RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
-
   const uword tags =
       target::MakeTagWordForNewSpaceObject(cls_id, instance_size);
 
@@ -2152,6 +2147,9 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
   if (!FLAG_use_slow_path && FLAG_inline_alloc &&
       !target::Class::TraceAllocation(cls) &&
       target::SizeFitsInSizeTag(instance_size)) {
+    RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
+    RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
+
     if (is_cls_parameterized) {
       if (!IsSameObject(NullObject(),
                         CastHandle<Object>(allocat_object_parametrized))) {
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index 057843c301a..7fc042e077b 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -3739,41 +3739,14 @@ void Class::AddInvocationDispatcher(const String& target_name,
   auto thread = Thread::Current();
   ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter());
 
-  auto zone = thread->zone();
-  auto& cache = Array::Handle(zone, invocation_dispatcher_cache());
-  InvocationDispatcherTable dispatchers(cache);
-  intptr_t i = 0;
-#if defined(DEBUG)
-  auto& function = Function::Handle();
-#endif
-  for (auto entry : dispatchers) {
-    if (entry.Get<Class::kInvocationDispatcherName>() == String::null()) {
-      break;
-    }
+  ASSERT(target_name.ptr() == dispatcher.name());
 
-#if defined(DEBUG)
-    // Check for duplicate entries in the cache.
-    function = entry.Get<Class::kInvocationDispatcherFunction>();
-    ASSERT(entry.Get<Class::kInvocationDispatcherName>() != target_name.ptr() ||
-           function.kind() != dispatcher.kind() ||
-           entry.Get<Class::kInvocationDispatcherArgsDesc>() != args_desc.ptr());
-#endif  // defined(DEBUG)
-    i++;
-  }
-  if (i == dispatchers.Length()) {
-    const intptr_t new_len =
-        cache.Length() == 0
-            ? static_cast<intptr_t>(Class::kInvocationDispatcherEntrySize)
-            : cache.Length() * 2;
-    cache = Array::Grow(cache, new_len);
-    set_invocation_dispatcher_cache(cache);
-  }
-  // Ensure all stores are visible at the point the name is visible.
-  auto entry = dispatchers[i];
-  entry.Set<Class::kInvocationDispatcherArgsDesc>(args_desc);
-  entry.Set<Class::kInvocationDispatcherFunction>(dispatcher);
-  entry.Set<Class::kInvocationDispatcherName, std::memory_order_release>(
-      target_name);
+  DispatcherSet dispatchers(invocation_dispatcher_cache() ==
+                                    Array::empty_array().ptr()
+                                ? HashTables::New<DispatcherSet>(4, Heap::kOld)
+                                : invocation_dispatcher_cache());
+  dispatchers.Insert(dispatcher);
+  set_invocation_dispatcher_cache(dispatchers.Release());
 }
 
 FunctionPtr Class::GetInvocationDispatcher(const String& target_name,
@@ -3786,32 +3759,14 @@ FunctionPtr Class::GetInvocationDispatcher(const String& target_name,
   auto thread = Thread::Current();
   auto Z = thread->zone();
   auto& function = Function::Handle(Z);
-  auto& name = String::Handle(Z);
-  auto& desc = Array::Handle(Z);
-  auto& cache = Array::Handle(Z);
-
-  auto find_entry = [&]() {
-    cache = invocation_dispatcher_cache();
-    ASSERT(!cache.IsNull());
-    InvocationDispatcherTable dispatchers(cache);
-    for (auto dispatcher : dispatchers) {
-      // Ensure all loads are done after loading the name.
-      name = dispatcher.Get<Class::kInvocationDispatcherName,
-                            std::memory_order_acquire>();
-      if (name.IsNull()) break;  // Reached last entry.
-      if (!name.Equals(target_name)) continue;
-      desc = dispatcher.Get<Class::kInvocationDispatcherArgsDesc>();
-      if (desc.ptr() != args_desc.ptr()) continue;
-      function = dispatcher.Get<Class::kInvocationDispatcherFunction>();
-      if (function.kind() == kind) {
-        return function.ptr();
-      }
-    }
-    return Function::null();
-  };
 
   // First we'll try to find it without using locks.
-  function = find_entry();
+  DispatcherKey key(target_name, args_desc, kind);
+  if (invocation_dispatcher_cache() != Array::empty_array().ptr()) {
+    DispatcherSet dispatchers(Z, invocation_dispatcher_cache());
+    function ^= dispatchers.GetOrNull(key);
+    dispatchers.Release();
+  }
   if (!function.IsNull() || !create_if_absent) {
     return function.ptr();
   }
@@ -3820,7 +3775,11 @@ FunctionPtr Class::GetInvocationDispatcher(const String& target_name,
   SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
 
   // Try to find it again & return if it was added in the meantime.
-  function = find_entry();
+  if (invocation_dispatcher_cache() != Array::empty_array().ptr()) {
+    DispatcherSet dispatchers(Z, invocation_dispatcher_cache());
+    function ^= dispatchers.GetOrNull(key);
+    dispatchers.Release();
+  }
   if (!function.IsNull()) return function.ptr();
 
   // Otherwise create it & add it.
@@ -7832,11 +7791,12 @@ void Function::set_extracted_method_closure(const Function& value) const {
 }
 
 ArrayPtr Function::saved_args_desc() const {
+  if (kind() == UntaggedFunction::kDynamicInvocationForwarder) {
+    return Array::null();
+  }
   ASSERT(kind() == UntaggedFunction::kNoSuchMethodDispatcher ||
          kind() == UntaggedFunction::kInvokeFieldDispatcher);
-  const Object& obj = Object::Handle(untag()->data());
-  ASSERT(obj.IsArray());
-  return Array::Cast(obj).ptr();
+  return Array::RawCast(untag()->data());
 }
 
 void Function::set_saved_args_desc(const Array& value) const {
@@ -11042,12 +11002,10 @@ FieldPtr Field::Original() const {
   if (IsNull()) {
     return Field::null();
   }
-  Object& obj = Object::Handle(untag()->owner());
-  if (obj.IsField()) {
-    return Field::RawCast(obj.ptr());
-  } else {
-    return this->ptr();
+  if (untag()->owner()->IsField()) {
+    return static_cast<FieldPtr>(untag()->owner());
   }
+  return this->ptr();
 }
 
 intptr_t Field::guarded_cid() const {
@@ -16199,12 +16157,10 @@ ICDataPtr ICData::Original() const {
   if (IsNull()) {
    return ICData::null();
   }
-  Object& obj = Object::Handle(untag()->owner());
-  if (obj.IsFunction()) {
-    return this->ptr();
-  } else {
-    return ICData::RawCast(obj.ptr());
+  if (untag()->owner()->IsICData()) {
+    return static_cast<ICDataPtr>(untag()->owner());
   }
+  return this->ptr();
 }
 
 void ICData::SetOriginal(const ICData& value) const {
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index ac6664a4170..c735405ba6a 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -13147,10 +13147,6 @@ class ArrayOfTuplesView {
   const Array& array_;
 };
 
-using InvocationDispatcherTable =
-    ArrayOfTuplesView<Class::InvocationDispatcherEntry,
-                      std::tuple<String, Array, Function>>;
-
 using StaticCallsTable =
     ArrayOfTuplesView<Code::SCallTableEntry, std::tuple<Smi, Object, Function>>;
diff --git a/runtime/vm/parser.cc b/runtime/vm/parser.cc
index fe170ed97e3..d645daf8fa8 100644
--- a/runtime/vm/parser.cc
+++ b/runtime/vm/parser.cc
@@ -60,7 +60,7 @@ ParsedFunction::ParsedFunction(Thread* thread, const Function& function)
       entry_points_temp_var_(NULL),
       finally_return_temp_var_(NULL),
       dynamic_closure_call_vars_(nullptr),
-      guarded_fields_(new ZoneGrowableArray<const Field*>()),
+      guarded_fields_(),
       default_parameter_values_(NULL),
       raw_type_arguments_var_(NULL),
       first_parameter_index_(),
@@ -93,21 +93,19 @@ void ParsedFunction::AddToGuardedFields(const Field* field) const {
     return;
   }
 
-  for (intptr_t j = 0; j < guarded_fields_->length(); j++) {
-    const Field* other = (*guarded_fields_)[j];
-    if (field->Original() == other->Original()) {
-      // Abort background compilation early if the guarded state of this field
-      // has changed during compilation. We will not be able to commit
-      // the resulting code anyway.
-      if (Compiler::IsBackgroundCompilation()) {
-        if (!other->IsConsistentWith(*field)) {
-          Compiler::AbortBackgroundCompilation(
-              DeoptId::kNone,
-              "Field's guarded state changed during compilation");
-        }
+  const Field** other = guarded_fields_.Lookup(field);
+  if (other != nullptr) {
+    ASSERT(field->Original() == (*other)->Original());
+    // Abort background compilation early if the guarded state of this field
+    // has changed during compilation. We will not be able to commit
+    // the resulting code anyway.
+    if (Compiler::IsBackgroundCompilation()) {
+      if (!(*other)->IsConsistentWith(*field)) {
+        Compiler::AbortBackgroundCompilation(
+            DeoptId::kNone, "Field's guarded state changed during compilation");
       }
-      return;
     }
+    return;
   }
 
   // Note: the list of guarded fields must contain copies during optimizing
@@ -116,7 +114,7 @@ void ParsedFunction::AddToGuardedFields(const Field* field) const {
   // inlining.
   ASSERT(field->IsOriginal() ==
          !CompilerState::Current().should_clone_fields());
-  guarded_fields_->Add(&Field::ZoneHandle(Z, field->ptr()));
+  guarded_fields_.Insert(&Field::ZoneHandle(Z, field->ptr()));
 }
 
 void ParsedFunction::Bailout(const char* origin, const char* reason) const {
diff --git a/runtime/vm/parser.h b/runtime/vm/parser.h
index 8790f3b19a0..66a4b39fc9d 100644
--- a/runtime/vm/parser.h
+++ b/runtime/vm/parser.h
@@ -39,6 +39,32 @@ template <typename T>
 class GrowableArray;
 class Parser;
 
+class FieldKeyValueTrait {
+ public:
+  // Typedefs needed for the DirectChainedHashMap template.
+  typedef const Field* Key;
+  typedef const Field* Value;
+  typedef const Field* Pair;
+
+  static Key KeyOf(Pair kv) { return kv; }
+
+  static Value ValueOf(Pair kv) { return kv; }
+
+  static inline uword Hash(Key key) {
+    const TokenPosition token_pos = key->token_pos();
+    if (token_pos.IsReal()) {
+      return token_pos.Hash();
+    }
+    return key->kernel_offset();
+  }
+
+  static inline bool IsKeyEqual(Pair pair, Key key) {
+    return pair->Original() == key->Original();
+  }
+};
+
+typedef DirectChainedHashMap<FieldKeyValueTrait> FieldSet;
+
 // The class ParsedFunction holds the result of parsing a function.
 class ParsedFunction : public ZoneAllocated {
  public:
@@ -153,9 +179,7 @@ class ParsedFunction : public ZoneAllocated {
   LocalVariable* EnsureExpressionTemp();
   LocalVariable* EnsureEntryPointsTemp();
 
-  ZoneGrowableArray<const Field*>* guarded_fields() const {
-    return guarded_fields_;
-  }
+  const FieldSet* guarded_fields() const { return &guarded_fields_; }
 
   VariableIndex first_parameter_index() const { return first_parameter_index_; }
   int num_stack_locals() const { return num_stack_locals_; }
@@ -274,7 +298,7 @@ class ParsedFunction : public ZoneAllocated {
   LocalVariable* entry_points_temp_var_;
   LocalVariable* finally_return_temp_var_;
   DynamicClosureCallVars* dynamic_closure_call_vars_;
-  ZoneGrowableArray<const Field*>* guarded_fields_;
+  mutable FieldSet guarded_fields_;
   ZoneGrowableArray<const Instance*>* default_parameter_values_;
 
   LocalVariable* raw_type_arguments_var_;