[vm] Fix various scaling limitations.

- Quadratic behavior recording field guards
- Quadratic behavior looking up dynamic invocation forwarders
- Incorrect limit generating allocation stubs

TEST=class with 64k fields
Change-Id: I7a9aba617e71eff9b28711e99c689cb04be0dde7
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/273285
Reviewed-by: Alexander Aprelev <aam@google.com>
Commit-Queue: Ryan Macnak <rmacnak@google.com>
This commit is contained in:
Ryan Macnak 2022-12-13 19:28:37 +00:00 committed by Commit Queue
parent 93c3737674
commit fa2e1c70ca
14 changed files with 170 additions and 184 deletions

View file

@ -342,6 +342,51 @@ class MetadataMapTraits {
};
typedef UnorderedHashMap<MetadataMapTraits> MetadataMap;
// Lookup key identifying an invocation dispatcher by its selector name,
// arguments descriptor, and dispatcher kind. Allows probing a DispatcherSet
// without first allocating a placeholder Function.
class DispatcherKey {
public:
DispatcherKey(const String& name,
const Array& args_desc,
UntaggedFunction::Kind kind)
: name_(name), args_desc_(args_desc), kind_(kind) {}
// True if |other| is the dispatcher this key describes. The args descriptor
// is compared by pointer identity — assumes argument descriptors are
// canonicalized so identical descriptors share one object (TODO confirm).
bool Equals(const Function& other) const {
return (name_.ptr() == other.name()) &&
(args_desc_.ptr() == other.saved_args_desc()) &&
(kind_ == other.kind());
}
// Deliberately ignores args_desc_: must produce the same value as
// DispatcherTraits::Hash(const Object&), which also hashes only the
// selector name and kind. Entries that Equals() each other must hash equal.
uword Hash() const { return CombineHashes(name_.Hash(), kind_); }
private:
// References only — the key is a short-lived stack object; the caller's
// handles must outlive it.
const String& name_;
const Array& args_desc_;
UntaggedFunction::Kind kind_;
};
// Hash-set traits for a set of invocation-dispatcher Functions, keyed by
// (name, kind, saved args descriptor). Supports lookup either by a stored
// Function or by a lightweight DispatcherKey; the two Hash() overloads (and
// the two IsMatch() overloads) must agree — both hash only the name and
// kind, never the args descriptor.
class DispatcherTraits {
public:
static const char* Name() { return "DispatcherTraits"; }
static bool ReportStats() { return false; }
// Called when growing the table.
static bool IsMatch(const Object& a, const Object& b) {
const Function& a_func = Function::Cast(a);
const Function& b_func = Function::Cast(b);
return (a_func.name() == b_func.name()) &&
(a_func.kind() == b_func.kind()) &&
(a_func.saved_args_desc() == b_func.saved_args_desc());
}
// Probe with a DispatcherKey instead of allocating a Function.
static bool IsMatch(const DispatcherKey& key, const Object& obj) {
return key.Equals(Function::Cast(obj));
}
// Must match DispatcherKey::Hash() — see comment there.
static uword Hash(const Object& key) {
const Function& func = Function::Cast(key);
return CombineHashes(String::Hash(func.name()), func.kind());
}
static uword Hash(const DispatcherKey& key) { return key.Hash(); }
// Insertion always goes through an existing Function, never a key, so a
// key is never materialized into a stored object.
static ObjectPtr NewKey(const DispatcherKey& key) { UNREACHABLE(); }
};
typedef UnorderedHashSet<DispatcherTraits, AcqRelStorageTraits> DispatcherSet;
class CanonicalInstanceKey {
public:
explicit CanonicalInstanceKey(const Instance& key);

View file

@ -1945,7 +1945,6 @@ void Precompiler::TraceForRetainedFunctions() {
Library& lib = Library::Handle(Z);
Class& cls = Class::Handle(Z);
Array& functions = Array::Handle(Z);
String& name = String::Handle(Z);
Function& function = Function::Handle(Z);
Function& function2 = Function::Handle(Z);
Array& fields = Array::Handle(Z);
@ -1997,17 +1996,16 @@ void Precompiler::TraceForRetainedFunctions() {
}
}
{
functions = cls.invocation_dispatcher_cache();
InvocationDispatcherTable dispatchers(functions);
for (auto dispatcher : dispatchers) {
name = dispatcher.Get<Class::kInvocationDispatcherName>();
if (name.IsNull()) break; // Reached last entry.
function = dispatcher.Get<Class::kInvocationDispatcherFunction>();
if (cls.invocation_dispatcher_cache() != Array::empty_array().ptr()) {
DispatcherSet dispatchers(cls.invocation_dispatcher_cache());
DispatcherSet::Iterator it(&dispatchers);
while (it.MoveNext()) {
function ^= dispatchers.GetKey(it.Current());
if (possibly_retained_functions_.ContainsKey(function)) {
AddTypesOf(function);
}
}
dispatchers.Release();
}
}
}
@ -2253,9 +2251,6 @@ void Precompiler::DropFunctions() {
};
SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
auto& dispatchers_array = Array::Handle(Z);
auto& name = String::Handle(Z);
auto& desc = Array::Handle(Z);
for (intptr_t i = 0; i < libraries_.Length(); i++) {
lib ^= libraries_.At(i);
HANDLESCOPE(T);
@ -2283,30 +2278,32 @@ void Precompiler::DropFunctions() {
}
retained_functions = GrowableObjectArray::New();
{
dispatchers_array = cls.invocation_dispatcher_cache();
InvocationDispatcherTable dispatchers(dispatchers_array);
for (auto dispatcher : dispatchers) {
name = dispatcher.Get<Class::kInvocationDispatcherName>();
if (name.IsNull()) break; // Reached last entry.
desc = dispatcher.Get<Class::kInvocationDispatcherArgsDesc>();
function = dispatcher.Get<Class::kInvocationDispatcherFunction>();
if (cls.invocation_dispatcher_cache() != Array::empty_array().ptr()) {
DispatcherSet dispatchers(Z, cls.invocation_dispatcher_cache());
DispatcherSet::Iterator it(&dispatchers);
while (it.MoveNext()) {
function ^= dispatchers.GetKey(it.Current());
if (functions_to_retain_.ContainsKey(function)) {
retained_functions.Add(name);
retained_functions.Add(desc);
trim_function(function);
retained_functions.Add(function);
} else {
drop_function(function);
}
}
dispatchers.Release();
}
if (retained_functions.Length() > 0) {
functions = Array::MakeFixedLength(retained_functions);
if (retained_functions.Length() == 0) {
cls.set_invocation_dispatcher_cache(Array::empty_array());
} else {
functions = Object::empty_array().ptr();
DispatcherSet retained_dispatchers(
Z, HashTables::New<DispatcherSet>(retained_functions.Length(),
Heap::kOld));
for (intptr_t j = 0; j < retained_functions.Length(); j++) {
function ^= retained_functions.At(j);
retained_dispatchers.Insert(function);
}
cls.set_invocation_dispatcher_cache(retained_dispatchers.Release());
}
cls.set_invocation_dispatcher_cache(functions);
}
}

View file

@ -82,32 +82,6 @@ struct FunctionKeyTraits {
typedef UnorderedHashSet<FunctionKeyTraits> FunctionSet;
// Hash-set traits for a set of Field pointers (FieldSet below).
class FieldKeyValueTrait {
public:
// Typedefs needed for the DirectChainedHashMap template.
typedef const Field* Key;
typedef const Field* Value;
typedef const Field* Pair;
static Key KeyOf(Pair kv) { return kv; }
static Value ValueOf(Pair kv) { return kv; }
// Hash by the field's source position when it is real, otherwise by its
// kernel offset — presumably both are stable for a given source field;
// verify against Field invariants.
static inline uword Hash(Key key) {
const TokenPosition token_pos = key->token_pos();
if (token_pos.IsReal()) {
return token_pos.Hash();
}
return key->kernel_offset();
}
// Pointer-identity equality: distinct clones of the same original field
// are treated as distinct keys.
static inline bool IsKeyEqual(Pair pair, Key key) {
return pair->ptr() == key->ptr();
}
};
typedef DirectChainedHashMap<FieldKeyValueTrait> FieldSet;
class ClassKeyValueTrait {
public:
// Typedefs needed for the DirectChainedHashMap template.

View file

@ -1446,11 +1446,11 @@ class CallSiteInliner : public ValueObject {
// When inlined, we add the guarded fields of the callee to the caller's
// list of guarded fields.
const ZoneGrowableArray<const Field*>& callee_guarded_fields =
*callee_graph->parsed_function().guarded_fields();
for (intptr_t i = 0; i < callee_guarded_fields.length(); ++i) {
caller_graph()->parsed_function().AddToGuardedFields(
callee_guarded_fields[i]);
const FieldSet* callee_guarded_fields =
callee_graph->parsed_function().guarded_fields();
FieldSet::Iterator it = callee_guarded_fields->GetIterator();
while (const Field** field = it.Next()) {
caller_graph()->parsed_function().AddToGuardedFields(*field);
}
{

View file

@ -82,9 +82,8 @@ TEST_CASE(SlotFromGuardedField) {
EXPECT_EQ(kSmiCid, slot1.nullable_cid());
// Check that the field was added (once) to the list of guarded fields.
EXPECT_EQ(1, parsed_function->guarded_fields()->length());
EXPECT_EQ(parsed_function->guarded_fields()->At(0)->ptr(),
field_clone_1.ptr());
EXPECT_EQ(1, parsed_function->guarded_fields()->Length());
EXPECT(parsed_function->guarded_fields()->HasKey(&field_clone_1));
// Change the guarded state of the field to "unknown" - emulating concurrent
// modification of the guarded state in mutator) and create a new clone of
@ -99,9 +98,8 @@ TEST_CASE(SlotFromGuardedField) {
new (zone) ParsedFunction(thread, dummy_function);
const Slot& slot3 = Slot::Get(field_clone_3, parsed_function2);
EXPECT_EQ(&slot1, &slot3);
EXPECT_EQ(1, parsed_function2->guarded_fields()->length());
EXPECT_EQ(parsed_function2->guarded_fields()->At(0)->ptr(),
field_clone_1.ptr());
EXPECT_EQ(1, parsed_function2->guarded_fields()->Length());
EXPECT(parsed_function2->guarded_fields()->HasKey(&field_clone_1));
}
} // namespace dart

View file

@ -389,19 +389,19 @@ CodePtr CompileParsedFunctionHelper::FinalizeCompilation(
const bool trace_compiler =
FLAG_trace_compiler || FLAG_trace_optimizing_compiler;
bool code_is_valid = true;
if (!flow_graph->parsed_function().guarded_fields()->is_empty()) {
const ZoneGrowableArray<const Field*>& guarded_fields =
*flow_graph->parsed_function().guarded_fields();
if (flow_graph->parsed_function().guarded_fields()->Length() != 0) {
const FieldSet* guarded_fields =
flow_graph->parsed_function().guarded_fields();
Field& original = Field::Handle();
for (intptr_t i = 0; i < guarded_fields.length(); i++) {
const Field& field = *guarded_fields[i];
ASSERT(!field.IsOriginal());
original = field.Original();
if (!field.IsConsistentWith(original)) {
FieldSet::Iterator it = guarded_fields->GetIterator();
while (const Field** field = it.Next()) {
ASSERT(!(*field)->IsOriginal());
original = (*field)->Original();
if (!(*field)->IsConsistentWith(original)) {
code_is_valid = false;
if (trace_compiler) {
THR_Print("--> FAIL: Field %s guarded state changed.",
field.ToCString());
(*field)->ToCString());
}
break;
}
@ -444,11 +444,12 @@ CodePtr CompileParsedFunctionHelper::FinalizeCompilation(
// to ensure that the code will be deoptimized if they are violated.
thread()->compiler_state().cha().RegisterDependencies(code);
const ZoneGrowableArray<const Field*>& guarded_fields =
*flow_graph->parsed_function().guarded_fields();
const FieldSet* guarded_fields =
flow_graph->parsed_function().guarded_fields();
Field& field = Field::Handle();
for (intptr_t i = 0; i < guarded_fields.length(); i++) {
field = guarded_fields[i]->Original();
FieldSet::Iterator it = guarded_fields->GetIterator();
while (const Field** guarded_field = it.Next()) {
field = (*guarded_field)->Original();
field.RegisterDependentCode(code);
}
}

View file

@ -1920,8 +1920,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
classid_t cls_id = target::Class::GetId(cls);
ASSERT(cls_id != kIllegalCid);
RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
// The generated code is different if the class is parameterized.
const bool is_cls_parameterized = target::Class::NumTypeArguments(cls) > 0;
ASSERT(!is_cls_parameterized || target::Class::TypeArgumentsFieldOffset(
@ -1929,7 +1927,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
const intptr_t instance_size = target::Class::GetInstanceSize(cls);
ASSERT(instance_size > 0);
RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
const uword tags =
target::MakeTagWordForNewSpaceObject(cls_id, instance_size);
@ -1941,6 +1938,9 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
if (!FLAG_use_slow_path && FLAG_inline_alloc &&
!target::Class::TraceAllocation(cls) &&
target::SizeFitsInSizeTag(instance_size)) {
RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
if (is_cls_parameterized) {
if (!IsSameObject(NullObject(),
CastHandle<Object>(allocat_object_parametrized))) {

View file

@ -2221,8 +2221,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
classid_t cls_id = target::Class::GetId(cls);
ASSERT(cls_id != kIllegalCid);
RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
// The generated code is different if the class is parameterized.
const bool is_cls_parameterized = target::Class::NumTypeArguments(cls) > 0;
ASSERT(!is_cls_parameterized || target::Class::TypeArgumentsFieldOffset(
@ -2230,7 +2228,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
const intptr_t instance_size = target::Class::GetInstanceSize(cls);
ASSERT(instance_size > 0);
RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
const uword tags =
target::MakeTagWordForNewSpaceObject(cls_id, instance_size);
@ -2243,6 +2240,9 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
if (!FLAG_use_slow_path && FLAG_inline_alloc &&
!target::Class::TraceAllocation(cls) &&
target::SizeFitsInSizeTag(instance_size)) {
RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
if (is_cls_parameterized) {
if (!IsSameObject(NullObject(),
CastHandle<Object>(allocat_object_parametrized))) {

View file

@ -2023,8 +2023,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
classid_t cls_id = target::Class::GetId(cls);
ASSERT(cls_id != kIllegalCid);
RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
// The generated code is different if the class is parameterized.
const bool is_cls_parameterized = target::Class::NumTypeArguments(cls) > 0;
ASSERT(!is_cls_parameterized || target::Class::TypeArgumentsFieldOffset(
@ -2032,7 +2030,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
const intptr_t instance_size = target::Class::GetInstanceSize(cls);
ASSERT(instance_size > 0);
RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
const uword tags =
target::MakeTagWordForNewSpaceObject(cls_id, instance_size);
@ -2046,6 +2043,8 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
if (!FLAG_use_slow_path && FLAG_inline_alloc &&
!target::Class::TraceAllocation(cls) &&
target::SizeFitsInSizeTag(instance_size)) {
RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
if (is_cls_parameterized) {
if (!IsSameObject(NullObject(),
CastHandle<Object>(allocat_object_parametrized))) {

View file

@ -2126,8 +2126,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
classid_t cls_id = target::Class::GetId(cls);
ASSERT(cls_id != kIllegalCid);
RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
const intptr_t cls_type_arg_field_offset =
target::Class::TypeArgumentsFieldOffset(cls);
@ -2138,9 +2136,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
const intptr_t instance_size = target::Class::GetInstanceSize(cls);
ASSERT(instance_size > 0);
// User-defined classes should always be allocatable in new space.
RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
const uword tags =
target::MakeTagWordForNewSpaceObject(cls_id, instance_size);
@ -2152,6 +2147,9 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
if (!FLAG_use_slow_path && FLAG_inline_alloc &&
!target::Class::TraceAllocation(cls) &&
target::SizeFitsInSizeTag(instance_size)) {
RELEASE_ASSERT(AllocateObjectInstr::WillAllocateNewOrRemembered(cls));
RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
if (is_cls_parameterized) {
if (!IsSameObject(NullObject(),
CastHandle<Object>(allocat_object_parametrized))) {

View file

@ -3739,41 +3739,14 @@ void Class::AddInvocationDispatcher(const String& target_name,
auto thread = Thread::Current();
ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadWriter());
auto zone = thread->zone();
auto& cache = Array::Handle(zone, invocation_dispatcher_cache());
InvocationDispatcherTable dispatchers(cache);
intptr_t i = 0;
#if defined(DEBUG)
auto& function = Function::Handle();
#endif
for (auto entry : dispatchers) {
if (entry.Get<kInvocationDispatcherName>() == String::null()) {
break;
}
ASSERT(target_name.ptr() == dispatcher.name());
#if defined(DEBUG)
// Check for duplicate entries in the cache.
function = entry.Get<kInvocationDispatcherFunction>();
ASSERT(entry.Get<kInvocationDispatcherName>() != target_name.ptr() ||
function.kind() != dispatcher.kind() ||
entry.Get<kInvocationDispatcherArgsDesc>() != args_desc.ptr());
#endif // defined(DEBUG)
i++;
}
if (i == dispatchers.Length()) {
const intptr_t new_len =
cache.Length() == 0
? static_cast<intptr_t>(Class::kInvocationDispatcherEntrySize)
: cache.Length() * 2;
cache = Array::Grow(cache, new_len);
set_invocation_dispatcher_cache(cache);
}
// Ensure all stores are visible at the point the name is visible.
auto entry = dispatchers[i];
entry.Set<Class::kInvocationDispatcherArgsDesc>(args_desc);
entry.Set<Class::kInvocationDispatcherFunction>(dispatcher);
entry.Set<Class::kInvocationDispatcherName, std::memory_order_release>(
target_name);
DispatcherSet dispatchers(invocation_dispatcher_cache() ==
Array::empty_array().ptr()
? HashTables::New<DispatcherSet>(4, Heap::kOld)
: invocation_dispatcher_cache());
dispatchers.Insert(dispatcher);
set_invocation_dispatcher_cache(dispatchers.Release());
}
FunctionPtr Class::GetInvocationDispatcher(const String& target_name,
@ -3786,32 +3759,14 @@ FunctionPtr Class::GetInvocationDispatcher(const String& target_name,
auto thread = Thread::Current();
auto Z = thread->zone();
auto& function = Function::Handle(Z);
auto& name = String::Handle(Z);
auto& desc = Array::Handle(Z);
auto& cache = Array::Handle(Z);
auto find_entry = [&]() {
cache = invocation_dispatcher_cache();
ASSERT(!cache.IsNull());
InvocationDispatcherTable dispatchers(cache);
for (auto dispatcher : dispatchers) {
// Ensure all loads are done after loading the name.
name = dispatcher.Get<Class::kInvocationDispatcherName,
std::memory_order_acquire>();
if (name.IsNull()) break; // Reached last entry.
if (!name.Equals(target_name)) continue;
desc = dispatcher.Get<Class::kInvocationDispatcherArgsDesc>();
if (desc.ptr() != args_desc.ptr()) continue;
function = dispatcher.Get<Class::kInvocationDispatcherFunction>();
if (function.kind() == kind) {
return function.ptr();
}
}
return Function::null();
};
// First we'll try to find it without using locks.
function = find_entry();
DispatcherKey key(target_name, args_desc, kind);
if (invocation_dispatcher_cache() != Array::empty_array().ptr()) {
DispatcherSet dispatchers(Z, invocation_dispatcher_cache());
function ^= dispatchers.GetOrNull(key);
dispatchers.Release();
}
if (!function.IsNull() || !create_if_absent) {
return function.ptr();
}
@ -3820,7 +3775,11 @@ FunctionPtr Class::GetInvocationDispatcher(const String& target_name,
SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
// Try to find it again & return if it was added in the meantime.
function = find_entry();
if (invocation_dispatcher_cache() != Array::empty_array().ptr()) {
DispatcherSet dispatchers(Z, invocation_dispatcher_cache());
function ^= dispatchers.GetOrNull(key);
dispatchers.Release();
}
if (!function.IsNull()) return function.ptr();
// Otherwise create it & add it.
@ -7832,11 +7791,12 @@ void Function::set_extracted_method_closure(const Function& value) const {
}
ArrayPtr Function::saved_args_desc() const {
if (kind() == UntaggedFunction::kDynamicInvocationForwarder) {
return Array::null();
}
ASSERT(kind() == UntaggedFunction::kNoSuchMethodDispatcher ||
kind() == UntaggedFunction::kInvokeFieldDispatcher);
const Object& obj = Object::Handle(untag()->data());
ASSERT(obj.IsArray());
return Array::Cast(obj).ptr();
return Array::RawCast(untag()->data());
}
void Function::set_saved_args_desc(const Array& value) const {
@ -11042,12 +11002,10 @@ FieldPtr Field::Original() const {
if (IsNull()) {
return Field::null();
}
Object& obj = Object::Handle(untag()->owner());
if (obj.IsField()) {
return Field::RawCast(obj.ptr());
} else {
return this->ptr();
if (untag()->owner()->IsField()) {
return static_cast<FieldPtr>(untag()->owner());
}
return this->ptr();
}
intptr_t Field::guarded_cid() const {
@ -16199,12 +16157,10 @@ ICDataPtr ICData::Original() const {
if (IsNull()) {
return ICData::null();
}
Object& obj = Object::Handle(untag()->owner());
if (obj.IsFunction()) {
return this->ptr();
} else {
return ICData::RawCast(obj.ptr());
if (untag()->owner()->IsICData()) {
return static_cast<ICDataPtr>(untag()->owner());
}
return this->ptr();
}
void ICData::SetOriginal(const ICData& value) const {

View file

@ -13147,10 +13147,6 @@ class ArrayOfTuplesView {
const Array& array_;
};
using InvocationDispatcherTable =
ArrayOfTuplesView<Class::InvocationDispatcherEntry,
std::tuple<String, Array, Function>>;
using StaticCallsTable =
ArrayOfTuplesView<Code::SCallTableEntry, std::tuple<Smi, Object, Function>>;

View file

@ -60,7 +60,7 @@ ParsedFunction::ParsedFunction(Thread* thread, const Function& function)
entry_points_temp_var_(NULL),
finally_return_temp_var_(NULL),
dynamic_closure_call_vars_(nullptr),
guarded_fields_(new ZoneGrowableArray<const Field*>()),
guarded_fields_(),
default_parameter_values_(NULL),
raw_type_arguments_var_(NULL),
first_parameter_index_(),
@ -93,21 +93,19 @@ void ParsedFunction::AddToGuardedFields(const Field* field) const {
return;
}
for (intptr_t j = 0; j < guarded_fields_->length(); j++) {
const Field* other = (*guarded_fields_)[j];
if (field->Original() == other->Original()) {
// Abort background compilation early if the guarded state of this field
// has changed during compilation. We will not be able to commit
// the resulting code anyway.
if (Compiler::IsBackgroundCompilation()) {
if (!other->IsConsistentWith(*field)) {
Compiler::AbortBackgroundCompilation(
DeoptId::kNone,
"Field's guarded state changed during compilation");
}
const Field** other = guarded_fields_.Lookup(field);
if (other != nullptr) {
ASSERT(field->Original() == (*other)->Original());
// Abort background compilation early if the guarded state of this field
// has changed during compilation. We will not be able to commit
// the resulting code anyway.
if (Compiler::IsBackgroundCompilation()) {
if (!(*other)->IsConsistentWith(*field)) {
Compiler::AbortBackgroundCompilation(
DeoptId::kNone, "Field's guarded state changed during compilation");
}
return;
}
return;
}
// Note: the list of guarded fields must contain copies during optimizing
@ -116,7 +114,7 @@ void ParsedFunction::AddToGuardedFields(const Field* field) const {
// inlining.
ASSERT(field->IsOriginal() ==
!CompilerState::Current().should_clone_fields());
guarded_fields_->Add(&Field::ZoneHandle(Z, field->ptr()));
guarded_fields_.Insert(&Field::ZoneHandle(Z, field->ptr()));
}
void ParsedFunction::Bailout(const char* origin, const char* reason) const {

View file

@ -39,6 +39,32 @@ template <typename T>
class GrowableArray;
class Parser;
// Hash-set traits for a set of Field pointers (FieldSet below).
//
// NOTE: equality is clone-invariant — two clones of the same original field
// compare equal because IsKeyEqual() goes through Original(). Hash() is
// consistent with that: it derives from the field's source position /
// kernel offset rather than from the clone's identity.
class FieldKeyValueTrait {
public:
// Typedefs needed for the DirectChainedHashMap template.
typedef const Field* Key;
typedef const Field* Value;
typedef const Field* Pair;
static Key KeyOf(Pair kv) { return kv; }
static Value ValueOf(Pair kv) { return kv; }
// Hash by the field's source position when it is real, otherwise by its
// kernel offset; both are the same for every clone of a source field.
static inline uword Hash(Key key) {
const TokenPosition token_pos = key->token_pos();
if (token_pos.IsReal()) {
return token_pos.Hash();
}
return key->kernel_offset();
}
// Equal iff both fields originate from the same declaration.
static inline bool IsKeyEqual(Pair pair, Key key) {
return pair->Original() == key->Original();
}
};
typedef DirectChainedHashMap<FieldKeyValueTrait> FieldSet;
// The class ParsedFunction holds the result of parsing a function.
class ParsedFunction : public ZoneAllocated {
public:
@ -153,9 +179,7 @@ class ParsedFunction : public ZoneAllocated {
LocalVariable* EnsureExpressionTemp();
LocalVariable* EnsureEntryPointsTemp();
ZoneGrowableArray<const Field*>* guarded_fields() const {
return guarded_fields_;
}
const FieldSet* guarded_fields() const { return &guarded_fields_; }
VariableIndex first_parameter_index() const { return first_parameter_index_; }
int num_stack_locals() const { return num_stack_locals_; }
@ -274,7 +298,7 @@ class ParsedFunction : public ZoneAllocated {
LocalVariable* entry_points_temp_var_;
LocalVariable* finally_return_temp_var_;
DynamicClosureCallVars* dynamic_closure_call_vars_;
ZoneGrowableArray<const Field*>* guarded_fields_;
mutable FieldSet guarded_fields_;
ZoneGrowableArray<const Instance*>* default_parameter_values_;
LocalVariable* raw_type_arguments_var_;