[vm] Type testing stubs for record types

Benchmarks improved by:
JIT:
RecordCollections.ListAddPoly.Record ~9.6 times
RecordCollections.ListSetIndexed.Record ~8.3 times
RecordCollections.ListSetIndexedPoly.Record ~16.1 times
RecordCollections.MapAdd.RecordKey ~3.6 times
RecordCollections.MapAdd.RecordValue ~4.4 times
RecordCollections.SetAdd.Record ~4.1 times
AOT:
RecordCollections.ListAdd.Record ~8.7 times
RecordCollections.ListAddPoly.Record ~8.8 times
RecordCollections.ListSetIndexed.Record ~19.5 times
RecordCollections.ListSetIndexedPoly.Record ~18.3 times
RecordCollections.MapAdd.RecordKey ~7.1 times
RecordCollections.MapAdd.RecordValue ~8.8 times
RecordCollections.SetAdd.Record ~8.9 times
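
The following Dart sketch (illustrative only, not part of this CL; simplified from what the RecordCollections benchmarks measure) shows the kind of implicit record type check that now hits a specialized type testing stub:

void main() {
  final list = <(int, String)>[];
  final List<Object> alias = list; // upcast makes each add a checked covariant store
  for (var i = 0; i < 1000; i++) {
    alias.add((i, 'x')); // implicit `as (int, String)` check on every store
  }
}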

TEST=vm/cc/TTS_RecordSubtypeRangeCheck

Issue: https://github.com/dart-lang/sdk/issues/49719
Change-Id: I96e4c6a40a897a72b3570b59e014c8af77a9ef99
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/275082
Reviewed-by: Tess Strickland <sstrickl@google.com>
Commit-Queue: Alexander Markov <alexmarkov@google.com>
Reviewed-by: Martin Kustermann <kustermann@google.com>
Author: Alexander Markov 2022-12-14 22:33:38 +00:00, committed by Commit Queue
parent 8a37b70a53
commit 987775d6fb
5 changed files with 215 additions and 23 deletions


@@ -389,6 +389,33 @@ bool HierarchyInfo::CanUseGenericSubtypeRangeCheckFor(
   return true;
 }
 
+bool HierarchyInfo::CanUseRecordSubtypeRangeCheckFor(const AbstractType& type) {
+  ASSERT(type.IsFinalized());
+  if (!type.IsRecordType()) {
+    return false;
+  }
+  const RecordType& rec = RecordType::Cast(type);
+  // Type testing stubs have no access to their object pools
+  // so they will not be able to load field names from object pool
+  // in order to check the shape of a record instance.
+  // See TypeTestingStubGenerator::BuildOptimizedRecordSubtypeRangeCheck.
+  if (rec.NumNamedFields() != 0) {
+    return false;
+  } else {
+    ASSERT(rec.field_names() == Object::empty_array().ptr());
+    ASSERT(compiler::target::CanLoadFromThread(Object::empty_array()));
+  }
+  Zone* zone = thread()->zone();
+  auto& field_type = AbstractType::Handle(zone);
+  for (intptr_t i = 0, n = rec.NumFields(); i < n; ++i) {
+    field_type = rec.FieldTypeAt(i);
+    if (!CanUseSubtypeRangeCheckFor(field_type)) {
+      return false;
+    }
+  }
+  return true;
+}
+
 bool HierarchyInfo::InstanceOfHasClassRange(const AbstractType& type,
                                             intptr_t* lower_limit,
                                             intptr_t* upper_limit) {
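
To illustrate the predicate above with Dart-level type tests (an assumed example, not from this CL): only record types with positional fields whose types admit simple cid-range checks are eligible; named fields would require loading field names from the object pool.

void main() {
  Object obj = (1, 'a');
  print(obj is (int, String)); // eligible for the cid-range-based stub
  print(obj is ({int a, String b})); // named fields: falls back to the generic path
}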


@@ -305,6 +305,10 @@ class HierarchyInfo : public ThreadStackResource {
   // false.
   bool CanUseGenericSubtypeRangeCheckFor(const AbstractType& type);
 
+  // Returns `true` if [type] is a record type whose fields can be tested
+  // using a simple [CidRange]-based subtype check.
+  bool CanUseRecordSubtypeRangeCheckFor(const AbstractType& type);
+
  private:
   // Does not use any hierarchy information available in the system but computes
   // it via O(n) class table traversal.


@@ -79,6 +79,23 @@ void TypeTestingStubNamer::StringifyTypeTo(BaseTextBuffer* buffer,
     }
   } else if (type.IsTypeParameter()) {
     buffer->AddString(TypeParameter::Cast(type).CanonicalNameCString());
+  } else if (type.IsRecordType()) {
+    const RecordType& rec = RecordType::Cast(type);
+    buffer->AddString("Record");
+    const intptr_t num_fields = rec.NumFields();
+    const intptr_t num_positional_fields = rec.NumPositionalFields();
+    const auto& field_names = Array::Handle(rec.field_names());
+    const auto& field_types = Array::Handle(rec.field_types());
+    for (intptr_t i = 0; i < num_fields; ++i) {
+      buffer->AddString("__");
+      type_ ^= field_types.At(i);
+      StringifyTypeTo(buffer, type_);
+      if (i >= num_positional_fields) {
+        buffer->AddString("_");
+        string_ ^= field_names.At(i - num_positional_fields);
+        buffer->AddString(string_.ToCString());
+      }
+    }
   } else {
     buffer->AddString(type.ToCString());
   }
@@ -129,13 +146,13 @@ CodePtr TypeTestingStubGenerator::DefaultCodeForType(
     }
   }
 
-  if (type.IsFunctionType() || type.IsRecordType()) {
+  if (type.IsFunctionType()) {
     const bool nullable = Instance::NullIsAssignableTo(type);
     return nullable ? StubCode::DefaultNullableTypeTest().ptr()
                     : StubCode::DefaultTypeTest().ptr();
   }
 
-  if (type.IsType()) {
+  if (type.IsType() || type.IsRecordType()) {
     const bool should_specialize = !FLAG_precompiled_mode && lazy_specialize;
     const bool nullable = Instance::NullIsAssignableTo(type);
     if (should_specialize) {
@@ -177,10 +194,10 @@ CodePtr TypeTestingStubGenerator::OptimizedCodeForType(
   }
 
   if (type.IsCanonical()) {
-    if (type.IsType()) {
+    if (type.IsType() || type.IsRecordType()) {
 #if !defined(DART_PRECOMPILED_RUNTIME)
-      const Code& code = Code::Handle(
-          TypeTestingStubGenerator::BuildCodeForType(Type::Cast(type)));
+      const Code& code =
+          Code::Handle(TypeTestingStubGenerator::BuildCodeForType(type));
       if (!code.IsNull()) {
         return code.ptr();
       }
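
In Dart terms (a hypothetical sketch, not from this CL), record types now take the same lazy-specialization path as ordinary interface types, so a hot type test like this can be upgraded from the default stub to a specialized one:

bool isIntString(Object o) => o is (int, String);

void main() {
  for (var i = 0; i < 100; i++) {
    isIntString((i, '$i')); // repeated checks give the JIT a reason to specialize
  }
}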
@@ -235,20 +252,18 @@ static CodePtr RetryCompilationWithFarBranches(
   }
 }
 
-CodePtr TypeTestingStubGenerator::BuildCodeForType(const Type& type) {
+CodePtr TypeTestingStubGenerator::BuildCodeForType(const AbstractType& type) {
   auto thread = Thread::Current();
   auto zone = thread->zone();
   HierarchyInfo* hi = thread->hierarchy_info();
   ASSERT(hi != NULL);
 
   if (!hi->CanUseSubtypeRangeCheckFor(type) &&
-      !hi->CanUseGenericSubtypeRangeCheckFor(type)) {
+      !hi->CanUseGenericSubtypeRangeCheckFor(type) &&
+      !hi->CanUseRecordSubtypeRangeCheckFor(type)) {
     return Code::null();
   }
 
-  const Class& type_class = Class::Handle(type.type_class());
-  ASSERT(!type_class.IsNull());
-
   auto& slow_tts_stub = Code::ZoneHandle(zone);
   if (FLAG_precompiled_mode) {
     slow_tts_stub = thread->isolate_group()->object_store()->slow_tts_stub();
@@ -260,7 +275,7 @@ CodePtr TypeTestingStubGenerator::BuildCodeForType(const Type& type) {
       thread, [&](compiler::Assembler& assembler) {
         compiler::UnresolvedPcRelativeCalls unresolved_calls;
         BuildOptimizedTypeTestStub(&assembler, &unresolved_calls,
-                                   slow_tts_stub, hi, type, type_class);
+                                   slow_tts_stub, hi, type);
 
         const auto& static_calls_table = Array::Handle(
             zone, compiler::StubCodeCompiler::BuildStaticCallsTable(
@@ -320,9 +335,8 @@ void TypeTestingStubGenerator::BuildOptimizedTypeTestStub(
     compiler::UnresolvedPcRelativeCalls* unresolved_calls,
     const Code& slow_type_test_stub,
     HierarchyInfo* hi,
-    const Type& type,
-    const Class& type_class) {
-  BuildOptimizedTypeTestStubFastCases(assembler, hi, type, type_class);
+    const AbstractType& type) {
+  BuildOptimizedTypeTestStubFastCases(assembler, hi, type);
   __ Jump(compiler::Address(
       THR, compiler::target::Thread::slow_type_test_entry_point_offset()));
 }
@@ -330,8 +344,7 @@ void TypeTestingStubGenerator::BuildOptimizedTypeTestStub(
 void TypeTestingStubGenerator::BuildOptimizedTypeTestStubFastCases(
     compiler::Assembler* assembler,
     HierarchyInfo* hi,
-    const Type& type,
-    const Class& type_class) {
+    const AbstractType& type) {
   // These are handled via the TopTypeTypeTestStub!
   ASSERT(!type.IsTopTypeForSubtyping());
 
@@ -360,6 +373,8 @@ void TypeTestingStubGenerator::BuildOptimizedTypeTestStubFastCases(
 
   // Check the cid ranges which are a subtype of [type].
   if (hi->CanUseSubtypeRangeCheckFor(type)) {
+    const Class& type_class = Class::Handle(type.type_class());
+    ASSERT(!type_class.IsNull());
     const CidRangeVector& ranges = hi->SubtypeRangesForClass(
         type_class,
         /*include_abstract=*/false,
@@ -381,9 +396,16 @@ void TypeTestingStubGenerator::BuildOptimizedTypeTestStubFastCases(
     __ Bind(&is_subtype);
     __ Ret();
     __ Bind(&is_not_subtype);
+  } else if (hi->CanUseGenericSubtypeRangeCheckFor(type)) {
+    const Class& type_class = Class::Handle(type.type_class());
+    ASSERT(!type_class.IsNull());
+    BuildOptimizedSubclassRangeCheckWithTypeArguments(
+        assembler, hi, Type::Cast(type), type_class);
+  } else if (hi->CanUseRecordSubtypeRangeCheckFor(type)) {
+    BuildOptimizedRecordSubtypeRangeCheck(assembler, hi,
+                                          RecordType::Cast(type));
   } else {
-    BuildOptimizedSubclassRangeCheckWithTypeArguments(assembler, hi, type,
-                                                      type_class);
+    UNREACHABLE();
   }
 
   if (Instance::NullIsAssignableTo(type)) {
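
Note that null is dispatched separately from the record fast cases; at the Dart level (a sketch, assuming sound null safety) the distinction looks like this:

void main() {
  Object? x = null;
  final r = x as (int, String)?; // succeeds: null is assignable to the nullable record type
  print(r); // null
  print(x is (int, String)); // false: null fails the non-nullable record check
}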
@@ -665,6 +687,78 @@ void TypeTestingStubGenerator::
   __ Bind(&check_failed);
 }
 
+void TypeTestingStubGenerator::BuildOptimizedRecordSubtypeRangeCheck(
+    compiler::Assembler* assembler,
+    HierarchyInfo* hi,
+    const RecordType& type) {
+  compiler::Label is_subtype, is_not_subtype;
+  Zone* zone = Thread::Current()->zone();
+
+  __ BranchIfSmi(TypeTestABI::kInstanceReg, &is_not_subtype);
+  __ LoadClassId(TTSInternalRegs::kScratchReg, TypeTestABI::kInstanceReg);
+
+  if (Instance::NullIsAssignableTo(type)) {
+    __ CompareImmediate(TTSInternalRegs::kScratchReg, kNullCid);
+    __ BranchIf(EQUAL, &is_subtype);
+  }
+
+  __ CompareImmediate(TTSInternalRegs::kScratchReg, kRecordCid);
+  __ BranchIf(NOT_EQUAL, &is_not_subtype);
+
+  __ LoadCompressedSmi(
+      TTSInternalRegs::kScratchReg,
+      compiler::FieldAddress(TypeTestABI::kInstanceReg,
+                             compiler::target::Record::num_fields_offset()));
+  __ CompareImmediate(TTSInternalRegs::kScratchReg,
+                      Smi::RawValue(type.NumFields()));
+  __ BranchIf(NOT_EQUAL, &is_not_subtype);
+
+  __ LoadCompressedField(
+      TTSInternalRegs::kScratchReg,
+      compiler::FieldAddress(TypeTestABI::kInstanceReg,
+                             compiler::target::Record::field_names_offset()));
+
+  // Cannot load arbitrary field names from object pool, so
+  // only record types without named fields are supported.
+  ASSERT(type.field_names() == Object::empty_array().ptr());
+  __ CompareObject(TTSInternalRegs::kScratchReg, Object::empty_array());
+  __ BranchIf(NOT_EQUAL, &is_not_subtype);
+
+  auto& field_type = AbstractType::Handle(zone);
+  auto& field_type_class = Class::Handle(zone);
+  const auto& smi_type = Type::Handle(zone, Type::SmiType());
+
+  for (intptr_t i = 0, n = type.NumFields(); i < n; ++i) {
+    compiler::Label next;
+    field_type = type.FieldTypeAt(i);
+    ASSERT(hi->CanUseSubtypeRangeCheckFor(field_type));
+
+    __ LoadCompressedFieldFromOffset(TTSInternalRegs::kScratchReg,
+                                     TypeTestABI::kInstanceReg,
+                                     compiler::target::Record::field_offset(i));
+
+    field_type_class = field_type.type_class();
+    ASSERT(!field_type_class.IsNull());
+    const CidRangeVector& ranges = hi->SubtypeRangesForClass(
+        field_type_class,
+        /*include_abstract=*/false,
+        /*exclude_null=*/!Instance::NullIsAssignableTo(field_type));
+
+    const bool smi_is_ok = smi_type.IsSubtypeOf(field_type, Heap::kNew);
+    __ BranchIfSmi(TTSInternalRegs::kScratchReg,
+                   smi_is_ok ? &next : &is_not_subtype);
+    __ LoadClassId(TTSInternalRegs::kScratchReg, TTSInternalRegs::kScratchReg);
+    BuildOptimizedSubtypeRangeCheck(assembler, ranges,
+                                    TTSInternalRegs::kScratchReg, &next,
+                                    &is_not_subtype);
+    __ Bind(&next);
+  }
+
+  __ Bind(&is_subtype);
+  __ Ret();
+  __ Bind(&is_not_subtype);
+}
+
 // Splits [ranges] into multiple ranges in [output], where the concrete,
 // finalized classes in each range share the same type arguments field offset.
 //
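
The emitted fast path mirrors these Dart-level outcomes (a sketch: shape first, i.e. field count and absence of named fields, then one cid-range check per field):

void main() {
  Object r = (1, 'hi');
  print(r is (int, String)); // true: two positional fields, both cids in range
  print(r is (int, int)); // false: the second field fails its cid-range check
  print(r is (int, String, bool)); // false: field count (shape) mismatch
}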


@@ -60,20 +60,18 @@ class TypeTestingStubGenerator {
  private:
 #if !defined(TARGET_ARCH_IA32)
 #if !defined(DART_PRECOMPILED_RUNTIME)
-  CodePtr BuildCodeForType(const Type& type);
+  CodePtr BuildCodeForType(const AbstractType& type);
   static void BuildOptimizedTypeTestStub(
       compiler::Assembler* assembler,
       compiler::UnresolvedPcRelativeCalls* unresolved_calls,
       const Code& slow_type_test_stub,
       HierarchyInfo* hi,
-      const Type& type,
-      const Class& type_class);
+      const AbstractType& type);
 
   static void BuildOptimizedTypeTestStubFastCases(
       compiler::Assembler* assembler,
       HierarchyInfo* hi,
-      const Type& type,
-      const Class& type_class);
+      const AbstractType& type);
 
   static bool BuildOptimizedSubtypeRangeCheck(compiler::Assembler* assembler,
                                               const CidRangeVector& ranges,
@@ -87,6 +85,11 @@
       const Type& type,
       const Class& type_class);
 
+  static void BuildOptimizedRecordSubtypeRangeCheck(
+      compiler::Assembler* assembler,
+      HierarchyInfo* hi,
+      const RecordType& type);
+
   // Returns whether any cid ranges require type argument checking.
   //
   // If any do, then returns from the stub if any checks that do not need


@@ -1100,6 +1100,70 @@ ISOLATE_UNIT_TEST_CASE(TTS_GenericSubtypeRangeCheck) {
                         /*should_specialize=*/false}));
 }
 
+const char* kRecordSubtypeRangeCheckScript =
+    R"(
+      class A {}
+      class B extends A {}
+      class C implements A {}
+      class D<T> {}
+
+      getType<T>() => T;
+      getRecordType1() => getType<(int, A)>();
+      getRecordType2() => getType<(A, int, String)>();
+      getRecordType3() => getType<(int, D)>();
+
+      createObj1() => (1, B());
+      createObj2() => (1, 'bye');
+      createObj3() => (1, foo: B());
+      createObj4() => (1, B(), 2);
+      createObj5() => (C(), 2, 'hi');
+      createObj6() => (D(), 2, 'hi');
+      createObj7() => (3, D<int>());
+      createObj8() => (D<int>(), 3);
+)";
+
+ISOLATE_UNIT_TEST_CASE(TTS_RecordSubtypeRangeCheck) {
+  const auto& root_library =
+      Library::Handle(LoadTestScript(kRecordSubtypeRangeCheckScript));
+
+  const auto& type1 = AbstractType::Cast(
+      Object::Handle(Invoke(root_library, "getRecordType1")));
+  const auto& type2 = AbstractType::Cast(
+      Object::Handle(Invoke(root_library, "getRecordType2")));
+  const auto& type3 = AbstractType::Cast(
+      Object::Handle(Invoke(root_library, "getRecordType3")));
+
+  const auto& obj1 = Object::Handle(Invoke(root_library, "createObj1"));
+  const auto& obj2 = Object::Handle(Invoke(root_library, "createObj2"));
+  const auto& obj3 = Object::Handle(Invoke(root_library, "createObj3"));
+  const auto& obj4 = Object::Handle(Invoke(root_library, "createObj4"));
+  const auto& obj5 = Object::Handle(Invoke(root_library, "createObj5"));
+  const auto& obj6 = Object::Handle(Invoke(root_library, "createObj6"));
+  const auto& obj7 = Object::Handle(Invoke(root_library, "createObj7"));
+  const auto& obj8 = Object::Handle(Invoke(root_library, "createObj8"));
+
+  const auto& tav_null = TypeArguments::Handle(TypeArguments::null());
+
+  // (1, B()) as (int, A)
+  // (1, 'bye') as (int, A)
+  // (1, foo: B()) as (int, A)
+  // (1, B(), 2) as (int, A)
+  RunTTSTest(type1, {obj1, tav_null, tav_null});
+  RunTTSTest(type1, Failure({obj2, tav_null, tav_null}));
+  RunTTSTest(type1, Failure({obj3, tav_null, tav_null}));
+  RunTTSTest(type1, Failure({obj4, tav_null, tav_null}));
+
+  // (C(), 2, 'hi') as (A, int, String)
+  // (D(), 2, 'hi') as (A, int, String)
+  RunTTSTest(type2, {obj5, tav_null, tav_null});
+  RunTTSTest(type2, Failure({obj6, tav_null, tav_null}));
+
+  // (3, D<int>()) as (int, D)
+  // (D<int>(), 3) as (int, D)
+  RunTTSTest(type3, {obj7, tav_null, tav_null});
+  RunTTSTest(type3, Failure({obj8, tav_null, tav_null}));
+}
+
 ISOLATE_UNIT_TEST_CASE(TTS_Generic_Implements_Instantiated_Interface) {
   const char* kScript =
       R"(