[vm] Initial implementation of record types

TEST=language/record_type_test

Issue: https://github.com/dart-lang/sdk/issues/49719
Change-Id: Ib2100c23513395c9fa9c541320eacbb33a2a119e
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/256802
Reviewed-by: Ryan Macnak <rmacnak@google.com>
Reviewed-by: Martin Kustermann <kustermann@google.com>
Commit-Queue: Alexander Markov <alexmarkov@google.com>
Authored by Alexander Markov on 2022-09-12 22:40:57 +00:00; committed by Commit Bot
commit 9a023aeae9 (parent 3744763f3a)
41 changed files with 1460 additions and 410 deletions
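For context, here is a minimal sketch of the Dart record syntax whose types this change starts to model in the VM. This is illustrative only: it assumes the records feature as it later shipped and is not part of this diff.

// Hypothetical usage example, not part of this commit.
(int, String) pair = (1, 'one');        // record with two positional fields
({int x, int y}) point = (x: 2, y: 3);  // record with two named fields

void main() {
  print(pair.$1); // first positional field -> 1
  print(point.x); // named field x -> 2
  // The static types (int, String) and ({int x, int y}) are what the new
  // RecordType VM object represents: the field types plus the (sorted)
  // names of the named fields and a nullability suffix.
}

Note that this commit only introduces the type object (RecordType); record instances themselves are not implemented here yet (see the commented-out kRecordCid and the UNIMPLEMENTED() stubs in the hunks below).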


@ -81,7 +81,8 @@ DEFINE_NATIVE_ENTRY(Object_runtimeType, 0, 1) {
return Type::IntType();
} else if (instance.IsDouble()) {
return Type::Double();
} else if (instance.IsType() || instance.IsFunctionType()) {
} else if (instance.IsType() || instance.IsFunctionType() ||
instance.IsRecordType()) {
return Type::DartTypeType();
} else if (IsArrayClassId(instance.GetClassId())) {
const auto& cls = Class::Handle(


@ -4349,6 +4349,129 @@ class FunctionTypeDeserializationCluster
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class RecordTypeSerializationCluster
: public CanonicalSetSerializationCluster<CanonicalRecordTypeSet,
RecordType,
RecordTypePtr> {
public:
RecordTypeSerializationCluster(bool is_canonical,
bool represents_canonical_set)
: CanonicalSetSerializationCluster(
kRecordTypeCid,
is_canonical,
represents_canonical_set,
"RecordType",
compiler::target::RecordType::InstanceSize()) {}
~RecordTypeSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
RecordTypePtr type = RecordType::RawCast(object);
objects_.Add(type);
PushFromTo(type);
}
void WriteAlloc(Serializer* s) {
intptr_t count = objects_.length();
s->WriteUnsigned(count);
ReorderObjects(s);
for (intptr_t i = 0; i < count; i++) {
RecordTypePtr type = objects_[i];
s->AssignRef(type);
}
WriteCanonicalSetLayout(s);
}
void WriteFill(Serializer* s) {
intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
WriteRecordType(s, objects_[i]);
}
}
private:
void WriteRecordType(Serializer* s, RecordTypePtr type) {
AutoTraceObject(type);
WriteFromTo(type);
ASSERT(type->untag()->type_state_ <
(1 << UntaggedRecordType::kTypeStateBitSize));
ASSERT(type->untag()->nullability_ < (1 << kNullabilityBitSize));
static_assert(UntaggedRecordType::kTypeStateBitSize + kNullabilityBitSize <=
kBitsPerByte * sizeof(uint8_t),
"Cannot pack type_state_ and nullability_ into a uint8_t");
const uint8_t combined =
(type->untag()->type_state_ << kNullabilityBitSize) |
type->untag()->nullability_;
ASSERT_EQUAL(type->untag()->type_state_, combined >> kNullabilityBitSize);
ASSERT_EQUAL(type->untag()->nullability_, combined & kNullabilityBitMask);
s->Write<uint8_t>(combined);
}
};
#endif // !DART_PRECOMPILED_RUNTIME
class RecordTypeDeserializationCluster
: public CanonicalSetDeserializationCluster<CanonicalRecordTypeSet> {
public:
RecordTypeDeserializationCluster(bool is_canonical, bool is_root_unit)
: CanonicalSetDeserializationCluster(is_canonical,
is_root_unit,
"RecordType") {}
~RecordTypeDeserializationCluster() {}
void ReadAlloc(Deserializer* d) {
ReadAllocFixedSize(d, RecordType::InstanceSize());
BuildCanonicalSetFromLayout(d);
}
void ReadFill(Deserializer* d_, bool primary) {
Deserializer::Local d(d_);
const bool mark_canonical = primary && is_canonical();
for (intptr_t id = start_index_, n = stop_index_; id < n; id++) {
RecordTypePtr type = static_cast<RecordTypePtr>(d.Ref(id));
Deserializer::InitializeHeader(
type, kRecordTypeCid, RecordType::InstanceSize(), mark_canonical);
d.ReadFromTo(type);
const uint8_t combined = d.Read<uint8_t>();
type->untag()->type_state_ = combined >> kNullabilityBitSize;
type->untag()->nullability_ = combined & kNullabilityBitMask;
}
}
void PostLoad(Deserializer* d, const Array& refs, bool primary) {
if (!table_.IsNull()) {
auto object_store = d->isolate_group()->object_store();
VerifyCanonicalSet(d, refs,
Array::Handle(object_store->canonical_record_types()));
object_store->set_canonical_record_types(table_);
} else if (!primary && is_canonical()) {
AbstractType& type = AbstractType::Handle(d->zone());
for (intptr_t i = start_index_, n = stop_index_; i < n; i++) {
type ^= refs.At(i);
type = type.Canonicalize(d->thread(), nullptr);
refs.SetAt(i, type);
}
}
RecordType& type = RecordType::Handle(d->zone());
Code& stub = Code::Handle(d->zone());
if (Snapshot::IncludesCode(d->kind())) {
for (intptr_t id = start_index_, n = stop_index_; id < n; id++) {
type ^= refs.At(id);
type.UpdateTypeTestingStubEntryPoint();
}
} else {
for (intptr_t id = start_index_, n = stop_index_; id < n; id++) {
type ^= refs.At(id);
stub = TypeTestingStubGenerator::DefaultCodeForType(type);
type.InitializeTypeTestingStubNonAtomic(stub);
}
}
}
};
#if !defined(DART_PRECOMPILED_RUNTIME)
class TypeRefSerializationCluster : public SerializationCluster {
public:
@ -5994,6 +6117,7 @@ class ProgramSerializationRoots : public SerializationRoots {
V(canonical_types, Array, HashTables::New<CanonicalTypeSet>(4)) \
V(canonical_function_types, Array, \
HashTables::New<CanonicalFunctionTypeSet>(4)) \
V(canonical_record_types, Array, HashTables::New<CanonicalRecordTypeSet>(4)) \
V(canonical_type_arguments, Array, \
HashTables::New<CanonicalTypeArgumentsSet>(4)) \
V(canonical_type_parameters, Array, \
@ -6668,6 +6792,10 @@ bool Serializer::CreateArtificialNodeIfNeeded(ObjectPtr obj) {
type = "FunctionType";
break;
};
case kRecordTypeCid: {
type = "RecordType";
break;
};
default:
FATAL("Request to create artificial node for object with cid %d", cid);
}
@ -6854,6 +6982,9 @@ SerializationCluster* Serializer::NewClusterForClass(intptr_t cid,
case kFunctionTypeCid:
return new (Z) FunctionTypeSerializationCluster(
is_canonical, cluster_represents_canonical_set);
case kRecordTypeCid:
return new (Z) RecordTypeSerializationCluster(
is_canonical, cluster_represents_canonical_set);
case kTypeRefCid:
return new (Z) TypeRefSerializationCluster();
case kTypeParameterCid:
@ -7998,6 +8129,9 @@ DeserializationCluster* Deserializer::ReadCluster() {
case kFunctionTypeCid:
return new (Z)
FunctionTypeDeserializationCluster(is_canonical, !is_non_root_unit_);
case kRecordTypeCid:
return new (Z)
RecordTypeDeserializationCluster(is_canonical, !is_non_root_unit_);
case kTypeRefCid:
ASSERT(!is_canonical);
return new (Z) TypeRefDeserializationCluster();


@ -212,6 +212,45 @@ class CanonicalFunctionTypeTraits {
};
typedef UnorderedHashSet<CanonicalFunctionTypeTraits> CanonicalFunctionTypeSet;
class CanonicalRecordTypeKey {
public:
explicit CanonicalRecordTypeKey(const RecordType& key) : key_(key) {}
bool Matches(const RecordType& arg) const { return key_.Equals(arg); }
uword Hash() const { return key_.Hash(); }
const RecordType& key_;
private:
DISALLOW_ALLOCATION();
};
// Traits for looking up Canonical RecordType based on its hash.
class CanonicalRecordTypeTraits {
public:
static const char* Name() { return "CanonicalRecordTypeTraits"; }
static bool ReportStats() { return false; }
// Called when growing the table.
static bool IsMatch(const Object& a, const Object& b) {
ASSERT(a.IsRecordType() && b.IsRecordType());
const RecordType& arg1 = RecordType::Cast(a);
const RecordType& arg2 = RecordType::Cast(b);
return arg1.Equals(arg2) && (arg1.Hash() == arg2.Hash());
}
static bool IsMatch(const CanonicalRecordTypeKey& a, const Object& b) {
ASSERT(b.IsRecordType());
return a.Matches(RecordType::Cast(b));
}
static uword Hash(const Object& key) {
ASSERT(key.IsRecordType());
return RecordType::Cast(key).Hash();
}
static uword Hash(const CanonicalRecordTypeKey& key) { return key.Hash(); }
static ObjectPtr NewKey(const CanonicalRecordTypeKey& obj) {
return obj.key_.ptr();
}
};
typedef UnorderedHashSet<CanonicalRecordTypeTraits> CanonicalRecordTypeSet;
class CanonicalTypeParameterKey {
public:
explicit CanonicalTypeParameterKey(const TypeParameter& key) : key_(key) {}


@ -822,6 +822,11 @@ AbstractTypePtr ClassFinalizer::FinalizeType(const AbstractType& type,
pending_types);
}
if (type.IsRecordType()) {
return FinalizeRecordType(zone, RecordType::Cast(type), finalization,
pending_types);
}
// This type is the root type of the type graph if no pending types queue is
// allocated yet. A function type is a collection of types, but not a root.
const bool is_root_type = pending_types == NULL;
@ -906,6 +911,35 @@ AbstractTypePtr ClassFinalizer::FinalizeSignature(Zone* zone,
return signature.ptr();
}
AbstractTypePtr ClassFinalizer::FinalizeRecordType(
Zone* zone,
const RecordType& record,
FinalizationKind finalization,
PendingTypes* pending_types) {
AbstractType& type = AbstractType::Handle(zone);
AbstractType& finalized_type = AbstractType::Handle(zone);
// Finalize record field types.
const intptr_t num_fields = record.NumFields();
for (intptr_t i = 0; i < num_fields; ++i) {
type = record.FieldTypeAt(i);
finalized_type = FinalizeType(type, kFinalize, pending_types);
if (type.ptr() != finalized_type.ptr()) {
record.SetFieldTypeAt(i, finalized_type);
}
}
if (FLAG_trace_type_finalization) {
THR_Print("Marking record type '%s' as finalized\n",
String::Handle(zone, record.Name()).ToCString());
}
record.SetIsFinalized();
if (finalization >= kCanonicalize) {
return record.Canonicalize(Thread::Current(), nullptr);
}
return record.ptr();
}
#if !defined(DART_PRECOMPILED_RUNTIME)
#if defined(TARGET_ARCH_X64)
@ -1597,6 +1631,7 @@ void ClassFinalizer::RemapClassIds(intptr_t* old_to_new_cid) {
//
// * UntaggedType::hash_
// * UntaggedFunctionType::hash_
// * UntaggedRecordType::hash_
// * UntaggedTypeParameter::hash_
// * UntaggedTypeArguments::hash_
// * InstancePtr (weak table)
@ -1611,6 +1646,7 @@ void ClassFinalizer::RemapClassIds(intptr_t* old_to_new_cid) {
//
// * ObjectStore::canonical_types()
// * ObjectStore::canonical_function_types()
// * ObjectStore::canonical_record_types()
// * ObjectStore::canonical_type_parameters()
// * ObjectStore::canonical_type_arguments()
// * Class::constants()
@ -1621,6 +1657,7 @@ class ClearTypeHashVisitor : public ObjectVisitor {
: type_param_(TypeParameter::Handle(zone)),
type_(Type::Handle(zone)),
function_type_(FunctionType::Handle(zone)),
record_type_(RecordType::Handle(zone)),
type_args_(TypeArguments::Handle(zone)) {}
void VisitObject(ObjectPtr obj) {
@ -1633,6 +1670,9 @@ class ClearTypeHashVisitor : public ObjectVisitor {
} else if (obj->IsFunctionType()) {
function_type_ ^= obj;
function_type_.SetHash(0);
} else if (obj->IsRecordType()) {
record_type_ ^= obj;
record_type_.SetHash(0);
} else if (obj->IsTypeArguments()) {
type_args_ ^= obj;
type_args_.SetHash(0);
@ -1643,6 +1683,7 @@ class ClearTypeHashVisitor : public ObjectVisitor {
TypeParameter& type_param_;
Type& type_;
FunctionType& function_type_;
RecordType& record_type_;
TypeArguments& type_args_;
};
@ -1700,6 +1741,27 @@ void ClassFinalizer::RehashTypes() {
}
object_store->set_canonical_function_types(function_types_table.Release());
// Rehash the canonical RecordTypes table.
Array& record_types = Array::Handle(Z);
RecordType& record_type = RecordType::Handle(Z);
{
CanonicalRecordTypeSet record_types_table(
Z, object_store->canonical_record_types());
record_types = HashTables::ToArray(record_types_table, false);
record_types_table.Release();
}
dict_size = Utils::RoundUpToPowerOfTwo(record_types.Length() * 4 / 3);
CanonicalRecordTypeSet record_types_table(
Z, HashTables::New<CanonicalRecordTypeSet>(dict_size, Heap::kOld));
for (intptr_t i = 0; i < record_types.Length(); i++) {
record_type ^= record_types.At(i);
bool present = record_types_table.Insert(record_type);
// Two recursive types with different topology (and hashes) may be equal.
ASSERT(!present || record_type.IsRecursive());
}
object_store->set_canonical_record_types(record_types_table.Release());
// Rehash the canonical TypeParameters table.
Array& typeparams = Array::Handle(Z);
TypeParameter& typeparam = TypeParameter::Handle(Z);


@ -93,6 +93,12 @@ class ClassFinalizer : public AllStatic {
FinalizationKind finalization = kCanonicalize,
PendingTypes* pending_types = NULL);
static AbstractTypePtr FinalizeRecordType(
Zone* zone,
const RecordType& record,
FinalizationKind finalization = kCanonicalize,
PendingTypes* pending_types = nullptr);
#if !defined(DART_PRECOMPILED_RUNTIME)
static void AllocateEnumValues(const Class& enum_cls);
#endif // !defined(DART_PRECOMPILED_RUNTIME)


@ -68,13 +68,14 @@ typedef uint16_t ClassIdTagType;
V(TypeArguments) \
V(AbstractType) \
V(Type) \
V(FunctionType) \
V(RecordType) \
V(TypeRef) \
V(TypeParameter) \
V(FinalizerBase) \
V(Finalizer) \
V(NativeFinalizer) \
V(FinalizerEntry) \
V(FunctionType) \
V(TypeRef) \
V(TypeParameter) \
V(Closure) \
V(Number) \
V(Integer) \
@ -358,8 +359,10 @@ inline bool IsBuiltinListClassId(intptr_t index) {
}
inline bool IsTypeClassId(intptr_t index) {
// Only Type and FunctionType can be encountered as instance types at runtime.
return index == kTypeCid || index == kFunctionTypeCid;
// Only Type, FunctionType and RecordType can be encountered as instance
// types at runtime.
return index == kTypeCid || index == kFunctionTypeCid ||
index == kRecordTypeCid;
}
inline bool IsTypedDataBaseClassId(intptr_t index) {


@ -1187,6 +1187,13 @@ void Precompiler::AddType(const AbstractType& abstype) {
AbstractType& type = AbstractType::Handle(Z);
type = TypeRef::Cast(abstype).type();
AddType(type);
} else if (abstype.IsRecordType()) {
const auto& rec = RecordType::Cast(abstype);
AbstractType& type = AbstractType::Handle(Z);
for (intptr_t i = 0, n = rec.NumFields(); i < n; ++i) {
type = rec.FieldTypeAt(i);
AddType(type);
}
}
}
@ -2392,6 +2399,7 @@ void Precompiler::AttachOptimizedTypeTestingStub() {
void VisitObject(ObjectPtr obj) {
if (obj->GetClassId() == kTypeCid ||
obj->GetClassId() == kFunctionTypeCid ||
obj->GetClassId() == kRecordTypeCid ||
obj->GetClassId() == kTypeRefCid) {
type_ ^= obj;
types_->Add(type_);


@ -1056,15 +1056,18 @@ static void JumpIfType(Assembler* assembler,
Register cid,
Register tmp,
Label* target) {
RangeCheck(assembler, cid, tmp, kTypeCid, kFunctionTypeCid, kIfInRange,
target);
COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
(kRecordTypeCid == kTypeCid + 2));
RangeCheck(assembler, cid, tmp, kTypeCid, kRecordTypeCid, kIfInRange, target);
}
static void JumpIfNotType(Assembler* assembler,
Register cid,
Register tmp,
Label* target) {
RangeCheck(assembler, cid, tmp, kTypeCid, kFunctionTypeCid, kIfNotInRange,
COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
(kRecordTypeCid == kTypeCid + 2));
RangeCheck(assembler, cid, tmp, kTypeCid, kRecordTypeCid, kIfNotInRange,
target);
}


@ -1218,15 +1218,18 @@ static void JumpIfType(Assembler* assembler,
Register cid,
Register tmp,
Label* target) {
RangeCheck(assembler, cid, tmp, kTypeCid, kFunctionTypeCid, kIfInRange,
target);
COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
(kRecordTypeCid == kTypeCid + 2));
RangeCheck(assembler, cid, tmp, kTypeCid, kRecordTypeCid, kIfInRange, target);
}
static void JumpIfNotType(Assembler* assembler,
Register cid,
Register tmp,
Label* target) {
RangeCheck(assembler, cid, tmp, kTypeCid, kFunctionTypeCid, kIfNotInRange,
COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
(kRecordTypeCid == kTypeCid + 2));
RangeCheck(assembler, cid, tmp, kTypeCid, kRecordTypeCid, kIfNotInRange,
target);
}


@ -1165,11 +1165,15 @@ static void JumpIfNotList(Assembler* assembler, Register cid, Label* target) {
}
static void JumpIfType(Assembler* assembler, Register cid, Label* target) {
RangeCheck(assembler, cid, kTypeCid, kFunctionTypeCid, kIfInRange, target);
COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
(kRecordTypeCid == kTypeCid + 2));
RangeCheck(assembler, cid, kTypeCid, kRecordTypeCid, kIfInRange, target);
}
static void JumpIfNotType(Assembler* assembler, Register cid, Label* target) {
RangeCheck(assembler, cid, kTypeCid, kFunctionTypeCid, kIfNotInRange, target);
COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
(kRecordTypeCid == kTypeCid + 2));
RangeCheck(assembler, cid, kTypeCid, kRecordTypeCid, kIfNotInRange, target);
}
// Return type quickly for simple types (not parameterized and not signature).


@ -1237,15 +1237,18 @@ static void JumpIfType(Assembler* assembler,
Register cid,
Register tmp,
Label* target) {
RangeCheck(assembler, cid, tmp, kTypeCid, kFunctionTypeCid, kIfInRange,
target);
COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
(kRecordTypeCid == kTypeCid + 2));
RangeCheck(assembler, cid, tmp, kTypeCid, kRecordTypeCid, kIfInRange, target);
}
static void JumpIfNotType(Assembler* assembler,
Register cid,
Register tmp,
Label* target) {
RangeCheck(assembler, cid, tmp, kTypeCid, kFunctionTypeCid, kIfNotInRange,
COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
(kRecordTypeCid == kTypeCid + 2));
RangeCheck(assembler, cid, tmp, kTypeCid, kRecordTypeCid, kIfNotInRange,
target);
}


@ -1068,11 +1068,15 @@ static void JumpIfNotList(Assembler* assembler, Register cid, Label* target) {
}
static void JumpIfType(Assembler* assembler, Register cid, Label* target) {
RangeCheck(assembler, cid, kTypeCid, kFunctionTypeCid, kIfInRange, target);
COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
(kRecordTypeCid == kTypeCid + 2));
RangeCheck(assembler, cid, kTypeCid, kRecordTypeCid, kIfInRange, target);
}
static void JumpIfNotType(Assembler* assembler, Register cid, Label* target) {
RangeCheck(assembler, cid, kTypeCid, kFunctionTypeCid, kIfNotInRange, target);
COMPILE_ASSERT((kFunctionTypeCid == kTypeCid + 1) &&
(kRecordTypeCid == kTypeCid + 2));
RangeCheck(assembler, cid, kTypeCid, kRecordTypeCid, kIfNotInRange, target);
}
// Return type quickly for simple types (not parameterized and not signature).


@ -2582,6 +2582,10 @@ SubtypeTestCachePtr FlowGraphCompiler::GenerateInlineInstanceof(
return GenerateFunctionTypeTest(source, type, is_instance_lbl,
is_not_instance_lbl);
}
if (type.IsRecordType()) {
// TODO(dartbug.com/49719)
UNIMPLEMENTED();
}
if (type.IsInstantiated()) {
const Class& type_class = Class::ZoneHandle(zone(), type.type_class());


@ -1598,6 +1598,11 @@ void FlowGraphSerializer::WriteObjectImpl(const Object& x,
stream_->WriteBytes(latin1, length);
break;
}
case kRecordTypeCid: {
// TODO(dartbug.com/49719)
UNIMPLEMENTED();
break;
}
case kSentinelCid:
if (x.ptr() == Object::sentinel().ptr()) {
Write<bool>(true);
@ -1858,6 +1863,11 @@ const Object& FlowGraphDeserializer::ReadObjectImpl(intptr_t cid,
return String::ZoneHandle(Z,
Symbols::FromLatin1(thread(), latin1, length));
}
case kRecordTypeCid: {
// TODO(dartbug.com/49719)
UNIMPLEMENTED();
break;
}
case kSentinelCid:
return Read<bool>() ? Object::sentinel() : Object::transition_sentinel();
case kSmiCid:


@ -599,7 +599,7 @@ void CompileType::Union(CompileType* other) {
// Climb up the hierarchy to find a suitable supertype. Note that interface
// types are not considered, making the union potentially non-commutative
if (abstract_type->IsInstantiated() && !abstract_type->IsDynamicType() &&
!abstract_type->IsFunctionType()) {
!abstract_type->IsFunctionType() && !abstract_type->IsRecordType()) {
Class& cls = Class::Handle(abstract_type->type_class());
for (; !cls.IsNull() && !cls.IsGeneric(); cls = cls.SuperClass()) {
type_ = &AbstractType::ZoneHandle(cls.RareType());
@ -773,6 +773,10 @@ intptr_t CompileType::ToNullableCid() {
cid_ = kSentinelCid;
} else if (type_->IsFunctionType() || type_->IsDartFunctionType()) {
cid_ = kClosureCid;
} else if (type_->IsRecordType()) {
// TODO(dartbug.com/49719)
// cid_ = kRecordCid;
UNIMPLEMENTED();
} else if (type_->type_class_id() != kIllegalCid) {
const Class& type_class = Class::Handle(type_->type_class());
intptr_t implementation_cid = kIllegalCid;


@ -1041,7 +1041,7 @@ BoolPtr CallSpecializer::InstanceOfAsBool(
ASSERT(results->is_empty());
ASSERT(ic_data.NumArgsTested() == 1); // Unary checks only.
if (type.IsFunctionType() || type.IsDartFunctionType() ||
!type.IsInstantiated()) {
type.IsRecordType() || !type.IsInstantiated()) {
return Bool::null();
}
const Class& type_class = Class::Handle(Z, type.type_class());
@ -1108,8 +1108,8 @@ bool CallSpecializer::TypeCheckAsClassEquality(const AbstractType& type,
ASSERT(type.IsFinalized());
// Requires CHA.
if (!type.IsInstantiated()) return false;
// Function types have different type checking rules.
if (type.IsFunctionType()) return false;
// Function and record types have different type checking rules.
if (type.IsFunctionType() || type.IsRecordType()) return false;
const Class& type_class = Class::Handle(type.type_class());
if (!CHA::HasSingleConcreteImplementation(type_class, type_cid)) {


@ -373,8 +373,11 @@ bool SimpleInstanceOfType(const AbstractType& type) {
// Bail if the type is still uninstantiated at compile time.
if (!type.IsInstantiated()) return false;
// Bail if the type is a function or a Dart Function type.
if (type.IsFunctionType() || type.IsDartFunctionType()) return false;
// Bail if the type is a function, record or a Dart Function type.
if (type.IsFunctionType() || type.IsRecordType() ||
type.IsDartFunctionType()) {
return false;
}
ASSERT(type.HasTypeClass());
const Class& type_class = Class::Handle(type.type_class());


@ -257,6 +257,18 @@ void KernelFingerprintHelper::CalculateDartTypeFingerprint() {
CalculateDartTypeFingerprint(); // read left;
CalculateDartTypeFingerprint(); // read right;
break;
case kRecordType: {
BuildHash(static_cast<uint32_t>(ReadNullability()));
CalculateListOfDartTypesFingerprint();
const intptr_t named_count = ReadListLength();
BuildHash(named_count);
for (intptr_t i = 0; i < named_count; ++i) {
CalculateStringReferenceFingerprint();
CalculateDartTypeFingerprint();
ReadFlags();
}
break;
}
default:
ReportUnexpectedTag("type", tag);
UNREACHABLE();


@ -2213,6 +2213,17 @@ void KernelReaderHelper::SkipDartType() {
case kSimpleFunctionType:
SkipFunctionType(true);
return;
case kRecordType: {
ReadNullability();
SkipListOfDartTypes();
const intptr_t named_count = ReadListLength();
for (intptr_t i = 0; i < named_count; ++i) {
SkipStringReference();
SkipDartType();
ReadFlags();
}
return;
}
case kTypedefType:
ReadNullability(); // read nullability.
ReadUInt(); // read index for canonical name.
@ -3148,6 +3159,9 @@ void TypeTranslator::BuildTypeInternal() {
case kSimpleFunctionType:
BuildFunctionType(true);
break;
case kRecordType:
BuildRecordType();
break;
case kTypeParameterType:
BuildTypeParameterType();
if (result_.IsTypeParameter() &&
@ -3299,6 +3313,62 @@ void TypeTranslator::BuildFunctionType(bool simple) {
result_ = signature.ptr();
}
void TypeTranslator::BuildRecordType() {
Nullability nullability = helper_->ReadNullability();
if (apply_canonical_type_erasure_ && nullability != Nullability::kNullable) {
nullability = Nullability::kLegacy;
}
const intptr_t positional_count = helper_->ReadListLength();
intptr_t named_count = 0;
{
AlternativeReadingScope alt(&helper_->reader_);
for (intptr_t i = 0; i < positional_count; ++i) {
helper_->SkipDartType();
}
named_count = helper_->ReadListLength();
}
const intptr_t num_fields = positional_count + named_count;
const Array& field_types =
Array::Handle(Z, Array::New(num_fields, Heap::kOld));
const Array& field_names =
(named_count == 0)
? Object::empty_array()
: Array::Handle(Z, Array::New(named_count, Heap::kOld));
// Suspend finalization of types inside this one. They will be finalized after
// the whole record type is constructed.
bool finalize = finalize_;
finalize_ = false;
intptr_t pos = 0;
for (intptr_t i = 0; i < positional_count; ++i) {
BuildTypeInternal(); // read ith positional field.
field_types.SetAt(pos++, result_);
}
helper_->ReadListLength();
for (intptr_t i = 0; i < named_count; ++i) {
String& name = H.DartSymbolObfuscate(helper_->ReadStringReference());
field_names.SetAt(i, name);
BuildTypeInternal();
field_types.SetAt(pos++, result_);
helper_->ReadFlags();
}
finalize_ = finalize;
RecordType& rec = RecordType::Handle(
Z, RecordType::New(field_types, field_names, nullability));
if (finalize_) {
rec ^= ClassFinalizer::FinalizeType(rec);
}
result_ = rec.ptr();
}
void TypeTranslator::BuildTypeParameterType() {
Nullability nullability = helper_->ReadNullability();
if (apply_canonical_type_erasure_ && nullability != Nullability::kNullable) {


@ -1542,6 +1542,7 @@ class TypeTranslator {
void BuildTypeInternal();
void BuildInterfaceType(bool simple);
void BuildFunctionType(bool simple);
void BuildRecordType();
void BuildTypeParameterType();
void BuildIntersectionType();


@ -1355,6 +1355,9 @@ void ScopeBuilder::VisitDartType() {
case kSimpleFunctionType:
VisitFunctionType(true);
return;
case kRecordType:
VisitRecordType();
return;
case kTypeParameterType:
VisitTypeParameterType();
return;
@ -1417,6 +1420,22 @@ void ScopeBuilder::VisitFunctionType(bool simple) {
VisitDartType(); // read return type.
}
void ScopeBuilder::VisitRecordType() {
helper_.ReadNullability(); // read nullability.
const intptr_t positional_count =
helper_.ReadListLength(); // read positional list length.
for (intptr_t i = 0; i < positional_count; ++i) {
VisitDartType(); // read positional[i].
}
const intptr_t named_count =
helper_.ReadListLength(); // read named list length.
for (intptr_t i = 0; i < named_count; ++i) {
helper_.SkipStringReference(); // read named[i].name.
VisitDartType(); // read named[i].type.
helper_.ReadFlags(); // read named[i].flags
}
}
void ScopeBuilder::VisitTypeParameterType() {
Function& function = Function::Handle(Z, parsed_function_->function().ptr());


@ -46,6 +46,7 @@ class ScopeBuilder {
void VisitDartType();
void VisitInterfaceType(bool simple);
void VisitFunctionType(bool simple);
void VisitRecordType();
void VisitTypeParameterType();
void VisitIntersectionType();
void HandleLocalFunction(intptr_t parent_kernel_offset);


@ -715,6 +715,12 @@ class FunctionType : public AllStatic {
FINAL_CLASS();
};
class RecordType : public AllStatic {
public:
static word InstanceSize();
FINAL_CLASS();
};
class TypeRef : public AllStatic {
public:
static word type_offset();

File diff suppressed because it is too large.


@ -446,6 +446,7 @@
SIZEOF(PcDescriptors, HeaderSize, UntaggedPcDescriptors) \
SIZEOF(Pointer, InstanceSize, UntaggedPointer) \
SIZEOF(ReceivePort, InstanceSize, UntaggedReceivePort) \
SIZEOF(RecordType, InstanceSize, UntaggedRecordType) \
SIZEOF(RegExp, InstanceSize, UntaggedRegExp) \
SIZEOF(Script, InstanceSize, UntaggedScript) \
SIZEOF(SendPort, InstanceSize, UntaggedSendPort) \


@ -336,6 +336,7 @@ static void BuildInstantiateTypeParameterStub(Assembler* assembler,
__ Ret();
}
// TODO(dartbug.com/49719)
// Handle [FunctionType]s.
__ Bind(&type_parameter_value_is_not_type);
__ CompareImmediate(InstantiateTypeABI::kScratchReg, kFunctionTypeCid);


@ -125,9 +125,6 @@ static const uint32_t kMaxSupportedKernelFormatVersion = 86;
V(AsyncForInStatement, 80) \
V(AssertBlock, 81) \
V(TypedefType, 87) \
V(NeverType, 98) \
V(IntersectionType, 99) \
V(RecordType, 100) \
V(InvalidType, 90) \
V(DynamicType, 91) \
V(VoidType, 92) \
@ -136,6 +133,9 @@ static const uint32_t kMaxSupportedKernelFormatVersion = 86;
V(TypeParameterType, 95) \
V(SimpleInterfaceType, 96) \
V(SimpleFunctionType, 97) \
V(NeverType, 98) \
V(IntersectionType, 99) \
V(RecordType, 100) \
V(ConstantExpression, 106) \
V(InstanceGet, 118) \
V(InstanceSet, 119) \


@ -3382,6 +3382,10 @@ void MessageSerializer::Trace(Object* object) {
ILLEGAL(SuspendState)
ILLEGAL(UserTag)
// TODO(dartbug.com/49719): allow sending records as long as their
// elements are objects that can be sent.
ILLEGAL(RecordType)
// From "dart:ffi" we handle only Pointer/DynamicLibrary specially, since
// those are the only non-abstract classes (so we avoid checking more cids
// here that cannot happen in reality)


@ -733,6 +733,7 @@ void Object::Init(IsolateGroup* isolate_group) {
*null_instance_ = Instance::null();
*null_function_ = Function::null();
*null_function_type_ = FunctionType::null();
*null_record_type_ = RecordType::null();
*null_type_arguments_ = TypeArguments::null();
*empty_type_arguments_ = TypeArguments::null();
*null_abstract_type_ = AbstractType::null();
@ -1160,6 +1161,11 @@ void Object::Init(IsolateGroup* isolate_group) {
cls.set_is_declaration_loaded();
cls.set_is_type_finalized();
cls = Class::New<RecordType, RTN::RecordType>(isolate_group);
cls.set_is_allocate_finalized();
cls.set_is_declaration_loaded();
cls.set_is_type_finalized();
cls = dynamic_class_;
*dynamic_type_ =
Type::New(cls, Object::null_type_arguments(), Nullability::kNullable);
@ -1241,6 +1247,8 @@ void Object::Init(IsolateGroup* isolate_group) {
ASSERT(null_function_->IsFunction());
ASSERT(!null_function_type_->IsSmi());
ASSERT(null_function_type_->IsFunctionType());
ASSERT(!null_record_type_->IsSmi());
ASSERT(null_record_type_->IsRecordType());
ASSERT(!null_type_arguments_->IsSmi());
ASSERT(null_type_arguments_->IsTypeArguments());
ASSERT(!null_compressed_stackmaps_->IsSmi());
@ -1740,6 +1748,12 @@ ErrorPtr Object::Init(IsolateGroup* isolate_group,
kInitialCanonicalFunctionTypeSize, Heap::kOld);
object_store->set_canonical_function_types(array);
// Initialize hash set for canonical record types.
const intptr_t kInitialCanonicalRecordTypeSize = 16;
array = HashTables::New<CanonicalRecordTypeSet>(
kInitialCanonicalRecordTypeSize, Heap::kOld);
object_store->set_canonical_record_types(array);
// Initialize hash set for canonical type parameters.
const intptr_t kInitialCanonicalTypeParameterSize = 4;
array = HashTables::New<CanonicalTypeParameterSet>(
@ -1757,6 +1771,8 @@ ErrorPtr Object::Init(IsolateGroup* isolate_group,
Class::Handle(zone, Class::New<Type, RTN::Type>(isolate_group));
const Class& function_type_cls = Class::Handle(
zone, Class::New<FunctionType, RTN::FunctionType>(isolate_group));
const Class& record_type_cls = Class::Handle(
zone, Class::New<RecordType, RTN::RecordType>(isolate_group));
const Class& type_ref_cls =
Class::Handle(zone, Class::New<TypeRef, RTN::TypeRef>(isolate_group));
const Class& type_parameter_cls = Class::Handle(
@ -1940,6 +1956,9 @@ ErrorPtr Object::Init(IsolateGroup* isolate_group,
RegisterPrivateClass(function_type_cls, Symbols::_FunctionType(), core_lib);
pending_classes.Add(function_type_cls);
RegisterPrivateClass(record_type_cls, Symbols::_RecordType(), core_lib);
pending_classes.Add(record_type_cls);
RegisterPrivateClass(type_ref_cls, Symbols::_TypeRef(), core_lib);
pending_classes.Add(type_ref_cls);
@ -2472,6 +2491,7 @@ ErrorPtr Object::Init(IsolateGroup* isolate_group,
cls = Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate_group);
cls = Class::New<Type, RTN::Type>(isolate_group);
cls = Class::New<FunctionType, RTN::FunctionType>(isolate_group);
cls = Class::New<RecordType, RTN::RecordType>(isolate_group);
cls = Class::New<TypeRef, RTN::TypeRef>(isolate_group);
cls = Class::New<TypeParameter, RTN::TypeParameter>(isolate_group);
@ -17757,8 +17777,8 @@ bool Code::IsAllocationStubCode() const {
bool Code::IsTypeTestStubCode() const {
auto const cid = OwnerClassId();
return cid == kAbstractTypeCid || cid == kTypeCid ||
cid == kFunctionTypeCid || cid == kTypeRefCid ||
cid == kTypeParameterCid;
cid == kFunctionTypeCid || cid == kRecordTypeCid ||
cid == kTypeRefCid || cid == kTypeParameterCid;
}
bool Code::IsFunctionCode() const {
@ -19600,6 +19620,10 @@ AbstractTypePtr Instance::GetType(Heap::Space space) const {
signature ^= signature.Canonicalize(thread, nullptr);
return signature.ptr();
}
if (cls.IsRecordClass()) {
// TODO(dartbug.com/49719)
UNIMPLEMENTED();
}
Type& type = Type::Handle(zone);
if (!cls.IsGeneric()) {
type = cls.DeclarationType();
@ -19816,6 +19840,10 @@ bool Instance::RuntimeTypeIsSubtypeOf(
Closure::Cast(*this).GetInstantiatedSignature(zone));
return sig.IsSubtypeOf(FunctionType::Cast(instantiated_other), Heap::kOld);
}
if (cls.IsRecordClass()) {
// TODO(dartbug.com/49719)
UNIMPLEMENTED();
}
TypeArguments& type_arguments = TypeArguments::Handle(zone);
if (cls.NumTypeArguments() > 0) {
type_arguments = GetTypeArguments();
@ -20164,6 +20192,9 @@ AbstractTypePtr AbstractType::SetInstantiatedNullability(
if (IsFunctionType()) {
return FunctionType::Cast(*this).ToNullability(result_nullability, space);
}
if (IsRecordType()) {
return RecordType::Cast(*this).ToNullability(result_nullability, space);
}
if (IsTypeParameter()) {
return TypeParameter::Cast(*this).ToNullability(result_nullability, space);
}
@ -20251,6 +20282,35 @@ bool AbstractType::IsEquivalent(const Instance& other,
return false;
}
bool AbstractType::IsNullabilityEquivalent(Thread* thread,
const AbstractType& other_type,
TypeEquality kind) const {
Nullability this_type_nullability = nullability();
Nullability other_type_nullability = other_type.nullability();
if (kind == TypeEquality::kInSubtypeTest) {
if (thread->isolate_group()->use_strict_null_safety_checks() &&
this_type_nullability == Nullability::kNullable &&
other_type_nullability == Nullability::kNonNullable) {
return false;
}
} else {
if (kind == TypeEquality::kSyntactical) {
if (this_type_nullability == Nullability::kLegacy) {
this_type_nullability = Nullability::kNonNullable;
}
if (other_type_nullability == Nullability::kLegacy) {
other_type_nullability = Nullability::kNonNullable;
}
} else {
ASSERT(kind == TypeEquality::kCanonical);
}
if (this_type_nullability != other_type_nullability) {
return false;
}
}
return true;
}
bool AbstractType::IsRecursive(TrailPtr trail) const {
// AbstractType is an abstract class.
UNREACHABLE();
@ -20468,6 +20528,10 @@ void AbstractType::PrintName(NameVisibility name_visibility,
}
return;
}
if (IsRecordType()) {
// TODO(dartbug.com/49719)
UNIMPLEMENTED();
}
const TypeArguments& args = TypeArguments::Handle(zone, arguments());
const intptr_t num_args = args.IsNull() ? 0 : args.Length();
intptr_t first_type_param_index;
@ -20508,7 +20572,7 @@ void AbstractType::PrintName(NameVisibility name_visibility,
}
StringPtr AbstractType::ClassName() const {
ASSERT(!IsFunctionType());
ASSERT(!IsFunctionType() && !IsRecordType());
return Class::Handle(type_class()).Name();
}
@ -20757,6 +20821,15 @@ bool AbstractType::IsSubtypeOf(const AbstractType& other,
// fall through to class-based type tests.
return false;
}
// Record types cannot be handled by Class::IsSubtypeOf().
if (IsRecordType()) {
// TODO(dartbug.com/49719)
UNIMPLEMENTED();
} else if (other.IsRecordType()) {
// RecordTypes can only be subtyped by other RecordTypes, so don't
// fall through to class-based type tests.
return false;
}
const Class& type_cls = Class::Handle(zone, type_class());
return Class::IsSubtypeOf(type_cls, TypeArguments::Handle(zone, arguments()),
nullability(), other, space, trail);
@ -21128,34 +21201,13 @@ bool Type::IsEquivalent(const Instance& other,
if (type_class_id() != other_type.type_class_id()) {
return false;
}
Nullability this_type_nullability = nullability();
Nullability other_type_nullability = other_type.nullability();
Thread* thread = Thread::Current();
auto isolate_group = thread->isolate_group();
Zone* zone = thread->zone();
if (kind == TypeEquality::kInSubtypeTest) {
if (isolate_group->use_strict_null_safety_checks() &&
this_type_nullability == Nullability::kNullable &&
other_type_nullability == Nullability::kNonNullable) {
return false;
}
} else {
if (kind == TypeEquality::kSyntactical) {
if (this_type_nullability == Nullability::kLegacy) {
this_type_nullability = Nullability::kNonNullable;
}
if (other_type_nullability == Nullability::kLegacy) {
other_type_nullability = Nullability::kNonNullable;
}
} else {
ASSERT(kind == TypeEquality::kCanonical);
ASSERT(IsFinalized() && other_type.IsFinalized());
}
if (this_type_nullability != other_type_nullability) {
return false;
}
if (!IsNullabilityEquivalent(thread, other_type, kind)) {
return false;
}
if (!IsFinalized() || !other_type.IsFinalized()) {
ASSERT(kind != TypeEquality::kCanonical);
return false; // Too early to decide if equal.
}
if (arguments() == other_type.arguments()) {
@ -21232,34 +21284,13 @@ bool FunctionType::IsEquivalent(const Instance& other,
// Different number of type parameters or parameters.
return false;
}
Nullability this_type_nullability = nullability();
Nullability other_type_nullability = other_type.nullability();
Thread* thread = Thread::Current();
auto isolate_group = thread->isolate_group();
Zone* zone = thread->zone();
if (kind == TypeEquality::kInSubtypeTest) {
if (isolate_group->null_safety() &&
this_type_nullability == Nullability::kNullable &&
other_type_nullability == Nullability::kNonNullable) {
return false;
}
} else {
if (kind == TypeEquality::kSyntactical) {
if (this_type_nullability == Nullability::kLegacy) {
this_type_nullability = Nullability::kNonNullable;
}
if (other_type_nullability == Nullability::kLegacy) {
other_type_nullability = Nullability::kNonNullable;
}
} else {
ASSERT(kind == TypeEquality::kCanonical);
ASSERT(IsFinalized() && other_type.IsFinalized());
}
if (this_type_nullability != other_type_nullability) {
return false;
}
if (!IsNullabilityEquivalent(thread, other_type, kind)) {
return false;
}
if (!IsFinalized() || !other_type.IsFinalized()) {
ASSERT(kind != TypeEquality::kCanonical);
return false; // Too early to decide if equal.
}
// Equal function types must have equal signature types and equal optional
@ -22196,31 +22227,7 @@ bool TypeParameter::IsEquivalent(const Instance& other,
return false;
}
}
// Compare nullability.
Nullability this_type_param_nullability = nullability();
Nullability other_type_param_nullability = other_type_param.nullability();
if (kind == TypeEquality::kInSubtypeTest) {
if (IsolateGroup::Current()->use_strict_null_safety_checks() &&
(this_type_param_nullability == Nullability::kNullable) &&
(other_type_param_nullability == Nullability::kNonNullable)) {
return false;
}
} else {
if (kind == TypeEquality::kSyntactical) {
if (this_type_param_nullability == Nullability::kLegacy) {
this_type_param_nullability = Nullability::kNonNullable;
}
if (other_type_param_nullability == Nullability::kLegacy) {
other_type_param_nullability = Nullability::kNonNullable;
}
} else {
ASSERT(kind == TypeEquality::kCanonical);
}
if (this_type_param_nullability != other_type_param_nullability) {
return false;
}
}
return true;
return IsNullabilityEquivalent(Thread::Current(), other_type_param, kind);
}
bool TypeParameter::IsRecursive(TrailPtr trail) const {
@ -26849,6 +26856,14 @@ void DumpFunctionTypeTable(Isolate* isolate) {
table.Release();
}
void DumpRecordTypeTable(Isolate* isolate) {
OS::PrintErr("canonical record types:\n");
CanonicalRecordTypeSet table(
isolate->group()->object_store()->canonical_record_types());
table.Dump();
table.Release();
}
void DumpTypeParameterTable(Isolate* isolate) {
OS::PrintErr("canonical type parameters (cloned from declarations):\n");
CanonicalTypeParameterSet table(
@ -27062,4 +27077,402 @@ ErrorPtr Class::VerifyEntryPoint() const {
}
}
AbstractTypePtr RecordType::FieldTypeAt(intptr_t index) const {
const Array& field_types = Array::Handle(untag()->field_types());
return AbstractType::RawCast(field_types.At(index));
}
void RecordType::SetFieldTypeAt(intptr_t index,
const AbstractType& value) const {
ASSERT(!value.IsNull());
const Array& field_types = Array::Handle(untag()->field_types());
field_types.SetAt(index, value);
}
void RecordType::set_field_types(const Array& value) const {
ASSERT(!value.IsNull() && (value.Length() > 0));
untag()->set_field_types(value.ptr());
}
StringPtr RecordType::FieldNameAt(intptr_t index) const {
const Array& field_names = Array::Handle(untag()->field_names());
return String::RawCast(field_names.At(index));
}
void RecordType::SetFieldNameAt(intptr_t index, const String& value) const {
ASSERT(!value.IsNull());
ASSERT(value.IsSymbol());
const Array& field_names = Array::Handle(untag()->field_names());
field_names.SetAt(index, value);
}
void RecordType::set_field_names(const Array& value) const {
ASSERT(value.ptr() == Object::empty_array().ptr() || value.Length() > 0);
untag()->set_field_names(value.ptr());
}
void RecordType::Print(NameVisibility name_visibility,
BaseTextBuffer* printer) const {
if (IsNull()) {
printer->AddString("null");
return;
}
Thread* thread = Thread::Current();
Zone* zone = thread->zone();
AbstractType& type = AbstractType::Handle(zone);
String& name = String::Handle(zone);
const intptr_t num_fields = NumFields();
const intptr_t num_positional_fields = NumPositionalFields();
printer->AddString("(");
for (intptr_t i = 0; i < num_fields; ++i) {
if (i != 0) {
printer->AddString(", ");
}
if (i >= num_positional_fields) {
name = FieldNameAt(i - num_positional_fields);
printer->AddString(name.ToCString());
printer->AddString(": ");
}
type = FieldTypeAt(i);
type.PrintName(name_visibility, printer);
}
printer->AddString(")");
printer->AddString(NullabilitySuffix(name_visibility));
}
const char* RecordType::ToCString() const {
Zone* zone = Thread::Current()->zone();
ZoneTextBuffer printer(zone);
Print(kInternalName, &printer);
return printer.buffer();
}
bool RecordType::IsInstantiated(Genericity genericity,
intptr_t num_free_fun_type_params,
TrailPtr trail) const {
AbstractType& type = AbstractType::Handle();
const intptr_t num_fields = NumFields();
for (intptr_t i = 0; i < num_fields; ++i) {
type = FieldTypeAt(i);
if (!type.IsInstantiated(genericity, num_free_fun_type_params, trail)) {
return false;
}
}
return true;
}
RecordTypePtr RecordType::New(Heap::Space space) {
ObjectPtr raw =
Object::Allocate(RecordType::kClassId, RecordType::InstanceSize(), space,
RecordType::ContainsCompressedPointers());
return static_cast<RecordTypePtr>(raw);
}
RecordTypePtr RecordType::New(const Array& field_types,
const Array& field_names,
Nullability nullability,
Heap::Space space) {
Zone* Z = Thread::Current()->zone();
const RecordType& result = RecordType::Handle(Z, RecordType::New(space));
result.set_field_types(field_types);
result.set_field_names(field_names);
result.set_nullability(nullability);
result.SetHash(0);
result.StoreNonPointer(&result.untag()->type_state_,
UntaggedType::kAllocated);
result.InitializeTypeTestingStubNonAtomic(
Code::Handle(Z, TypeTestingStubGenerator::DefaultCodeForType(result)));
return result.ptr();
}
void RecordType::set_type_state(uint8_t state) const {
ASSERT(state <= UntaggedRecordType::kFinalizedUninstantiated);
StoreNonPointer(&untag()->type_state_, state);
}
void RecordType::SetIsFinalized() const {
ASSERT(!IsFinalized());
if (IsInstantiated()) {
set_type_state(UntaggedRecordType::kFinalizedInstantiated);
} else {
set_type_state(UntaggedRecordType::kFinalizedUninstantiated);
}
}
void RecordType::SetIsBeingFinalized() const {
ASSERT(!IsFinalized() && !IsBeingFinalized());
set_type_state(UntaggedRecordType::kBeingFinalized);
}
RecordTypePtr RecordType::ToNullability(Nullability value,
Heap::Space space) const {
if (nullability() == value) {
return ptr();
}
// Clone record type and set new nullability.
RecordType& type = RecordType::Handle();
// Always cloning in old space and removing space parameter would not satisfy
// currently existing requests for type instantiation in new space.
type ^= Object::Clone(*this, space);
type.set_nullability(value);
type.SetHash(0);
type.InitializeTypeTestingStubNonAtomic(
Code::Handle(TypeTestingStubGenerator::DefaultCodeForType(type)));
if (IsCanonical()) {
// Object::Clone does not clone canonical bit.
ASSERT(!type.IsCanonical());
type ^= type.Canonicalize(Thread::Current(), nullptr);
}
return type.ptr();
}
bool RecordType::IsEquivalent(const Instance& other,
TypeEquality kind,
TrailPtr trail) const {
ASSERT(!IsNull());
if (ptr() == other.ptr()) {
return true;
}
if (other.IsTypeRef()) {
// Unfold right hand type. Divergence is controlled by left hand type.
const AbstractType& other_ref_type =
AbstractType::Handle(TypeRef::Cast(other).type());
ASSERT(!other_ref_type.IsTypeRef());
return IsEquivalent(other_ref_type, kind, trail);
}
if (!other.IsRecordType()) {
return false;
}
const RecordType& other_type = RecordType::Cast(other);
if ((NumFields() != other_type.NumFields()) ||
(NumNamedFields() != other_type.NumNamedFields())) {
// Different number of positional or named fields.
return false;
}
Thread* thread = Thread::Current();
Zone* zone = thread->zone();
if (!IsNullabilityEquivalent(thread, other_type, kind)) {
return false;
}
// Equal record types must have equal field types and names.
AbstractType& field_type = Type::Handle(zone);
AbstractType& other_field_type = Type::Handle(zone);
const intptr_t num_fields = NumFields();
for (intptr_t i = 0; i < num_fields; ++i) {
field_type = FieldTypeAt(i);
other_field_type = other_type.FieldTypeAt(i);
if (!field_type.IsEquivalent(other_field_type, kind, trail)) {
return false;
}
}
const intptr_t num_named_fields = NumNamedFields();
for (intptr_t i = 0; i < num_named_fields; ++i) {
field_type = FieldTypeAt(i);
other_field_type = other_type.FieldTypeAt(i);
if (FieldNameAt(i) != other_type.FieldNameAt(i)) {
return false;
}
}
return true;
}
uword RecordType::ComputeHash() const {
ASSERT(IsFinalized());
uint32_t result = 0;
// A legacy type should have the same hash as its non-nullable version to be
// consistent with the definition of type equality in Dart code.
Nullability type_nullability = nullability();
if (type_nullability == Nullability::kLegacy) {
type_nullability = Nullability::kNonNullable;
}
result = CombineHashes(result, static_cast<uint32_t>(type_nullability));
AbstractType& type = AbstractType::Handle();
const intptr_t num_fields = NumFields();
for (intptr_t i = 0; i < num_fields; ++i) {
type = FieldTypeAt(i);
result = CombineHashes(result, type.Hash());
}
const intptr_t num_named_fields = NumNamedFields();
if (num_named_fields > 0) {
String& field_name = String::Handle();
for (intptr_t i = 0; i < num_named_fields; ++i) {
field_name = FieldNameAt(i);
result = CombineHashes(result, field_name.Hash());
}
}
result = FinalizeHash(result, kHashBits);
SetHash(result);
return result;
}
bool RecordType::IsRecursive(TrailPtr trail) const {
AbstractType& type = AbstractType::Handle();
const intptr_t num_fields = NumFields();
for (intptr_t i = 0; i < num_fields; ++i) {
type = FieldTypeAt(i);
if (type.IsRecursive(trail)) {
return true;
}
}
return false;
}
bool RecordType::RequireConstCanonicalTypeErasure(Zone* zone,
TrailPtr trail) const {
if (IsNonNullable()) {
return true;
}
if (IsLegacy()) {
return false;
}
AbstractType& type = AbstractType::Handle();
const intptr_t num_fields = NumFields();
for (intptr_t i = 0; i < num_fields; ++i) {
type = FieldTypeAt(i);
if (type.RequireConstCanonicalTypeErasure(zone, trail)) {
return true;
}
}
return false;
}
AbstractTypePtr RecordType::Canonicalize(Thread* thread, TrailPtr trail) const {
ASSERT(IsFinalized());
Zone* zone = thread->zone();
AbstractType& type = AbstractType::Handle(zone);
if (IsCanonical()) {
#ifdef DEBUG
// Verify that all fields are allocated in old space and are canonical.
ASSERT(Array::Handle(zone, field_types()).IsOld());
ASSERT(Array::Handle(zone, field_names()).IsOld());
const intptr_t num_fields = NumFields();
for (intptr_t i = 0; i < num_fields; ++i) {
type = FieldTypeAt(i);
ASSERT(type.IsOld());
ASSERT(type.IsCanonical());
}
#endif
return ptr();
}
auto isolate_group = thread->isolate_group();
ObjectStore* object_store = isolate_group->object_store();
RecordType& rec = RecordType::Handle(zone);
{
SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
CanonicalRecordTypeSet table(zone, object_store->canonical_record_types());
rec ^= table.GetOrNull(CanonicalRecordTypeKey(*this));
ASSERT(object_store->canonical_record_types() == table.Release().ptr());
}
if (rec.IsNull()) {
ASSERT(Array::Handle(zone, field_types()).IsOld());
ASSERT(Array::Handle(zone, field_names()).IsOld());
const intptr_t num_fields = NumFields();
for (intptr_t i = 0; i < num_fields; ++i) {
type = FieldTypeAt(i);
if (!type.IsCanonical()) {
type = type.Canonicalize(thread, trail);
SetFieldTypeAt(i, type);
SetHash(0);
}
}
if (IsCanonical()) {
// Canonicalizing field types canonicalized this record as a
// side effect.
ASSERT(IsRecursive());
return this->ptr();
}
// Check to see if the record type got added to the canonical table as
// part of the canonicalization of its field types.
SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
CanonicalRecordTypeSet table(zone, object_store->canonical_record_types());
rec ^= table.GetOrNull(CanonicalRecordTypeKey(*this));
if (rec.IsNull()) {
// Add this record type into the canonical table of record types.
if (this->IsNew()) {
rec ^= Object::Clone(*this, Heap::kOld);
} else {
rec = this->ptr();
}
ASSERT(rec.IsOld());
rec.SetCanonical(); // Mark object as being canonical.
bool present = table.Insert(rec);
ASSERT(!present);
}
object_store->set_canonical_record_types(table.Release());
}
return rec.ptr();
}
#if defined(DEBUG)
bool RecordType::CheckIsCanonical(Thread* thread) const {
Zone* zone = thread->zone();
auto isolate_group = thread->isolate_group();
RecordType& type = RecordType::Handle(zone);
ObjectStore* object_store = isolate_group->object_store();
{
ASSERT(thread->isolate_group()
->constant_canonicalization_mutex()
->IsOwnedByCurrentThread());
CanonicalRecordTypeSet table(zone, object_store->canonical_record_types());
type ^= table.GetOrNull(CanonicalRecordTypeKey(*this));
object_store->set_canonical_record_types(table.Release());
}
return ptr() == type.ptr();
}
#endif // DEBUG
void RecordType::EnumerateURIs(URIs* uris) const {
AbstractType& type = AbstractType::Handle();
const intptr_t num_fields = NumFields();
for (intptr_t i = 0; i < num_fields; ++i) {
type = FieldTypeAt(i);
type.EnumerateURIs(uris);
}
}
AbstractTypePtr RecordType::InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
const TypeArguments& function_type_arguments,
intptr_t num_free_fun_type_params,
Heap::Space space,
TrailPtr trail) const {
ASSERT(IsFinalized() || IsBeingFinalized());
Zone* zone = Thread::Current()->zone();
const intptr_t num_fields = NumFields();
const Array& old_field_types = Array::Handle(zone, field_types());
const Array& new_field_types =
Array::Handle(zone, Array::New(num_fields, space));
AbstractType& type = AbstractType::Handle(zone);
for (intptr_t i = 0; i < num_fields; ++i) {
type ^= old_field_types.At(i);
if (!type.IsInstantiated()) {
type = type.InstantiateFrom(instantiator_type_arguments,
function_type_arguments,
num_free_fun_type_params, space, trail);
// A returned null type indicates a failed instantiation in dead code that
// must be propagated up to the caller, the optimizing compiler.
if (type.IsNull()) {
return RecordType::null();
}
}
new_field_types.SetAt(i, type);
}
const auto& rec = RecordType::Handle(
zone, RecordType::New(new_field_types, Array::Handle(zone, field_names()),
nullability(), space));
if (IsFinalized()) {
rec.SetIsFinalized();
} else {
if (IsBeingFinalized()) {
rec.SetIsBeingFinalized();
}
}
// Canonicalization is not part of instantiation.
return rec.ptr();
}
} // namespace dart


@ -439,6 +439,7 @@ class Object {
V(Instance, null_instance) \
V(Function, null_function) \
V(FunctionType, null_function_type) \
V(RecordType, null_record_type) \
V(TypeArguments, null_type_arguments) \
V(CompressedStackMaps, null_compressed_stackmaps) \
V(TypeArguments, empty_type_arguments) \
@ -1443,6 +1444,13 @@ class Class : public Object {
return GetClassId(cls) == kClosureCid;
}
// Check if this class represents the 'Record' class.
bool IsRecordClass() const {
// TODO(dartbug.com/49719)
// return id() == kRecordCid;
return false;
}
static bool IsInFullSnapshot(ClassPtr cls) {
NoSafepointScope no_safepoint;
return UntaggedLibrary::InFullSnapshotBit::decode(
@ -8470,6 +8478,10 @@ class AbstractType : public Instance {
TrailPtr trail = nullptr) const;
protected:
bool IsNullabilityEquivalent(Thread* thread,
const AbstractType& other_type,
TypeEquality kind) const;
HEAP_OBJECT_IMPLEMENTATION(AbstractType, Instance);
friend class Class;
friend class Function;
@ -8941,6 +8953,110 @@ class FunctionType : public AbstractType {
friend class Function;
};
// A RecordType represents the type of a record. It describes the
// number of positional and named fields, the types of the fields,
// and the names of the named fields.
class RecordType : public AbstractType {
public:
static intptr_t type_state_offset() {
return OFFSET_OF(UntaggedRecordType, type_state_);
}
static intptr_t hash_offset() { return OFFSET_OF(UntaggedRecordType, hash_); }
static intptr_t nullability_offset() {
return OFFSET_OF(UntaggedRecordType, nullability_);
}
virtual bool IsFinalized() const {
return (untag()->type_state_ == UntaggedType::kFinalizedInstantiated) ||
(untag()->type_state_ == UntaggedType::kFinalizedUninstantiated);
}
virtual void SetIsFinalized() const;
virtual bool IsBeingFinalized() const {
return untag()->type_state_ == UntaggedType::kBeingFinalized;
}
virtual void SetIsBeingFinalized() const;
virtual bool HasTypeClass() const { return false; }
virtual Nullability nullability() const {
return static_cast<Nullability>(untag()->nullability_);
}
RecordTypePtr ToNullability(Nullability value, Heap::Space space) const;
virtual classid_t type_class_id() const { return kIllegalCid; }
virtual bool IsInstantiated(Genericity genericity = kAny,
intptr_t num_free_fun_type_params = kAllFree,
TrailPtr trail = nullptr) const;
virtual bool IsEquivalent(const Instance& other,
TypeEquality kind,
TrailPtr trail = nullptr) const;
virtual bool IsRecursive(TrailPtr trail = nullptr) const;
virtual bool RequireConstCanonicalTypeErasure(Zone* zone,
TrailPtr trail = nullptr) const;
virtual AbstractTypePtr InstantiateFrom(
const TypeArguments& instantiator_type_arguments,
const TypeArguments& function_type_arguments,
intptr_t num_free_fun_type_params,
Heap::Space space,
TrailPtr trail = nullptr) const;
virtual AbstractTypePtr Canonicalize(Thread* thread, TrailPtr trail) const;
#if defined(DEBUG)
// Check if type is canonical.
virtual bool CheckIsCanonical(Thread* thread) const;
#endif // DEBUG
virtual void EnumerateURIs(URIs* uris) const;
virtual uword Hash() const;
uword ComputeHash() const;
bool IsSubtypeOf(const RecordType& other, Heap::Space space) const;
ArrayPtr field_types() const {
return untag()->field_types();
}
AbstractTypePtr FieldTypeAt(intptr_t index) const;
void SetFieldTypeAt(intptr_t index, const AbstractType& value) const;
// Names of the named fields, sorted.
ArrayPtr field_names() const {
return untag()->field_names();
}
StringPtr FieldNameAt(intptr_t index) const;
void SetFieldNameAt(intptr_t index, const String& value) const;
intptr_t NumFields() const;
intptr_t NumNamedFields() const;
intptr_t NumPositionalFields() const;
void Print(NameVisibility name_visibility, BaseTextBuffer* printer) const;
static intptr_t InstanceSize() {
return RoundedAllocationSize(sizeof(UntaggedRecordType));
}
static RecordTypePtr New(const Array& field_types,
const Array& field_names,
Nullability nullability = Nullability::kLegacy,
Heap::Space space = Heap::kOld);
private:
void SetHash(intptr_t value) const;
void set_type_state(uint8_t state) const;
void set_nullability(Nullability value) const {
ASSERT(!IsCanonical());
StoreNonPointer(&untag()->nullability_, static_cast<uint8_t>(value));
}
void set_field_types(const Array& value) const;
void set_field_names(const Array& value) const;
static RecordTypePtr New(Heap::Space space);
FINAL_HEAP_OBJECT_IMPLEMENTATION(RecordType, AbstractType);
friend class Class;
friend class ClearTypeHashVisitor;
friend class Record;
};
// A TypeRef is used to break cycles in the representation of recursive types.
// Its only field is the recursive AbstractType it refers to, which can
// temporarily be null during finalization.
@ -12594,6 +12710,33 @@ inline void FunctionType::SetHash(intptr_t value) const {
untag()->set_hash(Smi::New(value));
}
inline uword RecordType::Hash() const {
ASSERT(IsFinalized());
intptr_t result = Smi::Value(untag()->hash());
if (result != 0) {
return result;
}
return ComputeHash();
}
inline void RecordType::SetHash(intptr_t value) const {
// This is only safe because we create a new Smi, which does not cause
// heap allocation.
untag()->set_hash(Smi::New(value));
}
inline intptr_t RecordType::NumFields() const {
return Array::LengthOf(field_types());
}
inline intptr_t RecordType::NumNamedFields() const {
return Array::LengthOf(field_names());
}
inline intptr_t RecordType::NumPositionalFields() const {
return NumFields() - NumNamedFields();
}
inline uword TypeParameter::Hash() const {
ASSERT(IsFinalized() || IsBeingFinalized()); // Bound may not be finalized.
intptr_t result = Smi::Value(untag()->hash());


@ -67,6 +67,7 @@
V(PcDescriptors) \
V(Pointer) \
V(ReceivePort) \
V(RecordType) \
V(RegExp) \
V(Script) \
V(Sentinel) \


@ -1240,6 +1240,29 @@ void FunctionType::PrintJSONImpl(JSONStream* stream, bool ref) const {
}
}
void RecordType::PrintJSONImpl(JSONStream* stream, bool ref) const {
JSONObject jsobj(stream);
PrintSharedInstanceJSON(&jsobj, ref);
jsobj.AddProperty("kind", "RecordType");
{
JSONArray arr(&jsobj, "fields");
const intptr_t num_fields = NumFields();
const intptr_t num_positional_fields = NumPositionalFields();
AbstractType& type = AbstractType::Handle();
String& name = String::Handle();
for (intptr_t i = 0; i < num_fields; ++i) {
JSONObject field(&arr);
type = FieldTypeAt(i);
field.AddProperty("type", type);
if (i >= num_positional_fields) {
name = FieldNameAt(i - num_positional_fields);
field.AddProperty("name", name.ToCString());
}
}
}
}
void TypeRef::PrintJSONImpl(JSONStream* stream, bool ref) const {
JSONObject jsobj(stream);
PrintSharedInstanceJSON(&jsobj, ref);


@ -135,6 +135,7 @@ class ObjectPointerVisitor;
ARW_AR(Array, symbol_table) \
RW(Array, canonical_types) \
RW(Array, canonical_function_types) \
RW(Array, canonical_record_types) \
RW(Array, canonical_type_parameters) \
RW(Array, canonical_type_arguments) \
RW(Library, async_library) \


@ -529,6 +529,7 @@ COMPRESSED_VISITOR(KernelProgramInfo)
COMPRESSED_VISITOR(WeakSerializationReference)
COMPRESSED_VISITOR(Type)
COMPRESSED_VISITOR(FunctionType)
COMPRESSED_VISITOR(RecordType)
COMPRESSED_VISITOR(TypeRef)
COMPRESSED_VISITOR(TypeParameter)
COMPRESSED_VISITOR(Function)


@ -2718,6 +2718,20 @@ class UntaggedFunctionType : public UntaggedAbstractType {
friend class Function;
};
class UntaggedRecordType : public UntaggedAbstractType {
private:
RAW_HEAP_OBJECT_IMPLEMENTATION(RecordType);
COMPRESSED_POINTER_FIELD(ArrayPtr, field_types)
COMPRESSED_POINTER_FIELD(ArrayPtr, field_names);
COMPRESSED_POINTER_FIELD(SmiPtr, hash)
VISIT_TO(hash)
uint8_t type_state_;
uint8_t nullability_;
CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
};
class UntaggedTypeRef : public UntaggedAbstractType {
private:
RAW_HEAP_OBJECT_IMPLEMENTATION(TypeRef);


@ -2869,6 +2869,11 @@ static void CollectStringifiedType(Zone* zone,
output.Add(instance);
return;
}
if (type.IsRecordType()) {
// _Record class is not useful for the CFE. We use null instead.
output.Add(instance);
return;
}
if (type.IsDynamicType()) {
// Dynamic is weird in that it seems to have a class with no name and a
// library called something like '7189777121420'. We use null instead.


@ -376,6 +376,7 @@ class ObjectPointerVisitor;
V(_ParameterMirror, "_ParameterMirror") \
V(_Random, "_Random") \
V(_RawReceivePortImpl, "_RawReceivePortImpl") \
V(_RecordType, "_RecordType") \
V(_RegExp, "_RegExp") \
V(_SendPortImpl, "_SendPortImpl") \
V(_Smi, "_Smi") \


@ -388,6 +388,7 @@ DEFINE_TAGGED_POINTER(TypeParameters, Object)
DEFINE_TAGGED_POINTER(AbstractType, Instance)
DEFINE_TAGGED_POINTER(Type, AbstractType)
DEFINE_TAGGED_POINTER(FunctionType, AbstractType)
DEFINE_TAGGED_POINTER(RecordType, AbstractType)
DEFINE_TAGGED_POINTER(TypeRef, AbstractType)
DEFINE_TAGGED_POINTER(TypeParameter, AbstractType)
DEFINE_TAGGED_POINTER(Closure, Instance)


@ -126,7 +126,7 @@ CodePtr TypeTestingStubGenerator::DefaultCodeForType(
}
}
if (type.IsFunctionType()) {
if (type.IsFunctionType() || type.IsRecordType()) {
const bool nullable = Instance::NullIsAssignableTo(type);
return nullable ? StubCode::DefaultNullableTypeTest().ptr()
: StubCode::DefaultTypeTest().ptr();
@ -1006,6 +1006,7 @@ static void UnwrapAbstractType(compiler::Assembler* assembler,
__ CompareImmediate(scratch, kTypeParameterCid);
__ BranchIf(EQUAL, is_type_parameter);
}
// TODO(dartbug.com/49719)
}
void TypeTestingStubGenerator::BuildOptimizedTypeParameterArgumentValueCheck(
@ -1153,6 +1154,7 @@ void TypeTestingStubGenerator::BuildOptimizedTypeArgumentValueCheck(
__ Bind(&check_subtype_cid);
UnwrapAbstractType(assembler, TTSInternalRegs::kSubTypeArgumentReg,
TTSInternalRegs::kScratchReg, &sub_is_type);
// TODO(dartbug.com/49719)
__ Comment("Checks for FunctionType");
__ EnsureHasClassIdInDEBUG(kFunctionTypeCid,
TTSInternalRegs::kSubTypeArgumentReg,
@ -1367,6 +1369,10 @@ AbstractTypePtr TypeArgumentInstantiator::InstantiateType(
// No support for function types yet.
UNREACHABLE();
return nullptr;
} else if (type.IsRecordType()) {
// No support for record types yet.
UNREACHABLE();
return nullptr;
} else if (type.IsTypeRef()) {
// No support for recursive types.
UNREACHABLE();


@ -210,6 +210,9 @@ class TypeArgumentClassFinder {
} else if (type.IsFunctionType()) {
// No support for function types yet.
return false;
} else if (type.IsRecordType()) {
// No support for record types yet.
return false;
} else if (type.IsTypeRef()) {
// No support for recursive types.
return false;


@ -6,13 +6,11 @@
// These Dart classes correspond to the VM internal implementation classes.
// Equivalent of AbstractTypeLayout.
abstract class _AbstractType implements Type {
@pragma("vm:external-name", "AbstractType_toString")
external String toString();
}
// Equivalent of TypeLayout.
@pragma("vm:entry-point")
class _Type extends _AbstractType {
factory _Type._uninstantiable() {
@ -30,7 +28,6 @@ class _Type extends _AbstractType {
external bool operator ==(other);
}
// Equivalent of FunctionTypeLayout.
@pragma("vm:entry-point")
class _FunctionType extends _AbstractType {
factory _FunctionType._uninstantiable() {
@ -48,7 +45,13 @@ class _FunctionType extends _AbstractType {
external bool operator ==(other);
}
// Equivalent of TypeRefLayout.
@pragma("vm:entry-point")
class _RecordType extends _AbstractType {
factory _RecordType._uninstantiable() {
throw "Unreachable";
}
}
@pragma("vm:entry-point")
class _TypeRef extends _AbstractType {
factory _TypeRef._uninstantiable() {
@ -56,7 +59,6 @@ class _TypeRef extends _AbstractType {
}
}
// Equivalent of TypeParameterLayout.
@pragma("vm:entry-point")
class _TypeParameter extends _AbstractType {
factory _TypeParameter._uninstantiable() {