[vm] Record literals

TEST=language/record_literal_test

Issue: https://github.com/dart-lang/sdk/issues/49719
Change-Id: I287586c0adb19fe401d76c7a586133a1fe9f1d1d
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/257264
Commit-Queue: Alexander Markov <alexmarkov@google.com>
Reviewed-by: Ryan Macnak <rmacnak@google.com>
This commit is contained in:
Alexander Markov 2022-09-12 23:08:17 +00:00 committed by Commit Bot
parent c94103ae05
commit 3ec7cf9c34
34 changed files with 1322 additions and 1009 deletions

View file

@ -834,6 +834,13 @@ class Assembler : public AssemblerBase {
// dest := dest + src.
void AddRegisters(Register dest, Register src) {
add(dest, dest, Operand(src));
}
// dest := value + (src << scale).
// NOTE(review): dest is written (LoadImmediate) before src is read, so
// dest and src presumably must be distinct registers -- confirm at call
// sites (the AllocateRecord stub passes temp_reg / num_fields_reg).
void AddScaled(Register dest,
Register src,
ScaleFactor scale,
int32_t value) {
LoadImmediate(dest, value);
add(dest, dest, Operand(src, LSL, scale));
}
void SubImmediate(Register rd,
Register rn,
int32_t value,

View file

@ -1820,6 +1820,13 @@ class Assembler : public AssemblerBase {
// dest := dest + src.
void AddRegisters(Register dest, Register src) {
add(dest, dest, Operand(src));
}
// dest := value + (src << scale).
// NOTE(review): dest is written (LoadImmediate) before src is read, so
// dest and src presumably must be distinct registers -- confirm at call
// sites.
void AddScaled(Register dest,
Register src,
ScaleFactor scale,
int32_t value) {
LoadImmediate(dest, value);
add(dest, dest, Operand(src, LSL, scale));
}
void SubImmediateSetFlags(Register dest,
Register rn,
int64_t imm,

View file

@ -728,6 +728,12 @@ class Assembler : public AssemblerBase {
// dest := dest + src (32-bit add).
void AddRegisters(Register dest, Register src) {
addl(dest, src);
}
// dest := value + (src << scale), computed with a single lea, which does
// not modify flags and reads its address operand in full before writing
// dest (so dest == src is fine).
void AddScaled(Register dest,
Register src,
ScaleFactor scale,
int32_t value) {
leal(dest, Address(src, scale, value));
}
void SubImmediate(Register reg, const Immediate& imm);
void SubRegisters(Register dest, Register src) {

View file

@ -943,6 +943,13 @@ class Assembler : public MicroAssembler {
// dest := dest + src.
void AddRegisters(Register dest, Register src) {
add(dest, dest, src);
}
// dest := (src << scale) + value. The shift reads src before writing
// dest, so dest == src is fine.
// NOTE(review): assumes value fits the addi immediate encoding (or that
// MicroAssembler::addi handles larger immediates) -- confirm.
void AddScaled(Register dest,
Register src,
ScaleFactor scale,
int32_t value) {
slli(dest, src, scale);
addi(dest, dest, value);
}
// dest := dest - src.
void SubRegisters(Register dest, Register src) {
sub(dest, dest, src);
}

View file

@ -767,6 +767,12 @@ class Assembler : public AssemblerBase {
// dest := dest + src (64-bit add).
void AddRegisters(Register dest, Register src) {
addq(dest, src);
}
// dest := value + (src << scale), computed with a single lea, which does
// not modify flags and reads its address operand in full before writing
// dest (so dest == src is fine).
void AddScaled(Register dest,
Register src,
ScaleFactor scale,
int32_t value) {
leaq(dest, Address(src, scale, value));
}
void AddImmediate(Register dest, Register src, int32_t value);
void AddImmediate(const Address& address, const Immediate& imm);
void SubImmediate(Register reg,

View file

@ -938,6 +938,10 @@ void ConstantPropagator::VisitAllocateClosure(AllocateClosureInstr* instr) {
SetValue(instr, non_constant_);
}
// A freshly allocated record is never a compile-time constant: pin the
// lattice value to non-constant.
void ConstantPropagator::VisitAllocateRecord(AllocateRecordInstr* instr) {
SetValue(instr, non_constant_);
}
// Untagged loads are likewise always treated as non-constant.
void ConstantPropagator::VisitLoadUntagged(LoadUntaggedInstr* instr) {
SetValue(instr, non_constant_);
}

View file

@ -7401,6 +7401,26 @@ void SuspendInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
#endif
}
// AllocateRecord takes no value inputs (the field count and field names
// are instruction constants), so the summary only needs to mark this as a
// call and fix the output to the allocation stub's result register.
LocationSummary* AllocateRecordInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = 0;
const intptr_t kNumTemps = 0;
LocationSummary* locs = new (zone)
LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kCall);
locs->set_out(0, Location::RegisterLocation(AllocateRecordABI::kResultReg));
return locs;
}
// Calls the shared AllocateRecord stub with the field count and the
// field-name array preloaded into the AllocateRecordABI registers.
void AllocateRecordInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
const Code& stub = Code::ZoneHandle(
compiler->zone(),
compiler->isolate_group()->object_store()->allocate_record_stub());
__ LoadImmediate(AllocateRecordABI::kNumFieldsReg, num_fields());
__ LoadObject(AllocateRecordABI::kFieldNamesReg, field_names());
compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,
locs(), deopt_id(), env());
}
#undef __
} // namespace dart

View file

@ -457,6 +457,7 @@ struct InstrAttrs {
M(CreateArray, _) \
M(AllocateObject, _) \
M(AllocateClosure, _) \
M(AllocateRecord, _) \
M(AllocateTypedData, _) \
M(LoadField, _) \
M(LoadUntagged, kNoGC) \
@ -6992,6 +6993,46 @@ class AllocateUninitializedContextInstr : public TemplateAllocation<0> {
DISALLOW_COPY_AND_ASSIGN(AllocateUninitializedContextInstr);
};
// Allocates and null initializes a record object.
class AllocateRecordInstr : public TemplateAllocation<0> {
public:
// [field_names] holds the names of the record's named fields (the empty
// array when all fields are positional); it must not be a temporary
// scoped handle and must be canonical, as asserted below.
AllocateRecordInstr(const InstructionSource& source,
intptr_t num_fields,
const Array& field_names,
intptr_t deopt_id)
: TemplateAllocation(source, deopt_id),
num_fields_(num_fields),
field_names_(field_names) {
ASSERT(field_names.IsNotTemporaryScopedHandle());
ASSERT(field_names.IsCanonical());
}
DECLARE_INSTRUCTION(AllocateRecord)
virtual CompileType ComputeType() const;
// Total number of fields, positional plus named.
intptr_t num_fields() const { return num_fields_; }
const Array& field_names() const { return field_names_; }
// Allocation has no observable side effects beyond producing the object.
virtual bool HasUnknownSideEffects() const { return false; }
// True when an instance of this size fits in new space; presumably this
// lets initializing stores into the result skip the write barrier --
// confirm against StoreFieldInstr barrier logic.
virtual bool WillAllocateNewOrRemembered() const {
return Heap::IsAllocatableInNewSpace(
compiler::target::Record::InstanceSize(num_fields_));
}
#define FIELD_LIST(F) \
F(const intptr_t, num_fields_) \
F(const Array&, field_names_)
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(AllocateRecordInstr,
TemplateAllocation,
FIELD_LIST)
#undef FIELD_LIST
private:
DISALLOW_COPY_AND_ASSIGN(AllocateRecordInstr);
};
// This instruction captures the state of the object which had its allocation
// removed during the AllocationSinking pass.
// It does not produce any real code only deoptimization information.

View file

@ -2181,6 +2181,9 @@ void Slot::Write(FlowGraphSerializer* s) const {
case Kind::kArrayElement:
s->Write<intptr_t>(offset_in_bytes_);
break;
case Kind::kRecordField:
s->Write<intptr_t>(offset_in_bytes_);
break;
case Kind::kCapturedVariable:
s->Write<int8_t>(flags_);
s->Write<intptr_t>(offset_in_bytes_);
@ -2224,6 +2227,12 @@ const Slot& Slot::Read(FlowGraphDeserializer* d) {
offset = d->Read<intptr_t>();
data = ":array_element";
break;
case Kind::kRecordField:
flags = IsNullableBit::encode(true) |
IsCompressedBit::encode(Record::ContainsCompressedPointers());
offset = d->Read<intptr_t>();
data = ":record_field";
break;
case Kind::kCapturedVariable:
flags = d->Read<int8_t>();
offset = d->Read<intptr_t>();

View file

@ -2780,6 +2780,7 @@ void LoadFieldInstr::InferRange(RangeAnalysis* analysis, Range* range) {
case Slot::Kind::kDartField:
case Slot::Kind::kCapturedVariable:
case Slot::Kind::kRecordField:
// Use default value.
Definition::InferRange(analysis, range);
break;

View file

@ -46,61 +46,6 @@ class SlotCache : public ZoneAllocated {
PointerSet<const Slot> fields_;
};
#define NATIVE_SLOT_NAME(C, F) Kind::k##C##_##F
#define NATIVE_TO_STR(C, F) #C "_" #F
// Maps a Slot::Kind to a stable string name. Native slots expand via
// NATIVE_SLOTS_LIST; the remaining synthetic kinds are spelled out.
const char* Slot::KindToCString(Kind k) {
switch (k) {
#define NATIVE_CASE(C, __, F, ___, ____) \
case NATIVE_SLOT_NAME(C, F): \
return NATIVE_TO_STR(C, F);
NATIVE_SLOTS_LIST(NATIVE_CASE)
#undef NATIVE_CASE
case Kind::kTypeArguments:
return "TypeArguments";
case Kind::kArrayElement:
return "ArrayElement";
case Kind::kCapturedVariable:
return "CapturedVariable";
case Kind::kDartField:
return "DartField";
default:
UNREACHABLE();
return nullptr;
}
}
// Inverse of KindToCString: parses [str] into *out and returns true, or
// returns false when the name is not recognized.
bool Slot::ParseKind(const char* str, Kind* out) {
ASSERT(str != nullptr && out != nullptr);
#define NATIVE_CASE(C, __, F, ___, ____) \
if (strcmp(str, NATIVE_TO_STR(C, F)) == 0) { \
*out = NATIVE_SLOT_NAME(C, F); \
return true; \
}
NATIVE_SLOTS_LIST(NATIVE_CASE)
#undef NATIVE_CASE
if (strcmp(str, "TypeArguments") == 0) {
*out = Kind::kTypeArguments;
return true;
}
if (strcmp(str, "ArrayElement") == 0) {
*out = Kind::kArrayElement;
return true;
}
if (strcmp(str, "CapturedVariable") == 0) {
*out = Kind::kCapturedVariable;
return true;
}
if (strcmp(str, "DartField") == 0) {
*out = Kind::kDartField;
return true;
}
return false;
}
#undef NATIVE_TO_STR
#undef NATIVE_SLOT_NAME
static classid_t GetUnboxedNativeSlotCid(Representation rep) {
// Currently we only support integer unboxed fields.
if (RepresentationUtils::IsUnboxedInteger(rep)) {
@ -243,6 +188,7 @@ bool Slot::IsImmutableLengthSlot() const {
case Slot::Kind::kFunctionType_named_parameter_names:
case Slot::Kind::kFunctionType_parameter_types:
case Slot::Kind::kFunctionType_type_parameters:
case Slot::Kind::kRecordField:
case Slot::Kind::kSuspendState_function_data:
case Slot::Kind::kSuspendState_then_callback:
case Slot::Kind::kSuspendState_error_callback:
@ -345,6 +291,15 @@ const Slot& Slot::GetArrayElementSlot(Thread* thread,
/*static_type=*/nullptr, kTagged);
}
// Returns the canonical Slot for a record field at [offset_in_bytes].
// Record field slots are nullable and tagged, compressed iff Record
// stores compressed pointers, and carry no static type or class id
// (kDynamicCid) since record shapes are not distinguished yet.
const Slot& Slot::GetRecordFieldSlot(Thread* thread, intptr_t offset_in_bytes) {
return GetCanonicalSlot(
thread, Kind::kRecordField,
IsNullableBit::encode(true) |
IsCompressedBit::encode(Record::ContainsCompressedPointers()),
kDynamicCid, offset_in_bytes, ":record_field",
/*static_type=*/nullptr, kTagged);
}
const Slot& Slot::GetCanonicalSlot(Thread* thread,
Slot::Kind kind,
int8_t flags,
@ -567,6 +522,7 @@ bool Slot::Equals(const Slot& other) const {
case Kind::kTypeArguments:
case Kind::kTypeArgumentsIndex:
case Kind::kArrayElement:
case Kind::kRecordField:
return true;
case Kind::kCapturedVariable:

View file

@ -248,6 +248,9 @@ class Slot : public ZoneAllocated {
// Only used during allocation sinking and in MaterializeObjectInstr.
kArrayElement,
// A slot corresponding to a record field at the given offset.
kRecordField,
// A slot within a Context object that contains a value of a captured
// local variable.
kCapturedVariable,
@ -257,9 +260,6 @@ class Slot : public ZoneAllocated {
};
// clang-format on
static const char* KindToCString(Kind k);
static bool ParseKind(const char* str, Kind* k);
// Returns a slot that represents length field for the given [array_cid].
static const Slot& GetLengthFieldForArrayCid(intptr_t array_cid);
@ -278,6 +278,12 @@ class Slot : public ZoneAllocated {
static const Slot& GetArrayElementSlot(Thread* thread,
intptr_t offset_in_bytes);
// Returns a slot corresponding to a record field at [offset_in_bytes].
// TODO(dartbug.com/49719): distinguish slots of records with different
// shapes.
static const Slot& GetRecordFieldSlot(Thread* thread,
intptr_t offset_in_bytes);
// Returns a slot that represents the given captured local variable.
static const Slot& GetContextVariableSlotFor(Thread* thread,
const LocalVariable& var);
@ -301,6 +307,9 @@ class Slot : public ZoneAllocated {
bool IsTypeArguments() const { return kind() == Kind::kTypeArguments; }
bool IsArgumentOfType() const { return kind() == Kind::kTypeArgumentsIndex; }
bool IsArrayElement() const { return kind() == Kind::kArrayElement; }
bool IsRecordField() const {
return kind() == Kind::kRecordField;
}
bool IsImmutableLengthSlot() const;
const char* Name() const;

View file

@ -1649,6 +1649,10 @@ CompileType AllocateClosureInstr::ComputeType() const {
return CompileType::FromCid(kClosureCid);
}
// A record allocation always produces an instance with cid kRecordCid.
CompileType AllocateRecordInstr::ComputeType() const {
return CompileType::FromCid(kRecordCid);
}
// The result of an untagged load is opaque to the type system.
CompileType LoadUntaggedInstr::ComputeType() const {
return CompileType::Dynamic();
}

View file

@ -933,6 +933,15 @@ Fragment BaseFlowGraphBuilder::CreateArray() {
return Fragment(array);
}
// Appends an AllocateRecordInstr for a record with [num_fields] fields
// ([field_names] names the named ones) and pushes the new record onto the
// simulated expression stack.
Fragment BaseFlowGraphBuilder::AllocateRecord(TokenPosition position,
intptr_t num_fields,
const Array& field_names) {
AllocateRecordInstr* allocate = new (Z) AllocateRecordInstr(
InstructionSource(position), num_fields, field_names, GetNextDeoptId());
Push(allocate);
return Fragment(allocate);
}
Fragment BaseFlowGraphBuilder::AllocateTypedData(TokenPosition position,
classid_t class_id) {
Value* num_elements = Pop();

View file

@ -353,6 +353,9 @@ class BaseFlowGraphBuilder {
// Top of the stack should be the closure function.
Fragment AllocateClosure(TokenPosition position = TokenPosition::kNoSource);
Fragment CreateArray();
Fragment AllocateRecord(TokenPosition position,
intptr_t num_fields,
const Array& field_names);
Fragment AllocateTypedData(TokenPosition position, classid_t class_id);
Fragment InstantiateType(const AbstractType& type);
Fragment InstantiateTypeArguments(const TypeArguments& type_arguments);

View file

@ -1129,6 +1129,8 @@ Fragment StreamingFlowGraphBuilder::BuildExpression(
break;
case kMapLiteral:
return BuildMapLiteral(position);
case kRecordLiteral:
return BuildRecordLiteral(position);
case kFunctionExpression:
return BuildFunctionExpression();
case kLet:
@ -4026,6 +4028,69 @@ Fragment StreamingFlowGraphBuilder::BuildMapLiteral(TokenPosition* p) {
StaticCall(position, factory_method, 2, ICData::kStatic);
}
// Builds flow graph for a RecordLiteral kernel expression:
// allocate the record, then evaluate and store each positional and named
// field in order. The kernel encoding is
//   position, positional expressions, named expressions, record type.
Fragment StreamingFlowGraphBuilder::BuildRecordLiteral(TokenPosition* p) {
const TokenPosition position = ReadPosition(); // read position.
if (p != nullptr) *p = position;
// Figure out record shape.
const intptr_t positional_count = ReadListLength();
intptr_t named_count = -1;
const Array* field_names = &Object::empty_array();
{
// Pre-pass: peek ahead past the positional expressions to count the
// named fields and collect their names. AlternativeReadingScope rewinds
// the reader when this scope exits, so the expressions are re-read
// below when the stores are built.
AlternativeReadingScope alt(&reader_);
for (intptr_t i = 0; i < positional_count; ++i) {
SkipExpression();
}
named_count = ReadListLength();
if (named_count > 0) {
Array& names = Array::ZoneHandle(Z, Array::New(named_count, Heap::kOld));
for (intptr_t i = 0; i < named_count; ++i) {
String& name =
H.DartSymbolObfuscate(ReadStringReference()); // read ith name.
SkipExpression(); // read ith expression.
names.SetAt(i, name);
}
// Canonicalize so AllocateRecordInstr's IsCanonical() assert holds.
names ^= H.Canonicalize(names);
field_names = &names;
}
}
const intptr_t num_fields = positional_count + named_count;
// TODO(dartbug.com/49719): provide specialized allocation stubs for small
// records.
Fragment instructions;
instructions += B->AllocateRecord(position, num_fields, *field_names);
LocalVariable* record = MakeTemporary();
// List of positional.
intptr_t pos = 0;
for (intptr_t i = 0; i < positional_count; ++i, ++pos) {
instructions += LoadLocal(record);
instructions += BuildExpression(); // read ith expression.
instructions += B->StoreNativeField(
Slot::GetRecordFieldSlot(thread(),
compiler::target::Record::field_offset(pos)),
StoreFieldInstr::Kind::kInitializing);
}
// List of named. Named fields are stored after the positionals, at
// consecutive offsets (pos keeps counting up).
ReadListLength(); // read list length.
for (intptr_t i = 0; i < named_count; ++i, ++pos) {
SkipStringReference(); // read ith name.
instructions += LoadLocal(record);
instructions += BuildExpression(); // read ith expression.
instructions += B->StoreNativeField(
Slot::GetRecordFieldSlot(thread(),
compiler::target::Record::field_offset(pos)),
StoreFieldInstr::Kind::kInitializing);
}
SkipDartType(); // read recordType.
return instructions;
}
Fragment StreamingFlowGraphBuilder::BuildFunctionExpression() {
ReadPosition(); // read position.
return BuildFunctionNode(TokenPosition::kNoSource, StringIndex(),

View file

@ -320,6 +320,7 @@ class StreamingFlowGraphBuilder : public KernelReaderHelper {
Fragment BuildThrow(TokenPosition* position);
Fragment BuildListLiteral(TokenPosition* position);
Fragment BuildMapLiteral(TokenPosition* position);
Fragment BuildRecordLiteral(TokenPosition* position);
Fragment BuildFunctionExpression();
Fragment BuildLet(TokenPosition* position);
Fragment BuildBlockExpression();

View file

@ -40,6 +40,7 @@ class KernelFingerprintHelper : public KernelReaderHelper {
void CalculateVariableDeclarationFingerprint();
void CalculateStatementListFingerprint();
void CalculateListOfExpressionsFingerprint();
void CalculateListOfNamedExpressionsFingerprint();
void CalculateListOfDartTypesFingerprint();
void CalculateListOfVariableDeclarationsFingerprint();
void CalculateStringReferenceFingerprint();
@ -87,14 +88,8 @@ void KernelFingerprintHelper::CalculateArgumentsFingerprint() {
BuildHash(ReadUInt()); // read argument count.
CalculateListOfDartTypesFingerprint(); // read list of types.
CalculateListOfExpressionsFingerprint(); // read positionals.
// List of named.
intptr_t list_length = ReadListLength(); // read list length.
for (intptr_t i = 0; i < list_length; ++i) {
CalculateStringReferenceFingerprint(); // read ith name index.
CalculateExpressionFingerprint(); // read ith expression.
}
CalculateListOfExpressionsFingerprint(); // read positional.
CalculateListOfNamedExpressionsFingerprint(); // read named.
}
void KernelFingerprintHelper::CalculateVariableDeclarationFingerprint() {
@ -128,6 +123,14 @@ void KernelFingerprintHelper::CalculateListOfExpressionsFingerprint() {
}
}
// Hashes a kernel list of named expressions: for each entry, the name's
// string reference followed by the expression itself.
void KernelFingerprintHelper::CalculateListOfNamedExpressionsFingerprint() {
const intptr_t list_length = ReadListLength(); // read list length.
for (intptr_t i = 0; i < list_length; ++i) {
CalculateStringReferenceFingerprint(); // read ith name index.
CalculateExpressionFingerprint(); // read ith expression.
}
}
void KernelFingerprintHelper::CalculateListOfDartTypesFingerprint() {
intptr_t list_length = ReadListLength(); // read list length.
for (intptr_t i = 0; i < list_length; ++i) {
@ -589,6 +592,12 @@ void KernelFingerprintHelper::CalculateExpressionFingerprint() {
}
return;
}
case kRecordLiteral:
ReadPosition(); // read position.
CalculateListOfExpressionsFingerprint(); // read positionals.
CalculateListOfNamedExpressionsFingerprint(); // read named.
CalculateDartTypeFingerprint(); // read recordType.
return;
case kFunctionExpression:
ReadPosition(); // read position.
CalculateFunctionNodeFingerprint(); // read function node.

View file

@ -2300,6 +2300,14 @@ void KernelReaderHelper::SkipListOfExpressions() {
}
}
// Skips a kernel list of named expressions: each entry is a name's string
// reference followed by an expression.
void KernelReaderHelper::SkipListOfNamedExpressions() {
const intptr_t list_length = ReadListLength(); // read list length.
for (intptr_t i = 0; i < list_length; ++i) {
SkipStringReference(); // read ith name index.
SkipExpression(); // read ith expression.
}
}
void KernelReaderHelper::SkipListOfDartTypes() {
intptr_t list_length = ReadListLength(); // read list length.
for (intptr_t i = 0; i < list_length; ++i) {
@ -2603,6 +2611,12 @@ void KernelReaderHelper::SkipExpression() {
}
return;
}
case kRecordLiteral:
ReadPosition(); // read position.
SkipListOfExpressions(); // read positionals.
SkipListOfNamedExpressions(); // read named.
SkipDartType(); // read recordType.
return;
case kFunctionExpression:
ReadPosition(); // read position.
SkipFunctionNode(); // read function node.
@ -2829,14 +2843,8 @@ void KernelReaderHelper::SkipArguments() {
ReadUInt(); // read argument count.
SkipListOfDartTypes(); // read list of types.
SkipListOfExpressions(); // read positionals.
// List of named.
intptr_t list_length = ReadListLength(); // read list length.
for (intptr_t i = 0; i < list_length; ++i) {
SkipStringReference(); // read ith name index.
SkipExpression(); // read ith expression.
}
SkipListOfExpressions(); // read positional.
SkipListOfNamedExpressions(); // read named.
}
void KernelReaderHelper::SkipVariableDeclaration() {

View file

@ -1275,6 +1275,7 @@ class KernelReaderHelper {
void SkipFunctionType(bool simple);
void SkipStatementList();
void SkipListOfExpressions();
void SkipListOfNamedExpressions();
void SkipListOfDartTypes();
void SkipListOfStrings();
void SkipListOfVariableDeclarations();

View file

@ -823,10 +823,7 @@ void ScopeBuilder::VisitExpression() {
}
case kStringConcatenation: {
helper_.ReadPosition(); // read position.
intptr_t list_length = helper_.ReadListLength(); // read list length.
for (intptr_t i = 0; i < list_length; ++i) {
VisitExpression(); // read ith expression.
}
VisitListOfExpressions();
return;
}
case kIsExpression:
@ -859,10 +856,7 @@ void ScopeBuilder::VisitExpression() {
case kListLiteral: {
helper_.ReadPosition(); // read position.
VisitDartType(); // read type.
intptr_t list_length = helper_.ReadListLength(); // read list length.
for (intptr_t i = 0; i < list_length; ++i) {
VisitExpression(); // read ith expression.
}
VisitListOfExpressions();
return;
}
case kSetLiteral: {
@ -882,6 +876,12 @@ void ScopeBuilder::VisitExpression() {
}
return;
}
case kRecordLiteral:
helper_.ReadPosition(); // read position.
VisitListOfExpressions(); // read positionals.
VisitListOfNamedExpressions(); // read named.
VisitDartType(); // read recordType.
return;
case kFunctionExpression: {
intptr_t offset = helper_.ReaderOffset() - 1; // -1 to include tag byte.
helper_.ReadPosition(); // read position.
@ -1085,10 +1085,7 @@ void ScopeBuilder::VisitStatement() {
if (tag == kSomething) {
VisitExpression(); // read rest of condition.
}
list_length = helper_.ReadListLength(); // read number of updates.
for (intptr_t i = 0; i < list_length; ++i) {
VisitExpression(); // read ith update.
}
VisitListOfExpressions(); // read updates.
VisitStatement(); // read body.
ExitScope(position, helper_.reader_.max_position());
@ -1259,6 +1256,21 @@ void ScopeBuilder::VisitStatement() {
}
}
// Reads a list length and visits each expression in the list.
void ScopeBuilder::VisitListOfExpressions() {
const intptr_t list_length = helper_.ReadListLength(); // read list length.
for (intptr_t i = 0; i < list_length; ++i) {
VisitExpression();
}
}
// Reads a list length and visits each named expression. The name itself
// is irrelevant to scope building, so it is skipped; only the value
// expression is visited.
void ScopeBuilder::VisitListOfNamedExpressions() {
const intptr_t list_length = helper_.ReadListLength(); // read list length.
for (intptr_t i = 0; i < list_length; ++i) {
helper_.SkipStringReference(); // read ith name index.
VisitExpression(); // read ith expression.
}
}
void ScopeBuilder::VisitArguments() {
helper_.ReadUInt(); // read argument_count.
@ -1268,18 +1280,8 @@ void ScopeBuilder::VisitArguments() {
VisitDartType(); // read ith type.
}
// Positional.
list_length = helper_.ReadListLength(); // read list length.
for (intptr_t i = 0; i < list_length; ++i) {
VisitExpression(); // read ith positional.
}
// Named.
list_length = helper_.ReadListLength(); // read list length.
for (intptr_t i = 0; i < list_length; ++i) {
helper_.SkipStringReference(); // read ith name index.
VisitExpression(); // read ith expression.
}
VisitListOfExpressions(); // Positional.
VisitListOfNamedExpressions(); // Named.
}
void ScopeBuilder::VisitVariableDeclaration() {

View file

@ -40,6 +40,8 @@ class ScopeBuilder {
void VisitInitializer();
void VisitExpression();
void VisitStatement();
void VisitListOfExpressions();
void VisitListOfNamedExpressions();
void VisitArguments();
void VisitVariableDeclaration();
void VisitVariableGet(intptr_t declaration_binary_offset);

File diff suppressed because it is too large Load diff

View file

@ -1062,6 +1062,87 @@ void StubCodeCompiler::GenerateAllocateGrowableArrayStub(Assembler* assembler) {
#endif // defined(TARGET_ARCH_IA32)
}
// Allocates a Record instance.
// Inputs (AllocateRecordABI):
//   kNumFieldsReg   - number of fields.
//   kFieldNamesReg  - array of field names.
// Output:
//   kResultReg      - the new (tagged) record.
// Fast path bump-allocates from the thread's new-space top; the slow path
// calls the AllocateRecord runtime entry.
void StubCodeCompiler::GenerateAllocateRecordStub(Assembler* assembler) {
const Register temp_reg = AllocateRecordABI::kTempReg;
const Register result_reg = AllocateRecordABI::kResultReg;
const Register num_fields_reg = AllocateRecordABI::kNumFieldsReg;
const Register field_names_reg = AllocateRecordABI::kFieldNamesReg;
Label slow_case;
// Check for allocation tracing.
NOT_IN_PRODUCT(__ MaybeTraceAllocation(kRecordCid, &slow_case, temp_reg));
// Compute the rounded instance size.
// temp_reg = header size + num_fields * compressed word size, rounded up
// to the object alignment.
const intptr_t fixed_size_plus_alignment_padding =
(target::Record::field_offset(0) +
target::ObjectAlignment::kObjectAlignment - 1);
__ AddScaled(temp_reg, num_fields_reg, TIMES_COMPRESSED_WORD_SIZE,
fixed_size_plus_alignment_padding);
__ AndImmediate(temp_reg, -target::ObjectAlignment::kObjectAlignment);
// Now allocate the object.
__ LoadFromOffset(result_reg, Address(THR, target::Thread::top_offset()));
__ AddRegisters(temp_reg, result_reg);
// Check if the allocation fits into the remaining space.
__ CompareWithMemoryValue(temp_reg,
Address(THR, target::Thread::end_offset()));
__ BranchIf(UNSIGNED_GREATER_EQUAL, &slow_case);
// Successfully allocated the object, now update top to point to
// next object start and initialize the object.
__ StoreToOffset(temp_reg, Address(THR, target::Thread::top_offset()));
// temp_reg now holds the instance size again (new top - object start).
__ SubRegisters(temp_reg, result_reg);
__ AddImmediate(result_reg, kHeapObjectTag);
// Calculate the size tag.
{
Label size_tag_overflow, done;
__ CompareImmediate(temp_reg, target::UntaggedObject::kSizeTagMaxSizeTag);
__ BranchIf(UNSIGNED_GREATER, &size_tag_overflow, Assembler::kNearJump);
__ LslImmediate(temp_reg,
target::UntaggedObject::kTagBitsSizeTagPos -
target::ObjectAlignment::kObjectAlignmentLog2);
__ Jump(&done, Assembler::kNearJump);
__ Bind(&size_tag_overflow);
// Set overflow size tag value.
__ LoadImmediate(temp_reg, 0);
__ Bind(&done);
uword tags = target::MakeTagWordForNewSpaceObject(kRecordCid, 0);
__ OrImmediate(temp_reg, tags);
__ StoreToOffset(
temp_reg,
FieldAddress(result_reg, target::Object::tags_offset())); // Tags.
}
__ StoreToOffset(
num_fields_reg,
FieldAddress(result_reg, target::Record::num_fields_offset()),
kFourBytes);
// New-space object, no barrier needed for the field-names store.
__ StoreCompressedIntoObjectNoBarrier(
result_reg,
FieldAddress(result_reg, target::Record::field_names_offset()),
field_names_reg);
// NOTE(review): the fast path does not null-initialize the field
// contents; presumably the caller initializes every field immediately
// (see BuildRecordLiteral) before the GC can observe the object --
// confirm.
__ Ret();
__ Bind(&slow_case);
__ EnterStubFrame();
__ PushObject(NullObject()); // Space on the stack for the return value.
__ SmiTag(num_fields_reg);
__ PushRegistersInOrder({num_fields_reg, field_names_reg});
__ CallRuntime(kAllocateRecordRuntimeEntry, 2);
__ Drop(2);
__ PopRegister(AllocateRecordABI::kResultReg);
EnsureIsNewOrRemembered(assembler, /*preserve_registers=*/false);
__ LeaveStubFrame();
__ Ret();
}
// The UnhandledException class lives in the VM isolate, so it cannot cache
// an allocation stub for itself. Instead, we cache it in the stub code list.
void StubCodeCompiler::GenerateAllocateUnhandledExceptionStub(

View file

@ -514,6 +514,14 @@ struct AllocateArrayABI {
static const Register kTypeArgumentsReg = R1;
};
// ABI for AllocateRecordStub.
struct AllocateRecordABI {
// Outgoing: the newly allocated record (shared with AllocateObjectABI).
static const Register kResultReg = AllocateObjectABI::kResultReg;
// Incoming: number of record fields.
static const Register kNumFieldsReg = R2;
// Incoming: array of field names.
static const Register kFieldNamesReg = R1;
// Scratch register used inside the stub.
static const Register kTempReg = R3;
};
// ABI for AllocateTypedDataArrayStub.
struct AllocateTypedDataArrayABI {
static const Register kResultReg = AllocateObjectABI::kResultReg;

View file

@ -348,6 +348,14 @@ struct AllocateArrayABI {
static const Register kTypeArgumentsReg = R1;
};
// ABI for AllocateRecordStub.
struct AllocateRecordABI {
// Outgoing: the newly allocated record (shared with AllocateObjectABI).
static const Register kResultReg = AllocateObjectABI::kResultReg;
// Incoming: number of record fields.
static const Register kNumFieldsReg = R2;
// Incoming: array of field names.
static const Register kFieldNamesReg = R1;
// Scratch register used inside the stub.
static const Register kTempReg = R3;
};
// ABI for AllocateTypedDataArrayStub.
struct AllocateTypedDataArrayABI {
static const Register kResultReg = AllocateObjectABI::kResultReg;

View file

@ -243,6 +243,14 @@ struct AllocateArrayABI {
static const Register kTypeArgumentsReg = ECX;
};
// ABI for AllocateRecordStub.
struct AllocateRecordABI {
// Outgoing: the newly allocated record (shared with AllocateObjectABI).
static const Register kResultReg = AllocateObjectABI::kResultReg;
// Incoming: number of record fields.
static const Register kNumFieldsReg = EDX;
// Incoming: array of field names.
static const Register kFieldNamesReg = ECX;
// Scratch register used inside the stub.
static const Register kTempReg = EBX;
};
// ABI for AllocateTypedDataArrayStub.
struct AllocateTypedDataArrayABI {
static const Register kResultReg = AllocateObjectABI::kResultReg;

View file

@ -357,6 +357,14 @@ struct AllocateArrayABI {
static constexpr Register kTypeArgumentsReg = T1;
};
// ABI for AllocateRecordStub.
struct AllocateRecordABI {
// Outgoing: the newly allocated record (shared with AllocateObjectABI).
// Uses constexpr to match the neighboring ABI structs in this file and
// to guarantee compile-time constants.
static constexpr Register kResultReg = AllocateObjectABI::kResultReg;
// Incoming: number of record fields.
static constexpr Register kNumFieldsReg = T2;
// Incoming: array of field names.
static constexpr Register kFieldNamesReg = T1;
// Scratch register used inside the stub.
static constexpr Register kTempReg = T3;
};
// ABI for AllocateTypedDataArrayStub.
struct AllocateTypedDataArrayABI {
static constexpr Register kResultReg = AllocateObjectABI::kResultReg;

View file

@ -319,6 +319,14 @@ struct AllocateArrayABI {
static const Register kTypeArgumentsReg = RBX;
};
// ABI for AllocateRecordStub.
struct AllocateRecordABI {
// Outgoing: the newly allocated record (shared with AllocateObjectABI).
static const Register kResultReg = AllocateObjectABI::kResultReg;
// Incoming: number of record fields.
static const Register kNumFieldsReg = R10;
// Incoming: array of field names.
static const Register kFieldNamesReg = RBX;
// Scratch register used inside the stub.
static const Register kTempReg = RDX;
};
// ABI for AllocateTypedDataArrayStub.
struct AllocateTypedDataArrayABI {
static const Register kResultReg = AllocateObjectABI::kResultReg;

View file

@ -235,6 +235,7 @@ class ObjectPointerVisitor;
RW(Code, allocate_growable_array_stub) \
RW(Code, allocate_object_stub) \
RW(Code, allocate_object_parametrized_stub) \
RW(Code, allocate_record_stub) \
RW(Code, allocate_unhandled_exception_stub) \
RW(Code, clone_context_stub) \
RW(Code, write_barrier_wrappers_stub) \
@ -326,6 +327,7 @@ class ObjectPointerVisitor;
DO(allocate_growable_array_stub, AllocateGrowableArray) \
DO(allocate_object_stub, AllocateObject) \
DO(allocate_object_parametrized_stub, AllocateObjectParameterized) \
DO(allocate_record_stub, AllocateRecord) \
DO(allocate_unhandled_exception_stub, AllocateUnhandledException) \
DO(clone_context_stub, CloneContext) \
DO(call_closure_no_such_method_stub, CallClosureNoSuchMethod) \

View file

@ -709,6 +709,19 @@ DEFINE_RUNTIME_ENTRY(CloneContext, 1) {
arguments.SetReturn(cloned_ctx);
}
// Allocate a new record instance (slow path for the AllocateRecord stub).
// Arg0: number of fields (Smi).
// Arg1: field names (Array).
// Return value: newly allocated record.
DEFINE_RUNTIME_ENTRY(AllocateRecord, 2) {
const Smi& num_fields = Smi::CheckedHandle(zone, arguments.ArgAt(0));
const auto& field_names = Array::CheckedHandle(zone, arguments.ArgAt(1));
const Record& record =
Record::Handle(zone, Record::New(num_fields.Value(), field_names,
SpaceForRuntimeAllocation()));
arguments.SetReturn(record);
}
// Allocate a SuspendState object.
// Arg0: frame size.
// Arg1: existing SuspendState object or function data.

View file

@ -18,6 +18,7 @@ namespace dart {
V(AllocateClosure) \
V(AllocateContext) \
V(AllocateObject) \
V(AllocateRecord) \
V(AllocateSuspendState) \
V(BoxDouble) \
V(BreakpointRuntimeHandler) \

View file

@ -193,9 +193,7 @@ CodePtr StubCode::GetAllocationStubForClass(const Class& cls) {
case kClosureCid:
return object_store->allocate_closure_stub();
case kRecordCid:
// TODO(dartbug.com/49719)
UNIMPLEMENTED();
break;
return object_store->allocate_record_stub();
}
Code& stub = Code::Handle(zone, cls.allocation_stub());
if (stub.IsNull()) {

View file

@ -56,6 +56,7 @@ namespace dart {
V(AllocateObject) \
V(AllocateObjectParameterized) \
V(AllocateObjectSlow) \
V(AllocateRecord) \
V(AllocateUnhandledException) \
V(BoxDouble) \
V(CloneContext) \