[vm/compiler] Remove PushArguments from AllocateObjectInstr

Issue: https://github.com/dart-lang/sdk/issues/39788
Change-Id: Ideda280bce1e8a4b463f47d950c1a32b48e7b204
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/129324
Reviewed-by: Martin Kustermann <kustermann@google.com>
This commit is contained in:
Alexander Markov 2020-01-09 01:37:27 +00:00 committed by commit-bot@chromium.org
parent 361f4abfe0
commit 9b555ca6d3
23 changed files with 152 additions and 94 deletions

View file

@@ -5387,36 +5387,44 @@ class TemplateAllocation : public AllocationInstr {
virtual void RawSetInputAt(intptr_t i, Value* value) { inputs_[i] = value; }
};
class AllocateObjectInstr : public TemplateAllocation<0, NoThrow> {
class AllocateObjectInstr : public AllocationInstr {
public:
AllocateObjectInstr(TokenPosition token_pos,
const Class& cls,
PushArgumentsArray* arguments)
Value* type_arguments = nullptr)
: token_pos_(token_pos),
cls_(cls),
arguments_(arguments),
type_arguments_(type_arguments),
identity_(AliasIdentity::Unknown()),
closure_function_(Function::ZoneHandle()) {
// Either no arguments or one type-argument and one instantiator.
ASSERT(arguments->is_empty() || (arguments->length() == 1));
ASSERT((cls.NumTypeArguments() > 0) == (type_arguments != nullptr));
if (type_arguments != nullptr) {
SetInputAt(0, type_arguments);
}
}
DECLARE_INSTRUCTION(AllocateObject)
virtual CompileType ComputeType() const;
virtual intptr_t ArgumentCount() const { return arguments_->length(); }
virtual PushArgumentInstr* PushArgumentAt(intptr_t index) const {
return (*arguments_)[index];
}
const Class& cls() const { return cls_; }
virtual TokenPosition token_pos() const { return token_pos_; }
Value* type_arguments() const { return type_arguments_; }
const Function& closure_function() const { return closure_function_; }
void set_closure_function(const Function& function) {
closure_function_ = function.raw();
}
virtual intptr_t InputCount() const {
return (type_arguments_ != nullptr) ? 1 : 0;
}
virtual Value* InputAt(intptr_t i) const {
ASSERT(type_arguments_ != nullptr && i == 0);
return type_arguments_;
}
virtual bool MayThrow() const { return false; }
virtual bool ComputeCanDeoptimize() const { return false; }
virtual bool HasUnknownSideEffects() const { return false; }
@@ -5437,9 +5445,15 @@ class AllocateObjectInstr : public TemplateAllocation<0, NoThrow> {
ADD_EXTRA_INFO_TO_S_EXPRESSION_SUPPORT
private:
virtual void RawSetInputAt(intptr_t i, Value* value) {
ASSERT((type_arguments_ != nullptr) && (i == 0));
ASSERT(value != nullptr);
type_arguments_ = value;
}
const TokenPosition token_pos_;
const Class& cls_;
PushArgumentsArray* const arguments_;
Value* type_arguments_;
AliasIdentity identity_;
Function& closure_function_;

View file

@@ -7392,21 +7392,29 @@ void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
return MakeCallSummary(zone);
const intptr_t kNumInputs = (type_arguments() != nullptr) ? 1 : 0;
const intptr_t kNumTemps = 0;
LocationSummary* locs = new (zone)
LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kCall);
if (type_arguments() != nullptr) {
locs->set_in(0,
Location::RegisterLocation(kAllocationStubTypeArgumentsReg));
}
locs->set_out(0, Location::RegisterLocation(R0));
return locs;
}
void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (ArgumentCount() == 1) {
if (type_arguments() != nullptr) {
TypeUsageInfo* type_usage_info = compiler->thread()->type_usage_info();
if (type_usage_info != nullptr) {
RegisterTypeArgumentsUse(compiler->function(), type_usage_info, cls_,
ArgumentAt(0));
type_arguments()->definition());
}
}
const Code& stub = Code::ZoneHandle(
compiler->zone(), StubCode::GetAllocationStubForClass(cls()));
compiler->GenerateCall(token_pos(), stub, RawPcDescriptors::kOther, locs());
__ Drop(ArgumentCount()); // Discard arguments.
}
void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

View file

@@ -6387,21 +6387,29 @@ void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
return MakeCallSummary(zone);
const intptr_t kNumInputs = (type_arguments() != nullptr) ? 1 : 0;
const intptr_t kNumTemps = 0;
LocationSummary* locs = new (zone)
LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kCall);
if (type_arguments() != nullptr) {
locs->set_in(0,
Location::RegisterLocation(kAllocationStubTypeArgumentsReg));
}
locs->set_out(0, Location::RegisterLocation(R0));
return locs;
}
void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (ArgumentCount() == 1) {
if (type_arguments() != nullptr) {
TypeUsageInfo* type_usage_info = compiler->thread()->type_usage_info();
if (type_usage_info != nullptr) {
RegisterTypeArgumentsUse(compiler->function(), type_usage_info, cls_,
ArgumentAt(0));
type_arguments()->definition());
}
}
const Code& stub = Code::ZoneHandle(
compiler->zone(), StubCode::GetAllocationStubForClass(cls()));
compiler->GenerateCall(token_pos(), stub, RawPcDescriptors::kOther, locs());
__ Drop(ArgumentCount()); // Discard arguments.
}
void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

View file

@@ -867,15 +867,14 @@ AllocateObjectInstr* FlowGraphDeserializer::DeserializeAllocateObject(
auto const cls_sexp = CheckTaggedList(Retrieve(sexp, 1), "Class");
if (!ParseClass(cls_sexp, &cls)) return nullptr;
intptr_t args_len = 0;
if (auto const len_sexp = CheckInteger(sexp->ExtraLookupValue("args_len"))) {
args_len = len_sexp->value();
Value* type_arguments = nullptr;
if (cls.NumTypeArguments() > 0) {
type_arguments = ParseValue(Retrieve(sexp, 2));
if (type_arguments == nullptr) return nullptr;
}
auto const arguments = FetchPushedArguments(sexp, args_len);
if (arguments == nullptr) return nullptr;
auto const inst =
new (zone()) AllocateObjectInstr(info.token_pos, cls, arguments);
new (zone()) AllocateObjectInstr(info.token_pos, cls, type_arguments);
if (auto const closure_sexp = CheckTaggedList(
sexp->ExtraLookupValue("closure_function"), "Function")) {

View file

@@ -6617,14 +6617,22 @@ void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
return MakeCallSummary(zone);
const intptr_t kNumInputs = (type_arguments() != nullptr) ? 1 : 0;
const intptr_t kNumTemps = 0;
LocationSummary* locs = new (zone)
LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kCall);
if (type_arguments() != nullptr) {
locs->set_in(0,
Location::RegisterLocation(kAllocationStubTypeArgumentsReg));
}
locs->set_out(0, Location::RegisterLocation(EAX));
return locs;
}
void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
const Code& stub = Code::ZoneHandle(
compiler->zone(), StubCode::GetAllocationStubForClass(cls()));
compiler->GenerateCall(token_pos(), stub, RawPcDescriptors::kOther, locs());
__ Drop(ArgumentCount()); // Discard arguments.
}
void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

View file

@@ -665,11 +665,10 @@ void RelationalOpInstr::PrintOperandsTo(BufferFormatter* f) const {
void AllocateObjectInstr::PrintOperandsTo(BufferFormatter* f) const {
f->Print("%s", String::Handle(cls().ScrubbedName()).ToCString());
for (intptr_t i = 0; i < ArgumentCount(); i++) {
for (intptr_t i = 0; i < InputCount(); ++i) {
f->Print(", ");
ArgumentValueAt(i)->PrintTo(f);
InputAt(i)->PrintTo(f);
}
if (Identity().IsNotAliased()) {
f->Print(" <not-aliased>");
}

View file

@@ -1289,6 +1289,9 @@ void AllocateObjectInstr::AddOperandsToSExpression(
if (auto const sexp_cls = s->DartValueToSExp(cls())) {
sexp->Add(sexp_cls);
}
if (type_arguments() != nullptr) {
sexp->Add(type_arguments()->ToSExpression(s));
}
}
void AllocateObjectInstr::AddExtraInfoToSExpression(
@@ -1296,9 +1299,6 @@ void AllocateObjectInstr::AddExtraInfoToSExpression(
FlowGraphSerializer* s) const {
Instruction::AddExtraInfoToSExpression(sexp, s);
s->AddExtraInteger(sexp, "size", cls().instance_size());
if (ArgumentCount() > 0 || FLAG_verbose_flow_graph_serialization) {
s->AddExtraInteger(sexp, "args_len", ArgumentCount());
}
if (auto const closure = s->DartValueToSExp(closure_function())) {
sexp->AddExtra("closure_function", closure);
}

View file

@@ -6797,21 +6797,29 @@ void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
return MakeCallSummary(zone);
const intptr_t kNumInputs = (type_arguments() != nullptr) ? 1 : 0;
const intptr_t kNumTemps = 0;
LocationSummary* locs = new (zone)
LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kCall);
if (type_arguments() != nullptr) {
locs->set_in(0,
Location::RegisterLocation(kAllocationStubTypeArgumentsReg));
}
locs->set_out(0, Location::RegisterLocation(RAX));
return locs;
}
void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (ArgumentCount() == 1) {
if (type_arguments() != nullptr) {
TypeUsageInfo* type_usage_info = compiler->thread()->type_usage_info();
if (type_usage_info != nullptr) {
RegisterTypeArgumentsUse(compiler->function(), type_usage_info, cls_,
ArgumentAt(0));
type_arguments()->definition());
}
}
const Code& stub = Code::ZoneHandle(
compiler->zone(), StubCode::GetAllocationStubForClass(cls()));
compiler->GenerateCall(token_pos(), stub, RawPcDescriptors::kOther, locs());
__ Drop(ArgumentCount()); // Discard arguments.
}
void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

View file

@@ -197,6 +197,13 @@ class GraphInfoCollector : public ValueObject {
continue;
}
++instruction_count_;
// Count inputs of certain instructions as if separate PushArgument
// instructions are used for inputs. This is done in order to
// preserve inlining behavior and avoid code size growth after
// PushArgument instructions are eliminated.
if (current->IsAllocateObject()) {
instruction_count_ += current->InputCount();
}
if (current->IsInstanceCall() || current->IsStaticCall() ||
current->IsClosureCall()) {
++call_site_count_;

View file

@@ -1771,12 +1771,11 @@ class LoadOptimizer : public ValueObject {
}
Definition* forward_def = graph_->constant_null();
if (alloc->ArgumentCount() > 0) {
ASSERT(alloc->ArgumentCount() == 1);
if (alloc->type_arguments() != nullptr) {
const Slot& type_args_slot = Slot::GetTypeArgumentsSlotFor(
graph_->thread(), alloc->cls());
if (slot->IsIdentical(type_args_slot)) {
forward_def = alloc->ArgumentAt(0);
forward_def = alloc->type_arguments()->definition();
}
}
gen->Add(place_id);
@@ -3258,11 +3257,11 @@ void AllocationSinking::InsertMaterializations(Definition* alloc) {
}
}
if (alloc->ArgumentCount() > 0) {
AllocateObjectInstr* alloc_object = alloc->AsAllocateObject();
ASSERT(alloc_object->ArgumentCount() == 1);
AddSlot(slots, Slot::GetTypeArgumentsSlotFor(flow_graph_->thread(),
alloc_object->cls()));
if (auto alloc_object = alloc->AsAllocateObject()) {
if (alloc_object->type_arguments() != nullptr) {
AddSlot(slots, Slot::GetTypeArgumentsSlotFor(flow_graph_->thread(),
alloc_object->cls()));
}
}
// Collect all instructions that mention this object in the environment.

View file

@@ -298,8 +298,8 @@ static void TestAliasingViaRedefinition(
{
BlockBuilder builder(H.flow_graph(), b1);
auto& slot = Slot::Get(field, &H.flow_graph()->parsed_function());
v0 = builder.AddDefinition(new AllocateObjectInstr(
TokenPosition::kNoSource, cls, new PushArgumentsArray(0)));
v0 = builder.AddDefinition(
new AllocateObjectInstr(TokenPosition::kNoSource, cls));
v1 = builder.AddDefinition(
new LoadFieldInstr(new Value(v0), slot, TokenPosition::kNoSource));
auto v2 = builder.AddDefinition(make_redefinition(&S, H.flow_graph(), v0));
@@ -474,10 +474,10 @@ static void TestAliasingViaStore(
{
BlockBuilder builder(H.flow_graph(), b1);
auto& slot = Slot::Get(field, &H.flow_graph()->parsed_function());
v0 = builder.AddDefinition(new AllocateObjectInstr(
TokenPosition::kNoSource, cls, new PushArgumentsArray(0)));
v5 = builder.AddDefinition(new AllocateObjectInstr(
TokenPosition::kNoSource, cls, new PushArgumentsArray(0)));
v0 = builder.AddDefinition(
new AllocateObjectInstr(TokenPosition::kNoSource, cls));
v5 = builder.AddDefinition(
new AllocateObjectInstr(TokenPosition::kNoSource, cls));
if (!make_host_escape) {
builder.AddInstruction(new StoreInstanceFieldInstr(
slot, new Value(v5), new Value(v0), kEmitStoreBarrier,

View file

@@ -840,9 +840,7 @@ Fragment BaseFlowGraphBuilder::AllocateClosure(
TokenPosition position,
const Function& closure_function) {
const Class& cls = Class::ZoneHandle(Z, I->object_store()->closure_class());
ArgumentArray arguments = new (Z) ZoneGrowableArray<PushArgumentInstr*>(Z, 0);
AllocateObjectInstr* allocate =
new (Z) AllocateObjectInstr(position, cls, arguments);
AllocateObjectInstr* allocate = new (Z) AllocateObjectInstr(position, cls);
allocate->set_closure_function(closure_function);
Push(allocate);
return Fragment(allocate);
@@ -890,9 +888,10 @@ Fragment BaseFlowGraphBuilder::LoadClassId() {
Fragment BaseFlowGraphBuilder::AllocateObject(TokenPosition position,
const Class& klass,
intptr_t argument_count) {
ArgumentArray arguments = GetArguments(argument_count);
ASSERT((argument_count == 0) || (argument_count == 1));
Value* type_arguments = (argument_count > 0) ? Pop() : nullptr;
AllocateObjectInstr* allocate =
new (Z) AllocateObjectInstr(position, klass, arguments);
new (Z) AllocateObjectInstr(position, klass, type_arguments);
Push(allocate);
return Fragment(allocate);
}

View file

@@ -1120,11 +1120,7 @@ void BytecodeFlowGraphBuilder::BuildAllocate() {
const Class& klass = Class::Cast(ConstantAt(DecodeOperandD()).value());
const ArgumentArray arguments =
new (Z) ZoneGrowableArray<PushArgumentInstr*>(Z, 0);
AllocateObjectInstr* allocate =
new (Z) AllocateObjectInstr(position_, klass, arguments);
AllocateObjectInstr* allocate = new (Z) AllocateObjectInstr(position_, klass);
code_ <<= allocate;
B->Push(allocate);
@@ -1136,10 +1132,10 @@ void BytecodeFlowGraphBuilder::BuildAllocateT() {
}
const Class& klass = Class::Cast(PopConstant().value());
const ArgumentArray arguments = GetArguments(1);
Value* type_arguments = Pop();
AllocateObjectInstr* allocate =
new (Z) AllocateObjectInstr(position_, klass, arguments);
new (Z) AllocateObjectInstr(position_, klass, type_arguments);
code_ <<= allocate;
B->Push(allocate);

View file

@@ -3296,7 +3296,6 @@ Fragment StreamingFlowGraphBuilder::BuildStaticInvocation(TokenPosition* p) {
const TypeArguments& type_arguments =
PeekArgumentsInstantiatedType(klass);
instructions += TranslateInstantiatedTypeArguments(type_arguments);
instructions += PushArgument();
instructions += AllocateObject(position, klass, 1);
} else {
instructions += AllocateObject(position, klass, 0);
@@ -3394,7 +3393,6 @@ Fragment StreamingFlowGraphBuilder::BuildConstructorInvocation(
instructions += TranslateInstantiatedTypeArguments(type_arguments);
}
instructions += PushArgument();
instructions += AllocateObject(position, klass, 1);
} else {
instructions += AllocateObject(position, klass, 0);

View file

@@ -1295,7 +1295,6 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
body += LoadNativeField(Slot::GetTypeArgumentsIndexSlot(
thread_, Pointer::kNativeTypeArgPos));
body += LoadNativeField(Slot::Type_arguments());
body += PushArgument(); // We instantiate a Pointer<X>.
body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
LocalVariable* pointer = MakeTemporary();
body += LoadLocal(pointer);
@@ -1421,7 +1420,6 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
ASSERT(function.NumTypeParameters() == 1);
ASSERT(function.NumParameters() == 1);
body += LoadLocal(parsed_function_->RawTypeArgumentsVariable());
body += PushArgument();
body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
body += LoadLocal(MakeTemporary()); // Duplicate Pointer.
body += LoadLocal(parsed_function_->RawParameterVariable(0)); // Address.
@@ -2883,7 +2881,6 @@ Fragment FlowGraphBuilder::FfiPointerFromAddress(const Type& result_type) {
Fragment code;
code += Constant(args);
code += PushArgument();
code += AllocateObject(TokenPosition::kNoSource, result_class, 1);
LocalVariable* pointer = MakeTemporary();
code += LoadLocal(pointer);

View file

@@ -1788,7 +1788,8 @@ void StubCodeCompiler::GenerateArrayWriteBarrierStub(Assembler* assembler) {
// Called for inline allocation of objects.
// Input parameters:
// LR : return address.
// SP + 0 : type arguments object (only if class is parameterized).
// kAllocationStubTypeArgumentsReg (R3): type arguments object
// (only if class is parameterized).
void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
const Class& cls) {
// The generated code is different if the class is parameterized.
@@ -1798,10 +1799,12 @@ void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
const Register kNullReg = R8;
const Register kOtherNullReg = R9;
const Register kTypeArgumentsReg = R3;
const Register kTypeArgumentsReg = kAllocationStubTypeArgumentsReg;
const Register kInstanceReg = R0;
const Register kEndReg = R1;
const Register kEndOfInstanceReg = R2;
static_assert(kAllocationStubTypeArgumentsReg == R3,
"Adjust register allocation in the AllocationStub");
// kInlineInstanceSize is a constant used as a threshold for determining
// when the object initialization should be done as a loop or as
@@ -1810,9 +1813,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
const intptr_t instance_size = target::Class::GetInstanceSize(cls);
ASSERT(instance_size > 0);
ASSERT(instance_size % target::ObjectAlignment::kObjectAlignment == 0);
if (is_cls_parameterized) {
__ ldr(kTypeArgumentsReg, Address(SP, 0));
}
__ LoadObject(kNullReg, NullObject());
if (FLAG_inline_alloc &&

View file

@@ -1862,7 +1862,8 @@ void StubCodeCompiler::GenerateArrayWriteBarrierStub(Assembler* assembler) {
// Called for inline allocation of objects.
// Input parameters:
// LR : return address.
// SP + 0 : type arguments object (only if class is parameterized).
// kAllocationStubTypeArgumentsReg (R1): type arguments object
// (only if class is parameterized).
void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
const Class& cls) {
// The generated code is different if the class is parameterized.
@@ -1870,11 +1871,13 @@ void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
ASSERT(!is_cls_parameterized || target::Class::TypeArgumentsFieldOffset(
cls) != target::Class::kNoTypeArguments);
const Register kTypeArgumentsReg = R1;
const Register kTypeArgumentsReg = kAllocationStubTypeArgumentsReg;
const Register kInstanceReg = R0;
const Register kNullReg = R3;
const Register kTempReg = R4;
const Register kTopReg = R5;
static_assert(kAllocationStubTypeArgumentsReg == R1,
"Adjust register allocation in the AllocationStub");
// kInlineInstanceSize is a constant used as a threshold for determining
// when the object initialization should be done as a loop or as
@@ -1882,9 +1885,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
const int kInlineInstanceSize = 12;
const intptr_t instance_size = target::Class::GetInstanceSize(cls);
ASSERT(instance_size > 0);
if (is_cls_parameterized) {
__ ldr(kTypeArgumentsReg, Address(SP));
}
__ LoadObject(kNullReg, NullObject());
if (FLAG_inline_alloc &&

View file

@@ -1440,14 +1440,14 @@ void StubCodeCompiler::GenerateArrayWriteBarrierStub(Assembler* assembler) {
// Called for inline allocation of objects.
// Input parameters:
// ESP + 4 : type arguments object (only if class is parameterized).
// ESP : points to return address.
// kAllocationStubTypeArgumentsReg (EDX) : type arguments object
// (only if class is parameterized).
// Uses EAX, EBX, ECX, EDX, EDI as temporary registers.
// Returns patch_code_pc offset where patching code for disabling the stub
// has been generated (similar to regularly generated Dart code).
void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
const Class& cls) {
const intptr_t kObjectTypeArgumentsOffset = 1 * target::kWordSize;
const Immediate& raw_null = Immediate(target::ToRawPointer(NullObject()));
// The generated code is different if the class is parameterized.
const bool is_cls_parameterized = target::Class::NumTypeArguments(cls) > 0;
@@ -1459,10 +1459,11 @@ void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
const int kInlineInstanceSize = 12; // In words.
const intptr_t instance_size = target::Class::GetInstanceSize(cls);
ASSERT(instance_size > 0);
if (is_cls_parameterized) {
__ movl(EDX, Address(ESP, kObjectTypeArgumentsOffset));
// EDX: instantiated type arguments.
}
// EDX: instantiated type arguments (if is_cls_parameterized).
static_assert(kAllocationStubTypeArgumentsReg == EDX,
"Adjust register allocation in the AllocationStub");
if (FLAG_inline_alloc &&
target::Heap::IsAllocatableInNewSpace(instance_size) &&
!target::Class::TraceAllocation(cls)) {
@@ -1530,7 +1531,8 @@ void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
// EDX: new object type arguments.
// Set the type arguments in the new object.
const intptr_t offset = target::Class::TypeArgumentsFieldOffset(cls);
__ StoreIntoObjectNoBarrier(EAX, FieldAddress(EAX, offset), EDX);
__ StoreIntoObjectNoBarrier(EAX, FieldAddress(EAX, offset),
kAllocationStubTypeArgumentsReg);
}
// Done allocating and initializing the instance.
// EAX: new object (tagged).
@@ -1547,7 +1549,8 @@ void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
__ PushObject(
CastHandle<Object>(cls)); // Push class of object to be allocated.
if (is_cls_parameterized) {
__ pushl(EDX); // Push type arguments of object to be allocated.
// Push type arguments of object to be allocated.
__ pushl(kAllocationStubTypeArgumentsReg);
} else {
__ pushl(raw_null); // Push null type arguments.
}

View file

@@ -1791,11 +1791,11 @@ void StubCodeCompiler::GenerateArrayWriteBarrierStub(Assembler* assembler) {
// Called for inline allocation of objects.
// Input parameters:
// RSP + 8 : type arguments object (only if class is parameterized).
// RSP : points to return address.
// kAllocationStubTypeArgumentsReg (RDX) : type arguments object
// (only if class is parameterized).
void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
const Class& cls) {
const intptr_t kObjectTypeArgumentsOffset = 1 * target::kWordSize;
// The generated code is different if the class is parameterized.
const bool is_cls_parameterized = target::Class::NumTypeArguments(cls) > 0;
ASSERT(!is_cls_parameterized || target::Class::TypeArgumentsFieldOffset(
@@ -1807,10 +1807,11 @@ void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
const intptr_t instance_size = target::Class::GetInstanceSize(cls);
ASSERT(instance_size > 0);
__ LoadObject(R9, NullObject());
if (is_cls_parameterized) {
__ movq(RDX, Address(RSP, kObjectTypeArgumentsOffset));
// RDX: instantiated type arguments.
}
// RDX: instantiated type arguments (if is_cls_parameterized).
static_assert(kAllocationStubTypeArgumentsReg == RDX,
"Adjust register allocation in the AllocationStub");
if (FLAG_inline_alloc &&
target::Heap::IsAllocatableInNewSpace(instance_size) &&
!target::Class::TraceAllocation(cls)) {
@@ -1883,7 +1884,8 @@ void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
// RDX: new object type arguments.
// Set the type arguments in the new object.
const intptr_t offset = target::Class::TypeArgumentsFieldOffset(cls);
__ StoreIntoObjectNoBarrier(RAX, FieldAddress(RAX, offset), RDX);
__ StoreIntoObjectNoBarrier(RAX, FieldAddress(RAX, offset),
kAllocationStubTypeArgumentsReg);
}
// Done allocating and initializing the instance.
// RAX: new object (tagged).
@@ -1900,7 +1902,8 @@ void StubCodeCompiler::GenerateAllocationStubForClass(Assembler* assembler,
__ PushObject(
CastHandle<Object>(cls)); // Push class of object to be allocated.
if (is_cls_parameterized) {
__ pushq(RDX); // Push type arguments of object to be allocated.
// Push type arguments of object to be allocated.
__ pushq(kAllocationStubTypeArgumentsReg);
} else {
__ pushq(R9); // Push null type arguments.
}

View file

@@ -292,6 +292,9 @@ const Register kWriteBarrierObjectReg = R1;
const Register kWriteBarrierValueReg = R0;
const Register kWriteBarrierSlotReg = R9;
// ABI for allocation stubs.
const Register kAllocationStubTypeArgumentsReg = R3;
// List of registers used in load/store multiple.
typedef uint16_t RegList;
const RegList kAllCpuRegistersList = 0xFFFF;

View file

@@ -135,6 +135,9 @@ const Register kWriteBarrierObjectReg = R1;
const Register kWriteBarrierValueReg = R0;
const Register kWriteBarrierSlotReg = R25;
// ABI for allocation stubs.
const Register kAllocationStubTypeArgumentsReg = R1;
// Masks, sizes, etc.
const int kXRegSizeInBits = 64;
const int kWRegSizeInBits = 32;

View file

@@ -81,6 +81,9 @@ const Register kWriteBarrierObjectReg = EDX;
const Register kWriteBarrierValueReg = kNoRegister;
const Register kWriteBarrierSlotReg = EDI;
// ABI for allocation stubs.
const Register kAllocationStubTypeArgumentsReg = EDX;
typedef uint32_t RegList;
const RegList kAllCpuRegistersList = 0xFF;

View file

@@ -127,6 +127,9 @@ const Register kWriteBarrierObjectReg = RDX;
const Register kWriteBarrierValueReg = RAX;
const Register kWriteBarrierSlotReg = R13;
// ABI for allocation stubs.
const Register kAllocationStubTypeArgumentsReg = RDX;
typedef uint32_t RegList;
const RegList kAllCpuRegistersList = 0xFFFF;
const RegList kAllFpuRegistersList = 0xFFFF;