[vm/compiler] Create AllocateObjectABI struct in constants.

Also make all other Allocate*ABI structs explicitly use the same result
register as AllocateObjectABI for consistency.

TEST=Refactoring, so existing tests.

Cq-Include-Trybots: luci.dart.try:vm-kernel-linux-debug-ia32-try,vm-kernel-linux-debug-x64-try,vm-kernel-linux-debug-x64c-try,vm-kernel-linux-debug-simarm64c-try,vm-kernel-nnbd-linux-debug-ia32-try,vm-kernel-nnbd-linux-debug-x64-try,vm-kernel-precomp-linux-debug-simarm64c-try,vm-kernel-precomp-linux-debug-simarm_x64-try,vm-kernel-precomp-linux-debug-x64-try,vm-kernel-precomp-linux-debug-x64c-try,vm-kernel-precomp-nnbd-linux-debug-simarm_x64-try,vm-kernel-precomp-nnbd-linux-debug-x64-try,vm-kernel-linux-release-simarm-try,vm-kernel-linux-release-simarm64-try,vm-kernel-nnbd-linux-release-simarm-try,vm-kernel-nnbd-linux-release-simarm64-try,vm-kernel-precomp-linux-release-simarm-try,vm-kernel-precomp-linux-release-simarm64-try,vm-kernel-precomp-nnbd-linux-release-simarm64-try
Change-Id: Iede8ff499ae3e7741e57090c36bc6b5dcc9217b7
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/201184
Reviewed-by: Daco Harkes <dacoharkes@google.com>
Reviewed-by: Clement Skau <cskau@google.com>
Commit-Queue: Tess Strickland <sstrickl@google.com>
This commit is contained in:
Tess Strickland 2021-05-26 09:39:22 +00:00 committed by commit-bot@chromium.org
parent 619f46cd19
commit 1d0bac85bc
13 changed files with 172 additions and 156 deletions

View file

@ -6164,6 +6164,7 @@ class TemplateAllocation : public AllocationInstr {
class AllocateObjectInstr : public AllocationInstr {
public:
enum { kTypeArgumentsPos = 0 };
AllocateObjectInstr(const InstructionSource& source,
const Class& cls,
intptr_t deopt_id,
@ -6173,7 +6174,7 @@ class AllocateObjectInstr : public AllocationInstr {
type_arguments_(type_arguments) {
ASSERT((cls.NumTypeArguments() > 0) == (type_arguments != nullptr));
if (type_arguments != nullptr) {
SetInputAt(0, type_arguments);
SetInputAt(kTypeArgumentsPos, type_arguments);
}
}
@ -6187,7 +6188,7 @@ class AllocateObjectInstr : public AllocationInstr {
return (type_arguments_ != nullptr) ? 1 : 0;
}
virtual Value* InputAt(intptr_t i) const {
ASSERT(type_arguments_ != nullptr && i == 0);
ASSERT(type_arguments_ != nullptr && i == kTypeArgumentsPos);
return type_arguments_;
}
@ -6205,7 +6206,7 @@ class AllocateObjectInstr : public AllocationInstr {
private:
virtual void RawSetInputAt(intptr_t i, Value* value) {
ASSERT((type_arguments_ != nullptr) && (i == 0));
ASSERT((type_arguments_ != nullptr) && (i == kTypeArgumentsPos));
ASSERT(value != nullptr);
type_arguments_ = value;
}

View file

@ -7536,10 +7536,10 @@ LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone,
LocationSummary* locs = new (zone)
LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kCall);
if (type_arguments() != nullptr) {
locs->set_in(0,
Location::RegisterLocation(kAllocationStubTypeArgumentsReg));
locs->set_in(kTypeArgumentsPos, Location::RegisterLocation(
AllocateObjectABI::kTypeArgumentsReg));
}
locs->set_out(0, Location::RegisterLocation(R0));
locs->set_out(0, Location::RegisterLocation(AllocateObjectABI::kResultReg));
return locs;
}

View file

@ -6553,10 +6553,10 @@ LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone,
LocationSummary* locs = new (zone)
LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kCall);
if (type_arguments() != nullptr) {
locs->set_in(0,
Location::RegisterLocation(kAllocationStubTypeArgumentsReg));
locs->set_in(kTypeArgumentsPos, Location::RegisterLocation(
AllocateObjectABI::kTypeArgumentsReg));
}
locs->set_out(0, Location::RegisterLocation(R0));
locs->set_out(0, Location::RegisterLocation(AllocateObjectABI::kResultReg));
return locs;
}

View file

@ -6665,10 +6665,10 @@ LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone,
LocationSummary* locs = new (zone)
LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kCall);
if (type_arguments() != nullptr) {
locs->set_in(0,
Location::RegisterLocation(kAllocationStubTypeArgumentsReg));
locs->set_in(kTypeArgumentsPos, Location::RegisterLocation(
AllocateObjectABI::kTypeArgumentsReg));
}
locs->set_out(0, Location::RegisterLocation(EAX));
locs->set_out(0, Location::RegisterLocation(AllocateObjectABI::kResultReg));
return locs;
}

View file

@ -6966,10 +6966,10 @@ LocationSummary* AllocateObjectInstr::MakeLocationSummary(Zone* zone,
LocationSummary* locs = new (zone)
LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kCall);
if (type_arguments() != nullptr) {
locs->set_in(0,
Location::RegisterLocation(kAllocationStubTypeArgumentsReg));
locs->set_in(kTypeArgumentsPos, Location::RegisterLocation(
AllocateObjectABI::kTypeArgumentsReg));
}
locs->set_out(0, Location::RegisterLocation(RAX));
locs->set_out(0, Location::RegisterLocation(AllocateObjectABI::kResultReg));
return locs;
}

View file

@ -1719,10 +1719,7 @@ void StubCodeCompiler::GenerateArrayWriteBarrierStub(Assembler* assembler) {
static void GenerateAllocateObjectHelper(Assembler* assembler,
bool is_cls_parameterized) {
const Register kInstanceReg = R0;
// R1
const Register kTagsReg = R2;
// kAllocationStubTypeArgumentsReg = R3
{
Label slow_case;
@ -1737,10 +1734,12 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
__ ExtractInstanceSizeFromTags(kInstanceSizeReg, kTagsReg);
// Load two words from Thread::top: top and end.
// kInstanceReg: potential next object start.
__ ldrd(kInstanceReg, kEndReg, THR, target::Thread::top_offset());
// AllocateObjectABI::kResultReg: potential next object start.
__ ldrd(AllocateObjectABI::kResultReg, kEndReg, THR,
target::Thread::top_offset());
__ add(kNewTopReg, kInstanceReg, Operand(kInstanceSizeReg));
__ add(kNewTopReg, AllocateObjectABI::kResultReg,
Operand(kInstanceSizeReg));
__ CompareRegisters(kEndReg, kNewTopReg);
__ b(&slow_case, UNSIGNED_LESS_EQUAL);
@ -1751,7 +1750,8 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
} // kEndReg = R1, kInstanceSizeReg = R9
// Tags.
__ str(kTagsReg, Address(kInstanceReg, target::Object::tags_offset()));
__ str(kTagsReg, Address(AllocateObjectABI::kResultReg,
target::Object::tags_offset()));
// Initialize the remaining words of the object.
{
@ -1760,7 +1760,7 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
__ LoadObject(kNullReg, NullObject());
__ AddImmediate(kFieldReg, kInstanceReg,
__ AddImmediate(kFieldReg, AllocateObjectABI::kResultReg,
target::Instance::first_field_offset());
Label done, init_loop;
__ Bind(&init_loop);
@ -1792,14 +1792,16 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
// Set the type arguments in the new object.
__ StoreIntoObjectNoBarrier(
kInstanceReg,
Address(kInstanceReg, kTypeOffestReg, LSL, target::kWordSizeLog2),
kAllocationStubTypeArgumentsReg);
AllocateObjectABI::kResultReg,
Address(AllocateObjectABI::kResultReg, kTypeOffestReg, LSL,
target::kWordSizeLog2),
AllocateObjectABI::kTypeArgumentsReg);
__ Bind(&not_parameterized_case);
} // kClsIdReg = R1, kTypeOffestReg = R9
__ AddImmediate(kInstanceReg, kInstanceReg, kHeapObjectTag);
__ AddImmediate(AllocateObjectABI::kResultReg,
AllocateObjectABI::kResultReg, kHeapObjectTag);
__ Ret();
@ -1811,7 +1813,7 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
const Register kStubReg = R8;
if (!is_cls_parameterized) {
__ LoadObject(kAllocationStubTypeArgumentsReg, NullObject());
__ LoadObject(AllocateObjectABI::kTypeArgumentsReg, NullObject());
}
// Tail call to generic allocation stub.
@ -1833,10 +1835,8 @@ void StubCodeCompiler::GenerateAllocateObjectParameterizedStub(
}
void StubCodeCompiler::GenerateAllocateObjectSlowStub(Assembler* assembler) {
const Register kInstanceReg = R0;
const Register kClsReg = R1;
const Register kTagsReg = R2;
// kAllocationStubTypeArgumentsReg = R3
if (!FLAG_use_bare_instructions) {
__ ldr(CODE_REG,
@ -1847,21 +1847,21 @@ void StubCodeCompiler::GenerateAllocateObjectSlowStub(Assembler* assembler) {
// calling into the runtime.
__ EnterStubFrame();
__ ExtractClassIdFromTags(kInstanceReg, kTagsReg);
__ LoadClassById(kClsReg, kInstanceReg);
__ ExtractClassIdFromTags(AllocateObjectABI::kResultReg, kTagsReg);
__ LoadClassById(kClsReg, AllocateObjectABI::kResultReg);
__ LoadObject(kInstanceReg, NullObject());
__ LoadObject(AllocateObjectABI::kResultReg, NullObject());
// Pushes result slot, then parameter class.
__ PushRegisterPair(kClsReg, kInstanceReg);
__ PushRegisterPair(kClsReg, AllocateObjectABI::kResultReg);
// Should be Object::null() if class is non-parameterized.
__ Push(kAllocationStubTypeArgumentsReg);
__ Push(AllocateObjectABI::kTypeArgumentsReg);
__ CallRuntime(kAllocateObjectRuntimeEntry, 2);
// Load result off the stack into result register.
__ ldr(kInstanceReg, Address(SP, 2 * target::kWordSize));
__ ldr(AllocateObjectABI::kResultReg, Address(SP, 2 * target::kWordSize));
// Write-barrier elimination is enabled for [cls] and we therefore need to
// ensure that the object is in new-space or has remembered bit set.
@ -1894,10 +1894,7 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
const uword tags =
target::MakeTagWordForNewSpaceObject(cls_id, instance_size);
// Note: Keep in sync with helper function.
// kInstanceReg = R0
const Register kTagsReg = R2;
// kAllocationStubTypeArgumentsReg = R3
__ LoadImmediate(kTagsReg, tags);
@ -1933,7 +1930,7 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
}
} else {
if (!is_cls_parameterized) {
__ LoadObject(kAllocationStubTypeArgumentsReg, NullObject());
__ LoadObject(AllocateObjectABI::kTypeArgumentsReg, NullObject());
}
__ ldr(PC,
Address(THR,

View file

@ -1865,8 +1865,6 @@ void StubCodeCompiler::GenerateArrayWriteBarrierStub(Assembler* assembler) {
static void GenerateAllocateObjectHelper(Assembler* assembler,
bool is_cls_parameterized) {
const Register kInstanceReg = R0;
// kAllocationStubTypeArgumentsReg = R1
const Register kTagsReg = R2;
{
@ -1882,11 +1880,12 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
__ ExtractInstanceSizeFromTags(kInstanceSizeReg, kTagsReg);
// Load two words from Thread::top: top and end.
// kInstanceReg: potential next object start.
__ ldp(kInstanceReg, kEndReg,
// AllocateObjectABI::kResultReg: potential next object start.
__ ldp(AllocateObjectABI::kResultReg, kEndReg,
Address(THR, target::Thread::top_offset(), Address::PairOffset));
__ add(kNewTopReg, kInstanceReg, Operand(kInstanceSizeReg));
__ add(kNewTopReg, AllocateObjectABI::kResultReg,
Operand(kInstanceSizeReg));
__ CompareRegisters(kEndReg, kNewTopReg);
__ b(&slow_case, UNSIGNED_LESS_EQUAL);
@ -1897,13 +1896,14 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
} // kInstanceSizeReg = R4, kEndReg = R5
// Tags.
__ str(kTagsReg, Address(kInstanceReg, target::Object::tags_offset()));
__ str(kTagsReg, Address(AllocateObjectABI::kResultReg,
target::Object::tags_offset()));
// Initialize the remaining words of the object.
{
const Register kFieldReg = R4;
__ AddImmediate(kFieldReg, kInstanceReg,
__ AddImmediate(kFieldReg, AllocateObjectABI::kResultReg,
target::Instance::first_field_offset());
Label done, init_loop;
__ Bind(&init_loop);
@ -1935,14 +1935,16 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
// Set the type arguments in the new object.
__ StoreIntoObjectNoBarrier(
kInstanceReg,
Address(kInstanceReg, kTypeOffestReg, UXTX, Address::Scaled),
kAllocationStubTypeArgumentsReg);
AllocateObjectABI::kResultReg,
Address(AllocateObjectABI::kResultReg, kTypeOffestReg, UXTX,
Address::Scaled),
AllocateObjectABI::kTypeArgumentsReg);
__ Bind(&not_parameterized_case);
} // kClsIdReg = R4, kTypeOffestReg = R5
__ AddImmediate(kInstanceReg, kInstanceReg, kHeapObjectTag);
__ AddImmediate(AllocateObjectABI::kResultReg,
AllocateObjectABI::kResultReg, kHeapObjectTag);
__ ret();
@ -1951,7 +1953,7 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
// Fall back on slow case:
if (!is_cls_parameterized) {
__ mov(kAllocationStubTypeArgumentsReg, NULL_REG);
__ mov(AllocateObjectABI::kTypeArgumentsReg, NULL_REG);
}
// Tail call to generic allocation stub.
__ ldr(
@ -1971,8 +1973,6 @@ void StubCodeCompiler::GenerateAllocateObjectParameterizedStub(
}
void StubCodeCompiler::GenerateAllocateObjectSlowStub(Assembler* assembler) {
const Register kInstanceReg = R0;
// kAllocationStubTypeArgumentsReg = R1
const Register kTagsToClsIdReg = R2;
if (!FLAG_use_bare_instructions) {
@ -1990,12 +1990,12 @@ void StubCodeCompiler::GenerateAllocateObjectSlowStub(Assembler* assembler) {
__ PushPair(R0, NULL_REG); // Pushes result slot, then class object.
// Should be Object::null() if class is non-parameterized.
__ Push(kAllocationStubTypeArgumentsReg);
__ Push(AllocateObjectABI::kTypeArgumentsReg);
__ CallRuntime(kAllocateObjectRuntimeEntry, 2);
// Load result off the stack into result register.
__ ldr(kInstanceReg, Address(SP, 2 * target::kWordSize));
__ ldr(AllocateObjectABI::kResultReg, Address(SP, 2 * target::kWordSize));
// Write-barrier elimination is enabled for [cls] and we therefore need to
// ensure that the object is in new-space or has remembered bit set.
@ -2013,9 +2013,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
const Class& cls,
const Code& allocate_object,
const Code& allocat_object_parametrized) {
static_assert(kAllocationStubTypeArgumentsReg == R1,
"Adjust register allocation in the AllocationStub");
classid_t cls_id = target::Class::GetId(cls);
ASSERT(cls_id != kIllegalCid);
@ -2034,8 +2031,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
target::MakeTagWordForNewSpaceObject(cls_id, instance_size);
// Note: Keep in sync with helper function.
// kInstanceReg = R0
// kAllocationStubTypeArgumentsReg = R1
const Register kTagsReg = R2;
__ LoadImmediate(kTagsReg, tags);
@ -2074,7 +2069,7 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
}
} else {
if (!is_cls_parameterized) {
__ LoadObject(kAllocationStubTypeArgumentsReg, NullObject());
__ LoadObject(AllocateObjectABI::kTypeArgumentsReg, NullObject());
}
__ ldr(R4,
Address(THR,

View file

@ -1453,9 +1453,9 @@ void StubCodeCompiler::GenerateAllocateObjectSlowStub(Assembler* assembler) {
// Called for inline allocation of objects.
// Input parameters:
// ESP : points to return address.
// kAllocationStubTypeArgumentsReg (EDX) : type arguments object
// (only if class is parameterized).
// Uses EAX, EBX, ECX, EDX, EDI as temporary registers.
// AllocateObjectABI::kTypeArgumentsReg : type arguments object
// (only if class is parameterized).
// Uses AllocateObjectABI::kResultReg, EBX, ECX, EDI as temporary registers.
// Returns patch_code_pc offset where patching code for disabling the stub
// has been generated (similar to regularly generated Dart code).
void StubCodeCompiler::GenerateAllocationStubForClass(
@ -1476,41 +1476,45 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
const intptr_t instance_size = target::Class::GetInstanceSize(cls);
ASSERT(instance_size > 0);
// EDX: instantiated type arguments (if is_cls_parameterized).
static_assert(kAllocationStubTypeArgumentsReg == EDX,
"Adjust register allocation in the AllocationStub");
// AllocateObjectABI::kTypeArgumentsReg: new object type arguments
// (if is_cls_parameterized).
if (!FLAG_use_slow_path && FLAG_inline_alloc &&
target::Heap::IsAllocatableInNewSpace(instance_size) &&
!target::Class::TraceAllocation(cls)) {
Label slow_case;
// Allocate the object and update top to point to
// next object start and initialize the allocated object.
// EDX: instantiated type arguments (if is_cls_parameterized).
__ movl(EAX, Address(THR, target::Thread::top_offset()));
__ leal(EBX, Address(EAX, instance_size));
// AllocateObjectABI::kTypeArgumentsReg: new object type arguments
// (if is_cls_parameterized).
__ movl(AllocateObjectABI::kResultReg,
Address(THR, target::Thread::top_offset()));
__ leal(EBX, Address(AllocateObjectABI::kResultReg, instance_size));
// Check if the allocation fits into the remaining space.
// EAX: potential new object start.
// AllocateObjectABI::kResultReg: potential new object start.
// EBX: potential next object start.
__ cmpl(EBX, Address(THR, target::Thread::end_offset()));
__ j(ABOVE_EQUAL, &slow_case);
__ movl(Address(THR, target::Thread::top_offset()), EBX);
// EAX: new object start (untagged).
// AllocateObjectABI::kResultReg: new object start (untagged).
// EBX: next object start.
// EDX: new object type arguments (if is_cls_parameterized).
// AllocateObjectABI::kTypeArgumentsReg: new object type arguments
// (if is_cls_parameterized).
// Set the tags.
ASSERT(target::Class::GetId(cls) != kIllegalCid);
uword tags = target::MakeTagWordForNewSpaceObject(target::Class::GetId(cls),
instance_size);
__ movl(Address(EAX, target::Object::tags_offset()), Immediate(tags));
__ addl(EAX, Immediate(kHeapObjectTag));
__ movl(
Address(AllocateObjectABI::kResultReg, target::Object::tags_offset()),
Immediate(tags));
__ addl(AllocateObjectABI::kResultReg, Immediate(kHeapObjectTag));
// Initialize the remaining words of the object.
// EAX: new object (tagged).
// AllocateObjectABI::kResultReg: new object (tagged).
// EBX: next object start.
// EDX: new object type arguments (if is_cls_parameterized).
// AllocateObjectABI::kTypeArgumentsReg: new object type arguments
// (if is_cls_parameterized).
// First try inlining the initialization without a loop.
if (instance_size < (kInlineInstanceSize * target::kWordSize)) {
// Check if the object contains any non-header fields.
@ -1518,42 +1522,49 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
for (intptr_t current_offset = target::Instance::first_field_offset();
current_offset < instance_size;
current_offset += target::kWordSize) {
__ StoreIntoObjectNoBarrier(EAX, FieldAddress(EAX, current_offset),
NullObject());
__ StoreIntoObjectNoBarrier(
AllocateObjectABI::kResultReg,
FieldAddress(AllocateObjectABI::kResultReg, current_offset),
NullObject());
}
} else {
__ leal(ECX, FieldAddress(EAX, target::Instance::first_field_offset()));
__ leal(ECX, FieldAddress(AllocateObjectABI::kResultReg,
target::Instance::first_field_offset()));
// Loop until the whole object is initialized.
// EAX: new object (tagged).
// AllocateObjectABI::kResultReg: new object (tagged).
// EBX: next object start.
// ECX: next word to be initialized.
// EDX: new object type arguments (if is_cls_parameterized).
// AllocateObjectABI::kTypeArgumentsReg: new object type arguments
// (if is_cls_parameterized).
Label init_loop;
Label done;
__ Bind(&init_loop);
__ cmpl(ECX, EBX);
__ j(ABOVE_EQUAL, &done, Assembler::kNearJump);
__ StoreIntoObjectNoBarrier(EAX, Address(ECX, 0), NullObject());
__ StoreIntoObjectNoBarrier(AllocateObjectABI::kResultReg,
Address(ECX, 0), NullObject());
__ addl(ECX, Immediate(target::kWordSize));
__ jmp(&init_loop, Assembler::kNearJump);
__ Bind(&done);
}
if (is_cls_parameterized) {
// EAX: new object (tagged).
// EDX: new object type arguments.
// AllocateObjectABI::kResultReg: new object (tagged).
// AllocateObjectABI::kTypeArgumentsReg: new object type arguments.
// Set the type arguments in the new object.
const intptr_t offset = target::Class::TypeArgumentsFieldOffset(cls);
__ StoreIntoObjectNoBarrier(EAX, FieldAddress(EAX, offset),
kAllocationStubTypeArgumentsReg);
__ StoreIntoObjectNoBarrier(
AllocateObjectABI::kResultReg,
FieldAddress(AllocateObjectABI::kResultReg, offset),
AllocateObjectABI::kTypeArgumentsReg);
}
// Done allocating and initializing the instance.
// EAX: new object (tagged).
// AllocateObjectABI::kResultReg: new object (tagged).
__ ret();
__ Bind(&slow_case);
}
// If is_cls_parameterized:
// EDX: new object type arguments.
// AllocateObjectABI::kTypeArgumentsReg: new object type arguments.
// Create a stub frame as we are pushing some objects on the stack before
// calling into the runtime.
__ EnterStubFrame();
@ -1562,14 +1573,14 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
CastHandle<Object>(cls)); // Push class of object to be allocated.
if (is_cls_parameterized) {
// Push type arguments of object to be allocated.
__ pushl(kAllocationStubTypeArgumentsReg);
__ pushl(AllocateObjectABI::kTypeArgumentsReg);
} else {
__ pushl(raw_null); // Push null type arguments.
}
__ CallRuntime(kAllocateObjectRuntimeEntry, 2); // Allocate object.
__ popl(EAX); // Pop argument (type arguments of object).
__ popl(EAX); // Pop argument (class of object).
__ popl(EAX); // Pop result (newly allocated object).
__ popl(AllocateObjectABI::kResultReg); // Drop type arguments.
__ popl(AllocateObjectABI::kResultReg); // Drop class.
__ popl(AllocateObjectABI::kResultReg); // Pop allocated object.
if (AllocateObjectInstr::WillAllocateNewOrRemembered(cls)) {
// Write-barrier elimination is enabled for [cls] and we therefore need to
@ -1577,7 +1588,7 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
EnsureIsNewOrRemembered(assembler, /*preserve_registers=*/false);
}
// EAX: new object
// AllocateObjectABI::kResultReg: new object
// Restore the frame pointer.
__ LeaveFrame();
__ ret();

View file

@ -1790,7 +1790,6 @@ void StubCodeCompiler::GenerateArrayWriteBarrierStub(Assembler* assembler) {
static void GenerateAllocateObjectHelper(Assembler* assembler,
bool is_cls_parameterized) {
// Note: Keep in sync with calling function.
// kAllocationStubTypeArgumentsReg = RDX
const Register kTagsReg = R8;
{
@ -1804,8 +1803,10 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
__ ExtractInstanceSizeFromTags(kInstanceSizeReg, kTagsReg);
__ movq(RAX, Address(THR, target::Thread::top_offset()));
__ leaq(kNewTopReg, Address(RAX, kInstanceSizeReg, TIMES_1, 0));
__ movq(AllocateObjectABI::kResultReg,
Address(THR, target::Thread::top_offset()));
__ leaq(kNewTopReg, Address(AllocateObjectABI::kResultReg,
kInstanceSizeReg, TIMES_1, 0));
// Check if the allocation fits into the remaining space.
__ cmpq(kNewTopReg, Address(THR, target::Thread::end_offset()));
__ j(ABOVE_EQUAL, &slow_case);
@ -1815,15 +1816,18 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
// Set the tags.
// 64 bit store also zeros the identity hash field.
__ movq(Address(RAX, target::Object::tags_offset()), kTagsReg);
__ movq(
Address(AllocateObjectABI::kResultReg, target::Object::tags_offset()),
kTagsReg);
__ addq(RAX, Immediate(kHeapObjectTag));
__ addq(AllocateObjectABI::kResultReg, Immediate(kHeapObjectTag));
// Initialize the remaining words of the object.
{
const Register kNextFieldReg = RDI;
__ leaq(kNextFieldReg,
FieldAddress(RAX, target::Instance::first_field_offset()));
FieldAddress(AllocateObjectABI::kResultReg,
target::Instance::first_field_offset()));
const Register kNullReg = R10;
__ LoadObject(kNullReg, NullObject());
@ -1839,7 +1843,8 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
static auto const kJumpLength = Assembler::kNearJump;
#endif // DEBUG
__ j(ABOVE_EQUAL, &done, kJumpLength);
__ StoreIntoObjectNoBarrier(RAX, Address(kNextFieldReg, 0), kNullReg);
__ StoreIntoObjectNoBarrier(AllocateObjectABI::kResultReg,
Address(kNextFieldReg, 0), kNullReg);
__ addq(kNextFieldReg, Immediate(target::kWordSize));
__ jmp(&init_loop, Assembler::kNearJump);
__ Bind(&done);
@ -1862,8 +1867,10 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
host_type_arguments_field_offset_in_words_offset()));
// Set the type arguments in the new object.
__ StoreIntoObject(RAX, FieldAddress(RAX, kTypeOffsetReg, TIMES_8, 0),
kAllocationStubTypeArgumentsReg);
__ StoreIntoObject(AllocateObjectABI::kResultReg,
FieldAddress(AllocateObjectABI::kResultReg,
kTypeOffsetReg, TIMES_8, 0),
AllocateObjectABI::kTypeArgumentsReg);
__ Bind(&not_parameterized_case);
} // kTypeOffsetReg = RDI;
@ -1875,7 +1882,7 @@ static void GenerateAllocateObjectHelper(Assembler* assembler,
// Fall back on slow case:
if (!is_cls_parameterized) {
__ LoadObject(kAllocationStubTypeArgumentsReg, NullObject());
__ LoadObject(AllocateObjectABI::kTypeArgumentsReg, NullObject());
}
// Tail call to generic allocation stub.
__ jmp(
@ -1893,8 +1900,6 @@ void StubCodeCompiler::GenerateAllocateObjectParameterizedStub(
}
void StubCodeCompiler::GenerateAllocateObjectSlowStub(Assembler* assembler) {
// Note: Keep in sync with calling stub.
// kAllocationStubTypeArgumentsReg = RDX
const Register kTagsToClsIdReg = R8;
if (!FLAG_use_bare_instructions) {
@ -1909,27 +1914,27 @@ void StubCodeCompiler::GenerateAllocateObjectSlowStub(Assembler* assembler) {
__ EnterStubFrame();
// Setup space on stack for return value.
__ LoadObject(RAX, NullObject());
__ pushq(RAX);
__ LoadObject(AllocateObjectABI::kResultReg, NullObject());
__ pushq(AllocateObjectABI::kResultReg);
// Push class of object to be allocated.
__ LoadClassById(RAX, kTagsToClsIdReg);
__ pushq(RAX);
__ LoadClassById(AllocateObjectABI::kResultReg, kTagsToClsIdReg);
__ pushq(AllocateObjectABI::kResultReg);
// Must be Object::null() if non-parameterized class.
__ pushq(kAllocationStubTypeArgumentsReg);
__ pushq(AllocateObjectABI::kTypeArgumentsReg);
__ CallRuntime(kAllocateObjectRuntimeEntry, 2);
__ popq(RAX); // Pop argument (type arguments of object).
__ popq(RAX); // Pop argument (class of object).
__ popq(RAX); // Pop result (newly allocated object).
__ popq(AllocateObjectABI::kResultReg); // Drop type arguments.
__ popq(AllocateObjectABI::kResultReg); // Drop class.
__ popq(AllocateObjectABI::kResultReg); // Pop newly allocated object.
// Write-barrier elimination is enabled for [cls] and we therefore need to
// ensure that the object is in new-space or has remembered bit set.
EnsureIsNewOrRemembered(assembler, /*preserve_registers=*/false);
// RAX: new object
// AllocateObjectABI::kResultReg: new object
// Restore the frame pointer.
__ LeaveStubFrame();
@ -1943,9 +1948,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
const Class& cls,
const Code& allocate_object,
const Code& allocat_object_parametrized) {
static_assert(kAllocationStubTypeArgumentsReg == RDX,
"Adjust register allocation in the AllocationStub");
classid_t cls_id = target::Class::GetId(cls);
ASSERT(cls_id != kIllegalCid);
@ -1967,8 +1969,6 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
const uword tags =
target::MakeTagWordForNewSpaceObject(cls_id, instance_size);
// Note: Keep in sync with helper function.
// kAllocationStubTypeArgumentsReg = RDX
const Register kTagsReg = R8;
__ movq(kTagsReg, Immediate(tags));
@ -2004,7 +2004,7 @@ void StubCodeCompiler::GenerateAllocationStubForClass(
}
} else {
if (!is_cls_parameterized) {
__ LoadObject(kAllocationStubTypeArgumentsReg, NullObject());
__ LoadObject(AllocateObjectABI::kTypeArgumentsReg, NullObject());
}
__ jmp(Address(THR,
target::Thread::allocate_object_slow_entry_point_offset()));

View file

@ -320,9 +320,6 @@ const Register kWriteBarrierObjectReg = R1;
const Register kWriteBarrierValueReg = R0;
const Register kWriteBarrierSlotReg = R9;
// ABI for allocation stubs.
const Register kAllocationStubTypeArgumentsReg = R3;
// Common ABI for shared slow path stubs.
struct SharedSlowPathStubABI {
static const Register kResultReg = R0;
@ -451,36 +448,42 @@ struct RangeErrorABI {
static const Register kIndexReg = R1;
};
// ABI for AllocateObjectStub.
struct AllocateObjectABI {
static const Register kResultReg = R0;
static const Register kTypeArgumentsReg = R3;
};
// ABI for AllocateClosureStub.
struct AllocateClosureABI {
static const Register kResultReg = R0;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kFunctionReg = R1;
static const Register kScratchReg = R4;
};
// ABI for AllocateMintShared*Stub.
struct AllocateMintABI {
static const Register kResultReg = R0;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kTempReg = R1;
};
// ABI for Allocate{Mint,Double,Float32x4,Float64x2}Stub.
struct AllocateBoxABI {
static const Register kResultReg = R0;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kTempReg = R1;
};
// ABI for AllocateArrayStub.
struct AllocateArrayABI {
static const Register kResultReg = R0;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kLengthReg = R2;
static const Register kTypeArgumentsReg = R1;
};
// ABI for AllocateTypedDataArrayStub.
struct AllocateTypedDataArrayABI {
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kLengthReg = R4;
static const Register kResultReg = R0;
};
// ABI for DispatchTableNullErrorStub and consequently for all dispatch

View file

@ -153,9 +153,6 @@ const Register kWriteBarrierObjectReg = R1;
const Register kWriteBarrierValueReg = R0;
const Register kWriteBarrierSlotReg = R25;
// ABI for allocation stubs.
const Register kAllocationStubTypeArgumentsReg = R1;
// Common ABI for shared slow path stubs.
struct SharedSlowPathStubABI {
static const Register kResultReg = R0;
@ -291,36 +288,42 @@ struct RangeErrorABI {
static const Register kIndexReg = R1;
};
// ABI for AllocateObjectStub.
struct AllocateObjectABI {
static const Register kResultReg = R0;
static const Register kTypeArgumentsReg = R1;
};
// ABI for AllocateClosureStub.
struct AllocateClosureABI {
static const Register kResultReg = R0;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kFunctionReg = R1;
static const Register kScratchReg = R4;
};
// ABI for AllocateMintShared*Stub.
struct AllocateMintABI {
static const Register kResultReg = R0;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kTempReg = R1;
};
// ABI for Allocate{Mint,Double,Float32x4,Float64x2}Stub.
struct AllocateBoxABI {
static const Register kResultReg = R0;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kTempReg = R1;
};
// ABI for AllocateArrayStub.
struct AllocateArrayABI {
static const Register kResultReg = R0;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kLengthReg = R2;
static const Register kTypeArgumentsReg = R1;
};
// ABI for AllocateTypedDataArrayStub.
struct AllocateTypedDataArrayABI {
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kLengthReg = R4;
static const Register kResultReg = R0;
};
// ABI for DispatchTableNullErrorStub and consequently for all dispatch

View file

@ -88,9 +88,6 @@ const Register kWriteBarrierObjectReg = EDX;
const Register kWriteBarrierValueReg = kNoRegister;
const Register kWriteBarrierSlotReg = EDI;
// ABI for allocation stubs.
const Register kAllocationStubTypeArgumentsReg = EDX;
// Common ABI for shared slow path stubs.
struct SharedSlowPathStubABI {
static const Register kResultReg = EAX;
@ -196,30 +193,36 @@ struct RangeErrorABI {
static const Register kIndexReg = EBX;
};
// ABI for AllocateObjectStub.
struct AllocateObjectABI {
static const Register kResultReg = EAX;
static const Register kTypeArgumentsReg = EDX;
};
// ABI for Allocate{Mint,Double,Float32x4,Float64x2}Stub.
struct AllocateBoxABI {
static const Register kResultReg = EAX;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kTempReg = EBX;
};
// ABI for AllocateClosureStub.
struct AllocateClosureABI {
static const Register kResultReg = EAX;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kFunctionReg = EBX;
static const Register kScratchReg = EDX;
};
// ABI for AllocateArrayStub.
struct AllocateArrayABI {
static const Register kResultReg = EAX;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kLengthReg = EDX;
static const Register kTypeArgumentsReg = ECX;
};
// ABI for AllocateTypedDataArrayStub.
struct AllocateTypedDataArrayABI {
static const Register kLengthReg = EAX;
static const Register kResultReg = EAX;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kLengthReg = kResultReg;
};
// ABI for DispatchTableNullErrorStub and consequently for all dispatch

View file

@ -129,9 +129,6 @@ const Register kWriteBarrierObjectReg = RDX;
const Register kWriteBarrierValueReg = RAX;
const Register kWriteBarrierSlotReg = R13;
// ABI for allocation stubs.
const Register kAllocationStubTypeArgumentsReg = RDX;
// Common ABI for shared slow path stubs.
struct SharedSlowPathStubABI {
static const Register kResultReg = RAX;
@ -263,36 +260,42 @@ struct RangeErrorABI {
static const Register kIndexReg = RBX;
};
// ABI for AllocateObjectStub.
struct AllocateObjectABI {
static const Register kResultReg = RAX;
static const Register kTypeArgumentsReg = RDX;
};
// ABI for AllocateClosureStub.
struct AllocateClosureABI {
static const Register kResultReg = RAX;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kFunctionReg = RBX;
static const Register kScratchReg = R13;
};
// ABI for AllocateMintShared*Stub.
struct AllocateMintABI {
static const Register kResultReg = RAX;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kTempReg = RBX;
};
// ABI for Allocate{Mint,Double,Float32x4,Float64x2}Stub.
struct AllocateBoxABI {
static const Register kResultReg = RAX;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kTempReg = RBX;
};
// ABI for AllocateArrayStub.
struct AllocateArrayABI {
static const Register kResultReg = RAX;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kLengthReg = R10;
static const Register kTypeArgumentsReg = RBX;
};
// ABI for AllocateTypedDataArrayStub.
struct AllocateTypedDataArrayABI {
static const Register kLengthReg = RAX;
static const Register kResultReg = RAX;
static const Register kResultReg = AllocateObjectABI::kResultReg;
static const Register kLengthReg = kResultReg;
};
// ABI for DispatchTableNullErrorStub and consequently for all dispatch