[vm, compiler] Honor --no_inline_alloc in suspend state and record stubs.
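
Inline allocation in the record stub and in the shared suspend-state
allocation helper previously ran unconditionally. Both now check
FLAG_use_slow_path and FLAG_inline_alloc and fall back to the runtime
allocator when inline allocation is disabled, and the GC stress tests gain
a --no_inline_alloc configuration to exercise those paths. As part of the
cleanup, EnsureIsNewOrRemembered loses its default argument so every call
site states preserve_registers explicitly.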

TEST=ci
Change-Id: Iae70a0edd776d88f2e26783d7348f2e5daf25ad9
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/312080
Commit-Queue: Ryan Macnak <rmacnak@google.com>
Reviewed-by: Alexander Aprelev <aam@google.com>
Ryan Macnak 2023-07-10 22:03:08 +00:00 committed by Commit Queue
parent 2094a733aa
commit b050ec9449
17 changed files with 124 additions and 101 deletions

View file

@@ -27,6 +27,7 @@
 // VMOptions=--verify_store_buffer
 // VMOptions=--verify_after_marking
 // VMOptions=--stress_write_barrier_elimination
+// VMOptions=--no_inline_alloc
 // VMOptions=--old_gen_heap_size=150
 
 import "dart:ffi";

View file

@@ -27,6 +27,7 @@
 // VMOptions=--verify_store_buffer
 // VMOptions=--verify_after_marking
 // VMOptions=--stress_write_barrier_elimination
+// VMOptions=--no_inline_alloc
 // VMOptions=--old_gen_heap_size=150
 
 import "splay_common.dart";

View file

@@ -29,6 +29,7 @@
 // VMOptions=--verify_store_buffer
 // VMOptions=--verify_after_marking
 // VMOptions=--stress_write_barrier_elimination
+// VMOptions=--no_inline_alloc
 // VMOptions=--old_gen_heap_size=150
 
 import "splay_common.dart";

View file

@@ -27,6 +27,7 @@
 // VMOptions=--profiler --verify_store_buffer
 // VMOptions=--profiler --verify_after_marking
 // VMOptions=--profiler --stress_write_barrier_elimination
+// VMOptions=--profiler --no_inline_alloc
 // VMOptions=--profiler --old_gen_heap_size=100
 // VMOptions=--profiler --mark_when_idle
 // VMOptions=--profiler --no_load_cse

View file

@@ -27,6 +27,7 @@
 // VMOptions=--verify_store_buffer
 // VMOptions=--verify_after_marking
 // VMOptions=--stress_write_barrier_elimination
+// VMOptions=--no_inline_alloc
 // VMOptions=--old_gen_heap_size=150
 
 import "splay_common.dart";

View file

@@ -31,6 +31,7 @@
 // VMOptions=--verify_store_buffer
 // VMOptions=--verify_after_marking
 // VMOptions=--stress_write_barrier_elimination
+// VMOptions=--no_inline_alloc
 // VMOptions=--old_gen_heap_size=150
 
 import "dart:ffi";

View file

@@ -31,6 +31,7 @@
 // VMOptions=--verify_store_buffer
 // VMOptions=--verify_after_marking
 // VMOptions=--stress_write_barrier_elimination
+// VMOptions=--no_inline_alloc
 // VMOptions=--old_gen_heap_size=150
 
 import "splay_common.dart";

View file

@@ -33,6 +33,7 @@
 // VMOptions=--verify_store_buffer
 // VMOptions=--verify_after_marking
 // VMOptions=--stress_write_barrier_elimination
+// VMOptions=--no_inline_alloc
 // VMOptions=--old_gen_heap_size=150
 
 import "splay_common.dart";

View file

@@ -31,6 +31,7 @@
 // VMOptions=--profiler --verify_store_buffer
 // VMOptions=--profiler --verify_after_marking
 // VMOptions=--profiler --stress_write_barrier_elimination
+// VMOptions=--profiler --no_inline_alloc
 // VMOptions=--profiler --old_gen_heap_size=100
 // VMOptions=--profiler --mark_when_idle
 // VMOptions=--profiler --no_load_cse

View file

@@ -31,6 +31,7 @@
 // VMOptions=--verify_store_buffer
 // VMOptions=--verify_after_marking
 // VMOptions=--stress_write_barrier_elimination
+// VMOptions=--no_inline_alloc
 // VMOptions=--old_gen_heap_size=150
 
 import "splay_common.dart";

View file

@@ -1311,98 +1311,101 @@ void StubCodeCompiler::GenerateAllocateRecordStub() {
   const Register shape_reg = AllocateRecordABI::kShapeReg;
   const Register temp_reg = AllocateRecordABI::kTemp1Reg;
   const Register new_top_reg = AllocateRecordABI::kTemp2Reg;
-  Label slow_case;
-  // Check for allocation tracing.
-  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kRecordCid, &slow_case, temp_reg));
+  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
+    Label slow_case;
-  // Extract number of fields from the shape.
-  __ AndImmediate(
-      temp_reg, shape_reg,
-      compiler::target::RecordShape::kNumFieldsMask << kSmiTagShift);
+    // Check for allocation tracing.
+    NOT_IN_PRODUCT(__ MaybeTraceAllocation(kRecordCid, &slow_case, temp_reg));
-  // Compute the rounded instance size.
-  const intptr_t fixed_size_plus_alignment_padding =
-      (target::Record::field_offset(0) +
-       target::ObjectAlignment::kObjectAlignment - 1);
-  __ AddScaled(temp_reg, temp_reg, TIMES_COMPRESSED_HALF_WORD_SIZE,
-               fixed_size_plus_alignment_padding);
-  __ AndImmediate(temp_reg, -target::ObjectAlignment::kObjectAlignment);
+    // Extract number of fields from the shape.
+    __ AndImmediate(
+        temp_reg, shape_reg,
+        compiler::target::RecordShape::kNumFieldsMask << kSmiTagShift);
-  // Now allocate the object.
-  __ LoadFromOffset(result_reg, Address(THR, target::Thread::top_offset()));
-  __ MoveRegister(new_top_reg, temp_reg);
-  __ AddRegisters(new_top_reg, result_reg);
-  // Check if the allocation fits into the remaining space.
-  __ CompareWithMemoryValue(new_top_reg,
-                            Address(THR, target::Thread::end_offset()));
-  __ BranchIf(UNSIGNED_GREATER_EQUAL, &slow_case);
+    // Compute the rounded instance size.
+    const intptr_t fixed_size_plus_alignment_padding =
+        (target::Record::field_offset(0) +
+         target::ObjectAlignment::kObjectAlignment - 1);
+    __ AddScaled(temp_reg, temp_reg, TIMES_COMPRESSED_HALF_WORD_SIZE,
+                 fixed_size_plus_alignment_padding);
+    __ AndImmediate(temp_reg, -target::ObjectAlignment::kObjectAlignment);
-  // Successfully allocated the object, now update top to point to
-  // next object start and initialize the object.
-  __ StoreToOffset(new_top_reg, Address(THR, target::Thread::top_offset()));
-  __ AddImmediate(result_reg, kHeapObjectTag);
+    // Now allocate the object.
+    __ LoadFromOffset(result_reg, Address(THR, target::Thread::top_offset()));
+    __ MoveRegister(new_top_reg, temp_reg);
+    __ AddRegisters(new_top_reg, result_reg);
+    // Check if the allocation fits into the remaining space.
+    __ CompareWithMemoryValue(new_top_reg,
+                              Address(THR, target::Thread::end_offset()));
+    __ BranchIf(UNSIGNED_GREATER_EQUAL, &slow_case);
-  // Calculate the size tag.
-  {
-    Label size_tag_overflow, done;
-    __ CompareImmediate(temp_reg, target::UntaggedObject::kSizeTagMaxSizeTag);
-    __ BranchIf(UNSIGNED_GREATER, &size_tag_overflow, Assembler::kNearJump);
-    __ LslImmediate(temp_reg,
-                    target::UntaggedObject::kTagBitsSizeTagPos -
-                        target::ObjectAlignment::kObjectAlignmentLog2);
-    __ Jump(&done, Assembler::kNearJump);
+    // Successfully allocated the object, now update top to point to
+    // next object start and initialize the object.
+    __ StoreToOffset(new_top_reg, Address(THR, target::Thread::top_offset()));
+    __ AddImmediate(result_reg, kHeapObjectTag);
-    __ Bind(&size_tag_overflow);
-    // Set overflow size tag value.
-    __ LoadImmediate(temp_reg, 0);
+    // Calculate the size tag.
+    {
+      Label size_tag_overflow, done;
+      __ CompareImmediate(temp_reg, target::UntaggedObject::kSizeTagMaxSizeTag);
+      __ BranchIf(UNSIGNED_GREATER, &size_tag_overflow, Assembler::kNearJump);
+      __ LslImmediate(temp_reg,
+                      target::UntaggedObject::kTagBitsSizeTagPos -
+                          target::ObjectAlignment::kObjectAlignmentLog2);
+      __ Jump(&done, Assembler::kNearJump);
-    __ Bind(&done);
-    uword tags = target::MakeTagWordForNewSpaceObject(kRecordCid, 0);
-    __ OrImmediate(temp_reg, tags);
-    __ StoreToOffset(
-        temp_reg,
-        FieldAddress(result_reg, target::Object::tags_offset()));  // Tags.
-  }
+      __ Bind(&size_tag_overflow);
+      // Set overflow size tag value.
+      __ LoadImmediate(temp_reg, 0);
-  __ StoreCompressedIntoObjectNoBarrier(
-      result_reg, FieldAddress(result_reg, target::Record::shape_offset()),
-      shape_reg);
+      __ Bind(&done);
+      uword tags = target::MakeTagWordForNewSpaceObject(kRecordCid, 0);
+      __ OrImmediate(temp_reg, tags);
+      __ StoreToOffset(
+          temp_reg,
+          FieldAddress(result_reg, target::Object::tags_offset()));  // Tags.
+    }
-  // Initialize the remaining words of the object.
-  {
-    const Register field_reg = shape_reg;
+    __ StoreCompressedIntoObjectNoBarrier(
+        result_reg, FieldAddress(result_reg, target::Record::shape_offset()),
+        shape_reg);
+    // Initialize the remaining words of the object.
+    {
+      const Register field_reg = shape_reg;
 #if defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_RISCV32) ||             \
     defined(TARGET_ARCH_RISCV64)
-    const Register null_reg = NULL_REG;
+      const Register null_reg = NULL_REG;
 #else
-    const Register null_reg = temp_reg;
-    __ LoadObject(null_reg, NullObject());
+      const Register null_reg = temp_reg;
+      __ LoadObject(null_reg, NullObject());
 #endif
-    Label loop, done;
-    __ AddImmediate(field_reg, result_reg, target::Record::field_offset(0));
-    __ CompareRegisters(field_reg, new_top_reg);
-    __ BranchIf(UNSIGNED_GREATER_EQUAL, &done, Assembler::kNearJump);
+      Label loop, done;
+      __ AddImmediate(field_reg, result_reg, target::Record::field_offset(0));
+      __ CompareRegisters(field_reg, new_top_reg);
+      __ BranchIf(UNSIGNED_GREATER_EQUAL, &done, Assembler::kNearJump);
-    __ Bind(&loop);
-    for (intptr_t offset = 0; offset < target::kObjectAlignment;
-         offset += target::kCompressedWordSize) {
-      __ StoreCompressedIntoObjectNoBarrier(
-          result_reg, FieldAddress(field_reg, offset), null_reg);
-    }
+      __ Bind(&loop);
+      for (intptr_t offset = 0; offset < target::kObjectAlignment;
+           offset += target::kCompressedWordSize) {
+        __ StoreCompressedIntoObjectNoBarrier(
+            result_reg, FieldAddress(field_reg, offset), null_reg);
+      }
-    // Safe to only check every kObjectAlignment bytes instead of each word.
-    ASSERT(kAllocationRedZoneSize >= target::kObjectAlignment);
-    __ AddImmediate(field_reg, target::kObjectAlignment);
-    __ CompareRegisters(field_reg, new_top_reg);
-    __ BranchIf(UNSIGNED_LESS, &loop, Assembler::kNearJump);
-    __ Bind(&done);
-  }
+      // Safe to only check every kObjectAlignment bytes instead of each word.
+      ASSERT(kAllocationRedZoneSize >= target::kObjectAlignment);
+      __ AddImmediate(field_reg, target::kObjectAlignment);
+      __ CompareRegisters(field_reg, new_top_reg);
+      __ BranchIf(UNSIGNED_LESS, &loop, Assembler::kNearJump);
+      __ Bind(&done);
+    }
-  __ Ret();
-  __ Bind(&slow_case);
+    __ Ret();
+    __ Bind(&slow_case);
+  }
   __ EnterStubFrame();
   __ PushObject(NullObject());  // Space on the stack for the return value.
   __ PushRegister(shape_reg);
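
The shape of the new fast path above is bump-pointer allocation guarded by
the two flags, with the runtime call as the unconditional fallback. A
minimal C++ model of that control flow, using hypothetical names (Thread,
AllocateSlow, AllocateRecord) in place of the emitted machine code:

#include <cstddef>
#include <cstdint>
#include <cstdlib>

// Illustrative stand-ins; the real stub emits machine code through the
// macro assembler and tags/initializes the object itself.
struct Thread {
  uintptr_t top;  // next free address in new space
  uintptr_t end;  // end of new space
};

bool FLAG_inline_alloc = true;    // cleared by --no_inline_alloc
bool FLAG_use_slow_path = false;  // set by --use_slow_path

// Stand-in for the runtime allocator reached through the stub's slow path.
uintptr_t AllocateSlow(Thread* thread, size_t size) {
  return reinterpret_cast<uintptr_t>(std::malloc(size));
}

uintptr_t AllocateRecord(Thread* thread, size_t size) {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    uintptr_t result = thread->top;
    uintptr_t new_top = result + size;
    if (new_top < thread->end) {  // allocation fits in the remaining space
      thread->top = new_top;      // bump the allocation pointer
      return result;
    }
    // Does not fit: fall through to the slow path (the stub's slow_case).
  }
  return AllocateSlow(thread, size);
}

Because the flags are read while the stub is being generated, passing
--no_inline_alloc removes the fast path from the stub entirely rather than
branching on it at run time.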
@@ -1781,6 +1784,11 @@ static void GenerateAllocateSuspendState(Assembler* assembler,
                                          Register result_reg,
                                          Register frame_size_reg,
                                          Register temp_reg) {
+  if (FLAG_use_slow_path || !FLAG_inline_alloc) {
+    __ Jump(slow_case);
+    return;
+  }
+
   // Check for allocation tracing.
   NOT_IN_PRODUCT(
       __ MaybeTraceAllocation(kSuspendStateCid, slow_case, temp_reg));
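
GenerateAllocateSuspendState is a shared helper, so the single guard above
disables the inline fast path for every suspend-state allocation stub built
on it: when --use_slow_path is set or --no_inline_alloc clears
FLAG_inline_alloc, the generated stub jumps straight to slow_case and the
inline allocation sequence is never emitted.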

View file

@@ -63,7 +63,7 @@ class StubCodeCompiler {
                          bool generic);
 #endif
 
-  void EnsureIsNewOrRemembered(bool preserve_registers = true);
+  void EnsureIsNewOrRemembered(bool preserve_registers);
   static ArrayPtr BuildStaticCallsTable(
       Zone* zone,
       compiler::UnresolvedPcRelativeCalls* unresolved_calls);
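
Dropping the "= true" default is what drives the call-site changes in the
architecture-specific files below. The old EnsureIsNewOrRemembered(assembler)
calls appear to have compiled only through the implicit pointer-to-bool
conversion, silently passing preserve_registers = true; without a default
argument, any such leftover call site now fails to compile.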

View file

@@ -1245,17 +1245,18 @@ void StubCodeCompiler::GenerateAllocateArrayStub() {
   __ PushList((1 << AllocateArrayABI::kTypeArgumentsReg) |
               (1 << AllocateArrayABI::kLengthReg) | (1 << IP));
   __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
+
+  // Write-barrier elimination might be enabled for this array (depending on the
+  // array length). To be sure we will check if the allocated object is in old
+  // space and if so call a leaf runtime to add it to the remembered set.
+  __ ldr(AllocateArrayABI::kResultReg, Address(SP, 2 * target::kWordSize));
+  EnsureIsNewOrRemembered(/*preserve_registers=*/false);
+
   // Pop arguments; result is popped in IP.
   __ PopList((1 << AllocateArrayABI::kTypeArgumentsReg) |
              (1 << AllocateArrayABI::kLengthReg) | (1 << IP));
   __ mov(AllocateArrayABI::kResultReg, Operand(IP));
   __ LeaveStubFrame();
-
-  // Write-barrier elimination might be enabled for this array (depending on the
-  // array length). To be sure we will check if the allocated object is in old
-  // space and if so call a leaf runtime to add it to the remembered set.
-  EnsureIsNewOrRemembered(assembler);
   __ Ret();
 }
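
The comment block states the invariant being protected: the optimizing
compiler may eliminate write barriers for stores into a freshly allocated
array, which is only sound if the array is in new space or has been added
to the remembered set. A rough C++ model of the check the helper performs,
assuming the VM's convention that a low tag bit in the object pointer marks
new-space objects (the bit chosen here is illustrative, not the real
encoding):

#include <cstdint>

constexpr uintptr_t kNewObjectBit = 0x2;  // assumed new-space tag bit

// Stand-in for the leaf runtime entry that records an old-space object.
void AddToRememberedSet(uintptr_t object) { /* runtime call elided */ }

void EnsureIsNewOrRemembered(uintptr_t tagged_object) {
  // New-space objects are visited by the scavenger anyway; only an object
  // that the runtime placed in old space must be remembered so that stores
  // with eliminated write barriers stay visible to the GC.
  if ((tagged_object & kNewObjectBit) == 0) {
    AddToRememberedSet(tagged_object);
  }
}

Calling the helper with preserve_registers=false is cheaper because it may
clobber caller registers, which is why each stub now loads the result from
its known stack slot before the check and pops the real outputs only
afterwards.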

View file

@@ -1524,17 +1524,18 @@ void StubCodeCompiler::GenerateAllocateArrayStub() {
   __ Push(AllocateArrayABI::kLengthReg);
   __ Push(AllocateArrayABI::kTypeArgumentsReg);
   __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
+
+  // Write-barrier elimination might be enabled for this array (depending on the
+  // array length). To be sure we will check if the allocated object is in old
+  // space and if so call a leaf runtime to add it to the remembered set.
+  __ ldr(AllocateArrayABI::kResultReg, Address(SP, 2 * target::kWordSize));
+  EnsureIsNewOrRemembered(/*preserve_registers=*/false);
+
   // Pop arguments; result is popped in IP.
   __ Pop(AllocateArrayABI::kTypeArgumentsReg);
   __ Pop(AllocateArrayABI::kLengthReg);
   __ Pop(AllocateArrayABI::kResultReg);
   __ LeaveStubFrame();
-
-  // Write-barrier elimination might be enabled for this array (depending on the
-  // array length). To be sure we will check if the allocated object is in old
-  // space and if so call a leaf runtime to add it to the remembered set.
-  EnsureIsNewOrRemembered(assembler);
   __ ret();
 }

View file

@@ -1011,15 +1011,16 @@ void StubCodeCompiler::GenerateAllocateArrayStub() {
   __ pushl(AllocateArrayABI::kLengthReg);         // Array length as Smi.
   __ pushl(AllocateArrayABI::kTypeArgumentsReg);  // Type arguments.
   __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
-  __ popl(AllocateArrayABI::kTypeArgumentsReg);  // Pop type arguments.
-  __ popl(AllocateArrayABI::kLengthReg);         // Pop array length argument.
-  __ popl(AllocateArrayABI::kResultReg);  // Pop return value from return slot.
+
   // Write-barrier elimination might be enabled for this array (depending on the
   // array length). To be sure we will check if the allocated object is in old
   // space and if so call a leaf runtime to add it to the remembered set.
-  EnsureIsNewOrRemembered(assembler);
+  __ movl(AllocateArrayABI::kResultReg, Address(ESP, 2 * target::kWordSize));
+  EnsureIsNewOrRemembered(/*preserve_registers=*/false);
+
+  __ popl(AllocateArrayABI::kTypeArgumentsReg);  // Pop type arguments.
+  __ popl(AllocateArrayABI::kLengthReg);         // Pop array length argument.
+  __ popl(AllocateArrayABI::kResultReg);  // Pop return value from return slot.
   __ LeaveFrame();
   __ ret();
 }
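
The 2 * target::kWordSize offset is the return-value slot: the stub pushes
space for the result first, then the length and type arguments, so with the
two arguments still on the stack the result sits two words above the stack
pointer. The other architectures in this change read the same slot.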

View file

@@ -1347,19 +1347,20 @@ void StubCodeCompiler::GenerateAllocateArrayStub() {
   __ sx(AllocateArrayABI::kTypeArgumentsReg,
         Address(SP, 0 * target::kWordSize));
   __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
+
+  // Write-barrier elimination might be enabled for this array (depending on the
+  // array length). To be sure we will check if the allocated object is in old
+  // space and if so call a leaf runtime to add it to the remembered set.
+  ASSERT(AllocateArrayABI::kResultReg == A0);
+  __ lx(AllocateArrayABI::kResultReg, Address(SP, 2 * target::kWordSize));
+  EnsureIsNewOrRemembered(/*preserve_registers=*/false);
+
   __ lx(AllocateArrayABI::kTypeArgumentsReg,
         Address(SP, 0 * target::kWordSize));
   __ lx(AllocateArrayABI::kLengthReg, Address(SP, 1 * target::kWordSize));
   __ lx(AllocateArrayABI::kResultReg, Address(SP, 2 * target::kWordSize));
   __ addi(SP, SP, 3 * target::kWordSize);
   __ LeaveStubFrame();
-
-  // Write-barrier elimination might be enabled for this array (depending on the
-  // array length). To be sure we will check if the allocated object is in old
-  // space and if so call a leaf runtime to add it to the remembered set.
-  ASSERT(AllocateArrayABI::kResultReg == A0);
-  EnsureIsNewOrRemembered(assembler);
   __ ret();
 }

View file

@@ -1444,15 +1444,16 @@ void StubCodeCompiler::GenerateAllocateArrayStub() {
   __ pushq(AllocateArrayABI::kLengthReg);         // Array length as Smi.
   __ pushq(AllocateArrayABI::kTypeArgumentsReg);  // Element type.
   __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
-  __ popq(AllocateArrayABI::kTypeArgumentsReg);  // Pop element type argument.
-  __ popq(AllocateArrayABI::kLengthReg);         // Pop array length argument.
-  __ popq(AllocateArrayABI::kResultReg);  // Pop allocated object.
+
   // Write-barrier elimination might be enabled for this array (depending on the
   // array length). To be sure we will check if the allocated object is in old
   // space and if so call a leaf runtime to add it to the remembered set.
-  EnsureIsNewOrRemembered();
+  __ movq(AllocateArrayABI::kResultReg, Address(RSP, 2 * target::kWordSize));
+  EnsureIsNewOrRemembered(/*preserve_registers=*/false);
+
+  __ popq(AllocateArrayABI::kTypeArgumentsReg);  // Pop element type argument.
+  __ popq(AllocateArrayABI::kLengthReg);         // Pop array length argument.
+  __ popq(AllocateArrayABI::kResultReg);  // Pop allocated object.
   __ LeaveStubFrame();
   __ ret();
 }
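
Note that the x64 call site was the one relying on the bare default,
EnsureIsNewOrRemembered(); with the default argument removed from the
declaration, it too must now spell out /*preserve_registers=*/false,
matching the other architectures.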