Make subtype test cache stub shared between isolates.

Also remove the unused pp parameter from LoadIsolate on ARM64.

BUG=
R=koda@google.com

Review URL: https://codereview.chromium.org//1235843003 .
Florian Schneider 2015-07-13 15:42:16 +02:00
parent 12e232dca7
commit 141d6bdcd2
9 changed files with 36 additions and 31 deletions
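
Summary of the addressing change (an editor's condensed before/after view, distilled from the assembler hunks below; not part of the original commit):

// Before: the address of the isolate's class table pointer was baked into the
// generated code as an absolute immediate, tying the stub to one isolate.
movl(result,
     Address::Absolute(Isolate::Current()->class_table()->TableAddress()));

// After: the isolate pointer is read from the current thread at run time and
// the class table pointer is reached through compile-time-constant offsets,
// so the same stub code is valid in every isolate.
LoadIsolate(result);  // Loads the isolate pointer via THR (see the ARM64 hunk).
const intptr_t offset =
    Isolate::class_table_offset() + ClassTable::table_offset();
movl(result, Address(result, offset));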


@@ -1967,8 +1967,10 @@ void Assembler::LoadClassId(Register result, Register object, Condition cond) {
 void Assembler::LoadClassById(Register result, Register class_id) {
   ASSERT(result != class_id);
-  LoadImmediate(result, Isolate::Current()->class_table()->TableAddress());
-  LoadFromOffset(kWord, result, result, 0);
+  LoadIsolate(result);
+  const intptr_t offset =
+      Isolate::class_table_offset() + ClassTable::table_offset();
+  LoadFromOffset(kWord, result, result, offset);
   ldr(result, Address(result, class_id, LSL, 2));
 }


@@ -416,7 +416,7 @@ void Assembler::LoadExternalLabelFixed(Register dst,
 }
-void Assembler::LoadIsolate(Register dst, Register pp) {
+void Assembler::LoadIsolate(Register dst) {
   ldr(dst, Address(THR, Thread::isolate_offset()));
 }
@@ -964,8 +964,10 @@ void Assembler::LoadClassId(Register result, Register object, Register pp) {
 void Assembler::LoadClassById(Register result, Register class_id, Register pp) {
   ASSERT(result != class_id);
-  LoadImmediate(result, Isolate::Current()->class_table()->TableAddress(), pp);
-  LoadFromOffset(result, result, 0, pp);
+  LoadIsolate(result);
+  const intptr_t offset =
+      Isolate::class_table_offset() + ClassTable::table_offset();
+  LoadFromOffset(result, result, offset, pp);
   ldr(result, Address(result, class_id, UXTX, Address::Scaled));
 }


@@ -1319,7 +1319,7 @@ class Assembler : public ValueObject {
                               const ExternalLabel* label,
                               Patchability patchable,
                               Register pp);
-  void LoadIsolate(Register dst, Register pp);
+  void LoadIsolate(Register dst);
   void LoadObject(Register dst, const Object& obj, Register pp);
   void LoadUniqueObject(Register dst, const Object& obj, Register pp);
   void LoadDecodableImmediate(Register reg, int64_t imm, Register pp);


@@ -3023,8 +3023,10 @@ void Assembler::LoadClassId(Register result, Register object) {
 void Assembler::LoadClassById(Register result, Register class_id) {
   ASSERT(result != class_id);
-  movl(result,
-       Address::Absolute(Isolate::Current()->class_table()->TableAddress()));
+  LoadIsolate(result);
+  const intptr_t offset =
+      Isolate::class_table_offset() + ClassTable::table_offset();
+  movl(result, Address(result, offset));
   movl(result, Address(result, class_id, TIMES_4, 0));
 }


@@ -670,8 +670,10 @@ void Assembler::LoadClassId(Register result, Register object) {
 void Assembler::LoadClassById(Register result, Register class_id) {
   ASSERT(!in_delay_slot_);
   ASSERT(result != class_id);
-  LoadImmediate(result, Isolate::Current()->class_table()->TableAddress());
-  lw(result, Address(result, 0));
+  LoadIsolate(result);
+  const intptr_t offset =
+      Isolate::class_table_offset() + ClassTable::table_offset();
+  lw(result, Address(result, offset));
   sll(TMP, class_id, 2);
   addu(result, result, TMP);
   lw(result, Address(result));


@@ -3781,9 +3781,10 @@ void Assembler::LoadClassId(Register result, Register object) {
 void Assembler::LoadClassById(Register result, Register class_id, Register pp) {
   ASSERT(result != class_id);
-  Isolate* isolate = Isolate::Current();
-  LoadImmediate(result, Immediate(isolate->class_table()->TableAddress()), pp);
-  movq(result, Address(result, 0));
+  LoadIsolate(result);
+  const intptr_t offset =
+      Isolate::class_table_offset() + ClassTable::table_offset();
+  movq(result, Address(result, offset));
   movq(result, Address(result, class_id, TIMES_8, 0));
 }


@@ -178,6 +178,7 @@ class ClassTable {
   void PrintToJSONObject(JSONObject* object);
+  // Used by the generated code.
   static intptr_t table_offset() {
     return OFFSET_OF(ClassTable, table_);
   }
@@ -193,11 +194,6 @@ class ClassTable {
   // Called immediately after a new GC.
   void UpdatePromoted();
-  // Used by the generated code.
-  uword TableAddress() {
-    return reinterpret_cast<uword>(&table_);
-  }
   // Used by the generated code.
   uword PredefinedClassHeapStatsTableAddress() {
     return reinterpret_cast<uword>(predefined_class_heap_stats_table_);


@@ -54,6 +54,9 @@ class RawCode;
   V(ZeroArgsUnoptimizedStaticCall) \
   V(OneArgUnoptimizedStaticCall) \
   V(TwoArgsUnoptimizedStaticCall) \
+  V(Subtype1TestCache) \
+  V(Subtype2TestCache) \
+  V(Subtype3TestCache) \
 // Is it permitted for the stubs above to refer to Object::null(), which is
 // allocated in the VM isolate and shared across all isolates.
@@ -66,9 +69,6 @@ class RawCode;
 #define STUB_CODE_LIST(V) \
   V(CallClosureNoSuchMethod) \
   V(AllocateContext) \
-  V(Subtype1TestCache) \
-  V(Subtype2TestCache) \
-  V(Subtype3TestCache) \
 // class StubEntry is used to describe stub methods generated in dart to
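
Editor's note (not part of the diff): both macros here are X-macro lists that are expanded once per stub name. Moving the three Subtype*TestCache entries out of STUB_CODE_LIST, the per-isolate list, and into the list above groups them with the stubs that are generated once and shared between isolates, as the commit title describes; this is only safe now that their code no longer embeds isolate-specific addresses. A minimal sketch of how such an X-macro list is typically consumed (the macro names below are hypothetical, not the VM's actual ones):

// Hypothetical shared-stub list written in the same X-macro style as above.
#define SHARED_STUB_LIST(V)                                                    \
  V(Subtype1TestCache)                                                         \
  V(Subtype2TestCache)                                                         \
  V(Subtype3TestCache)

// One expansion declares a StubEntry pointer per listed stub; other
// expansions can drive code generation or lookup for each entry.
#define DECLARE_STUB_ENTRY(name) static StubEntry* name##_entry_;
SHARED_STUB_LIST(DECLARE_STUB_ENTRY)
#undef DECLARE_STUB_ENTRY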


@@ -48,7 +48,7 @@ void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) {
   __ EnterStubFrame();
   COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << R28)) != 0);
-  __ LoadIsolate(R28, kNoPP);
+  __ LoadIsolate(R28);
   // Save exit frame information to enable stack walking as we are about
   // to transition to Dart VM C++ code.
@@ -148,7 +148,7 @@ void StubCode::GenerateCallNativeCFunctionStub(Assembler* assembler) {
   __ EnterStubFrame();
   COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << R28)) != 0);
-  __ LoadIsolate(R28, kNoPP);
+  __ LoadIsolate(R28);
   // Save exit frame information to enable stack walking as we are about
   // to transition to native code.
@@ -250,7 +250,7 @@ void StubCode::GenerateCallBootstrapCFunctionStub(Assembler* assembler) {
   __ EnterStubFrame();
   COMPILE_ASSERT((kAbiPreservedCpuRegs & (1 << R28)) != 0);
-  __ LoadIsolate(R28, kNoPP);
+  __ LoadIsolate(R28);
   // Save exit frame information to enable stack walking as we are about
   // to transition to native code.
@@ -841,7 +841,7 @@ void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) {
     __ mov(THR, R3);
   }
   // Load Isolate pointer into temporary register R5.
-  __ LoadIsolate(R5, PP);
+  __ LoadIsolate(R5);
   // Save the current VMTag on the stack.
   __ LoadFromOffset(R4, R5, Isolate::vm_tag_offset(), PP);
@@ -898,7 +898,7 @@ void StubCode::GenerateInvokeDartCodeStub(Assembler* assembler) {
   // Get rid of arguments pushed on the stack.
   __ AddImmediate(SP, FP, kExitLinkSlotFromEntryFp * kWordSize, PP);
-  __ LoadIsolate(R28, PP);
+  __ LoadIsolate(R28);
   // Restore the saved top exit frame info and top resource back into the
   // Isolate structure. Uses R6 as a temporary register for this.
@@ -1433,7 +1433,7 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
   Label stepping, done_stepping;
   if (FLAG_support_debugger && !optimized) {
     __ Comment("Check single stepping");
-    __ LoadIsolate(R6, kNoPP);
+    __ LoadIsolate(R6);
     __ LoadFromOffset(
         R6, R6, Isolate::single_step_offset(), kNoPP, kUnsignedByte);
     __ CompareRegisters(R6, ZR);
@@ -1727,7 +1727,7 @@ void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) {
   // Check single stepping.
   Label stepping, done_stepping;
   if (FLAG_support_debugger) {
-    __ LoadIsolate(R6, kNoPP);
+    __ LoadIsolate(R6);
     __ LoadFromOffset(
         R6, R6, Isolate::single_step_offset(), kNoPP, kUnsignedByte);
     __ CompareImmediate(R6, 0, kNoPP);
@@ -1843,7 +1843,7 @@ void StubCode::GenerateDebugStepCheckStub(
     Assembler* assembler) {
   // Check single stepping.
   Label stepping, done_stepping;
-  __ LoadIsolate(R1, kNoPP);
+  __ LoadIsolate(R1);
   __ LoadFromOffset(
       R1, R1, Isolate::single_step_offset(), kNoPP, kUnsignedByte);
   __ CompareImmediate(R1, 0, kNoPP);
@@ -1991,7 +1991,7 @@ void StubCode::GenerateJumpToExceptionHandlerStub(Assembler* assembler) {
   __ mov(R0, R3);  // Exception object.
   __ mov(R1, R4);  // StackTrace object.
   __ mov(THR, R5);
-  __ LoadIsolate(R5, kNoPP);
+  __ LoadIsolate(R5);
   // Set the tag.
   __ LoadImmediate(R2, VMTag::kDartTagId, kNoPP);
   __ StoreToOffset(R2, R5, Isolate::vm_tag_offset(), kNoPP);
@@ -2098,7 +2098,7 @@ void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub(
   // Check single stepping.
   Label stepping, done_stepping;
   if (FLAG_support_debugger) {
-    __ LoadIsolate(R1, kNoPP);
+    __ LoadIsolate(R1);
     __ LoadFromOffset(
         R1, R1, Isolate::single_step_offset(), kNoPP, kUnsignedByte);
     __ CompareImmediate(R1, 0, kNoPP);