VM: Share some stub code between isolates.

This CL also lifts the restriction that the object pool can't be used
in VM isolate code on x64 and arm64.

Stubs that perform allocation can't be shared yet, because the inline allocation
code is not yet isolate-independent. This will be added in a separate CL.
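
For context: the isolate-dependence comes from the inline allocation fast path
embedding addresses of the generating isolate's heap as absolute immediates.
A minimal sketch of that pattern (illustrative only, not code from this CL;
x64 flavor, local names like instance_size/slow_case are placeholders):

    // Allocate 'instance_size' bytes by bumping the heap top pointer.
    Heap* heap = Isolate::Current()->heap();             // generating isolate!
    __ movq(RAX, Address::Absolute(heap->TopAddress()));
    __ addq(RAX, Immediate(instance_size));
    __ cmpq(RAX, Address::Absolute(heap->EndAddress()));
    __ j(ABOVE_EQUAL, &slow_case);                       // heap full -> runtime
    // The embedded TopAddress()/EndAddress() values are valid only for the
    // isolate that generated the code, so stubs containing this can't be shared.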

BUG=
R=rmacnak@google.com

Review URL: https://codereview.chromium.org//1229283002 .
Author: Florian Schneider
Date:   2015-07-13 10:00:51 +02:00
Commit: 81deeaeedc (parent: d7c2f4550b)

7 changed files with 41 additions and 98 deletions

runtime/vm/assembler.cc

@@ -254,8 +254,6 @@ intptr_t ObjectPoolWrapper::AddImmediate(uword imm) {
 
 intptr_t ObjectPoolWrapper::AddObject(ObjectPool::Entry entry,
                                       Patchability patchable) {
-  // The object pool cannot be used in the vm isolate.
-  ASSERT(Isolate::Current() != Dart::vm_isolate());
   object_pool_.Add(entry);
   if (patchable == kNotPatchable) {
     // The object isn't patchable. Record the index for fast lookup.
@@ -268,7 +266,6 @@ intptr_t ObjectPoolWrapper::AddObject(ObjectPool::Entry entry,
 
 intptr_t ObjectPoolWrapper::AddExternalLabel(const ExternalLabel* label,
                                              Patchability patchable) {
-  ASSERT(Isolate::Current() != Dart::vm_isolate());
   return AddObject(ObjectPool::Entry(label->address(),
                                      ObjectPool::kImmediate),
                    patchable);
@@ -277,9 +274,6 @@ intptr_t ObjectPoolWrapper::AddExternalLabel(const ExternalLabel* label,
 
 intptr_t ObjectPoolWrapper::FindObject(ObjectPool::Entry entry,
                                        Patchability patchable) {
-  // The object pool cannot be used in the vm isolate.
-  ASSERT(Isolate::Current() != Dart::vm_isolate());
-
   // If the object is not patchable, check if we've already got it in the
   // object pool.
   if (patchable == kNotPatchable) {
@@ -306,8 +300,6 @@ intptr_t ObjectPoolWrapper::FindImmediate(uword imm) {
 
 intptr_t ObjectPoolWrapper::FindExternalLabel(const ExternalLabel* label,
                                               Patchability patchable) {
-  // The object pool cannot be used in the vm isolate.
-  ASSERT(Isolate::Current() != Dart::vm_isolate());
   return FindObject(ObjectPool::Entry(label->address(),
                                       ObjectPool::kImmediate),
                     patchable);
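
The Find* variants above differ from the Add* variants only in first
consulting a reverse index so that non-patchable entries are deduplicated. A
rough sketch of that unchanged logic, with the map member and sentinel names
assumed rather than taken from the tree:

    intptr_t ObjectPoolWrapper::FindObject(ObjectPool::Entry entry,
                                           Patchability patchable) {
      if (patchable == kNotPatchable) {
        // Reuse an identical non-patchable entry if one was recorded.
        const intptr_t idx = object_pool_index_table_.Lookup(entry);
        if (idx != ObjIndexPair::kNoIndex) return idx;
      }
      // Patchable entries always get a fresh slot: each patchable call site
      // must be patchable independently of the others.
      return AddObject(entry, patchable);
    }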

runtime/vm/assembler_arm64.cc

@@ -360,7 +360,6 @@ void Assembler::LoadWordFromPoolOffsetFixed(Register dst, Register pp,
 
 intptr_t Assembler::FindImmediate(int64_t imm) {
-  ASSERT(Isolate::Current() != Dart::vm_isolate());
   return object_pool_wrapper_.FindImmediate(imm);
 }
@@ -380,7 +379,7 @@ bool Assembler::CanLoadFromObjectPool(const Object& object) const {
   }
   ASSERT(object.IsNotTemporaryScopedHandle());
   ASSERT(object.IsOld());
-  return (Isolate::Current() != Dart::vm_isolate());
+  return true;
 }
@@ -388,12 +387,7 @@ bool Assembler::CanLoadImmediateFromPool(int64_t imm, Register pp) {
   if (!allow_constant_pool()) {
     return false;
   }
-  return !Utils::IsInt(32, imm) &&
-      (pp != kNoPP) &&
-      // We *could* put constants in the pool in a VM isolate, but it is
-      // simpler to maintain the invariant that the object pool is not used
-      // in the VM isolate.
-      (Isolate::Current() != Dart::vm_isolate());
+  return !Utils::IsInt(32, imm) && (pp != kNoPP);
 }
@@ -439,9 +433,7 @@ void Assembler::LoadObjectHelper(Register dst,
                    : object_pool_wrapper_.FindObject(object));
     LoadWordFromPoolOffset(dst, pp, offset);
   } else {
-    ASSERT((Isolate::Current() == Dart::vm_isolate()) ||
-           object.IsSmi() ||
-           object.InVMHeap());
+    ASSERT(object.IsSmi() || object.InVMHeap());
     LoadDecodableImmediate(dst, reinterpret_cast<int64_t>(object.raw()), pp);
   }
 }
@@ -473,17 +465,9 @@ void Assembler::CompareObject(Register reg, const Object& object, Register pp) {
 
 void Assembler::LoadDecodableImmediate(Register reg, int64_t imm, Register pp) {
-  if ((pp != kNoPP) &&
-      (Isolate::Current() != Dart::vm_isolate()) &&
-      allow_constant_pool()) {
-    int64_t val_smi_tag = imm & kSmiTagMask;
-    imm &= ~kSmiTagMask;  // Mask off the tag bits.
+  if ((pp != kNoPP) && allow_constant_pool()) {
     const int32_t offset = ObjectPool::element_offset(FindImmediate(imm));
     LoadWordFromPoolOffset(reg, pp, offset);
-    if (val_smi_tag != 0) {
-      // Add back the tag bits.
-      orri(reg, reg, Immediate(val_smi_tag));
-    }
   } else {
     // TODO(zra): Since this sequence only needs to be decodable, it can be
     // of variable length.
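
With the VM-isolate check gone, loading a large immediate on arm64 reduces to
a single pp-relative load whenever a pool is available, and the old Smi-tag
split (mask the tag bits off, load, then orri the tag back on) is dropped: the
value now sits in the pool verbatim. The surviving fast path, annotated:

    // One ldr from the object pool instead of a movz/movk sequence; the
    // pool slot is deduplicated via FindImmediate.
    const int32_t offset = ObjectPool::element_offset(FindImmediate(imm));
    LoadWordFromPoolOffset(reg, pp, offset);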

runtime/vm/assembler_x64.cc

@@ -93,13 +93,9 @@ void Assembler::CallPatchable(const ExternalLabel* label) {
 
 void Assembler::Call(const ExternalLabel* label, Register pp) {
-  if (Isolate::Current() == Dart::vm_isolate()) {
-    call(label);
-  } else {
-    const int32_t offset = ObjectPool::element_offset(
-        object_pool_wrapper_.FindExternalLabel(label, kNotPatchable));
-    call(Address::AddressBaseImm32(pp, offset - kHeapObjectTag));
-  }
+  const int32_t offset = ObjectPool::element_offset(
+      object_pool_wrapper_.FindExternalLabel(label, kNotPatchable));
+  call(Address::AddressBaseImm32(pp, offset - kHeapObjectTag));
 }
@@ -2787,7 +2783,7 @@ bool Assembler::CanLoadFromObjectPool(const Object& object) const {
   }
   ASSERT(object.IsNotTemporaryScopedHandle());
   ASSERT(object.IsOld());
-  return (Isolate::Current() != Dart::vm_isolate());
+  return true;
 }
@@ -2816,9 +2812,7 @@ void Assembler::LoadObjectHelper(Register dst,
                    : object_pool_wrapper_.FindObject(object));
     LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag);
   } else {
-    ASSERT((Isolate::Current() == Dart::vm_isolate()) ||
-           object.IsSmi() ||
-           object.InVMHeap());
+    ASSERT(object.IsSmi() || object.InVMHeap());
     LoadImmediate(dst, Immediate(reinterpret_cast<int64_t>(object.raw())), pp);
   }
 }
@@ -2877,7 +2871,6 @@ void Assembler::CompareObject(Register reg, const Object& object, Register pp) {
 
 intptr_t Assembler::FindImmediate(int64_t imm) {
-  ASSERT(Isolate::Current() != Dart::vm_isolate());
   return object_pool_wrapper_.FindImmediate(imm);
 }
@@ -2886,9 +2879,7 @@ bool Assembler::CanLoadImmediateFromPool(const Immediate& imm, Register pp) {
   if (!allow_constant_pool()) {
     return false;
  }
-  return !imm.is_int32() &&
-      (pp != kNoRegister) &&
-      (Isolate::Current() != Dart::vm_isolate());
+  return !imm.is_int32() && (pp != kNoRegister);
 }
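
On x64, Assembler::Call through an ExternalLabel is now unconditionally
pool-based; the direct call(label) escape hatch for VM-isolate code is gone.
The remaining sequence, annotated:

    // PP holds the pool pointer, which is a tagged heap object, hence the
    // -kHeapObjectTag in the displacement. This emits a single indirect
    // call [PP + offset - kHeapObjectTag].
    const int32_t offset = ObjectPool::element_offset(
        object_pool_wrapper_.FindExternalLabel(label, kNotPatchable));
    call(Address::AddressBaseImm32(pp, offset - kHeapObjectTag));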

runtime/vm/object.cc

@@ -629,6 +629,9 @@ void Object::InitOnce(Isolate* isolate) {
   // isolate.
   Class::NewExternalTypedDataClass(kExternalTypedDataUint8ArrayCid);
 
+  // Needed for object pools of VM isolate stubs.
+  Class::NewTypedDataClass(kTypedDataInt8ArrayCid);
+
   // Allocate and initialize the empty_array instance.
   {
     uword address = heap->Allocate(Array::InstanceSize(0), Heap::kOld);
@@ -1442,7 +1445,6 @@ RawError* Object::Init(Isolate* isolate) {
 
   // Finish the initialization by compiling the bootstrap scripts containing the
   // base interfaces and the implementation of the internal classes.
-  StubCode::InitBootstrapStubs(isolate);
   const Error& error = Error::Handle(Bootstrap::LoadandCompileScripts());
   if (!error.IsNull()) {
     return error.raw();
@@ -1595,7 +1597,6 @@ RawError* Object::Init(Isolate* isolate) {
                                         Context::New(0, Heap::kOld));
   object_store->set_empty_context(context);
-  StubCode::InitBootstrapStubs(isolate);
 
 #endif  // defined(DART_NO_SNAPSHOT).
   return Error::null();
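
The NewTypedDataClass call added above exists because every ObjectPool carries
a TypedData side table describing the kind of each entry; now that VM-isolate
stubs have object pools, the Int8Array TypedData class must be registered in
the VM isolate before the first stub is generated. Roughly (a sketch of the
pool allocation, not part of this CL):

    // Per-entry type info (object vs. immediate) lives in an Int8List.
    const TypedData& info_array = TypedData::Handle(
        TypedData::New(kTypedDataInt8ArrayCid, len, Heap::kOld));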

runtime/vm/stub_code.cc

@@ -58,30 +58,18 @@ void StubCode::InitOnce() {
 }
 
-void StubCode::GenerateBootstrapStubsFor(Isolate* init) {
-  // Generate initial stubs.
-  Code& code = Code::Handle();
-  BOOTSTRAP_STUB_CODE_LIST(STUB_CODE_GENERATE);
-}
-
 void StubCode::GenerateStubsFor(Isolate* init) {
   // Generate all the other stubs.
   Code& code = Code::Handle();
-  REST_STUB_CODE_LIST(STUB_CODE_GENERATE);
+  STUB_CODE_LIST(STUB_CODE_GENERATE);
 }
 
 #undef STUB_CODE_GENERATE
 
-void StubCode::InitBootstrapStubs(Isolate* isolate) {
+void StubCode::Init(Isolate* isolate) {
   StubCode* stubs = new StubCode(isolate);
   isolate->set_stub_code(stubs);
-  stubs->GenerateBootstrapStubsFor(isolate);
-}
-
-void StubCode::Init(Isolate* isolate) {
-  isolate->stub_code()->GenerateStubsFor(isolate);
+  stubs->GenerateStubsFor(isolate);
 }
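
With the bootstrap split removed, stub initialization is a single pass again.
Assembled from the hunk above, the resulting flow is:

    void StubCode::Init(Isolate* isolate) {
      StubCode* stubs = new StubCode(isolate);
      isolate->set_stub_code(stubs);
      stubs->GenerateStubsFor(isolate);  // one pass over STUB_CODE_LIST
    }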

runtime/vm/stub_code.h

@@ -23,31 +23,25 @@ class RawCode;
   V(GetStackPointer) \
   V(JumpToExceptionHandler) \
   V(UpdateStoreBuffer) \
-
-// Is it permitted for the stubs above to refer to Object::null(), which is
-// allocated in the VM isolate and shared across all isolates.
-// However, in cases where a simple GC-safe placeholder is needed on the stack,
-// using Smi 0 instead of Object::null() is slightly more efficient, since a Smi
-// does not require relocation.
-
-// List of stubs created per isolate, these stubs could potentially contain
-// embedded objects and hence cannot be shared across isolates.
-// The initial stubs are needed for loading bootstrapping scripts and have to
-// be generated before Object::Init is called.
-#define BOOTSTRAP_STUB_CODE_LIST(V) \
   V(PrintStopMessage) \
   V(CallToRuntime) \
   V(LazyCompile) \
-
-#define REST_STUB_CODE_LIST(V) \
   V(CallBootstrapCFunction) \
   V(CallNativeCFunction) \
   V(FixCallersTarget) \
   V(CallStaticFunction) \
+  V(OptimizeFunction) \
+  V(InvokeDartCode) \
+  V(DebugStepCheck) \
+  V(MegamorphicLookup) \
   V(FixAllocationStubTarget) \
   V(FixAllocateArrayStubTarget) \
-  V(CallClosureNoSuchMethod) \
-  V(AllocateContext) \
+  V(Deoptimize) \
+  V(DeoptimizeLazy) \
+  V(UnoptimizedIdenticalWithNumberCheck) \
+  V(OptimizedIdenticalWithNumberCheck) \
+  V(ICCallBreakpoint) \
+  V(RuntimeCallBreakpoint) \
   V(OneArgCheckInlineCache) \
   V(TwoArgsCheckInlineCache) \
   V(SmiAddInlineCache) \
@@ -60,23 +54,22 @@ class RawCode;
   V(ZeroArgsUnoptimizedStaticCall) \
   V(OneArgUnoptimizedStaticCall) \
   V(TwoArgsUnoptimizedStaticCall) \
-  V(OptimizeFunction) \
-  V(InvokeDartCode) \
+
+// Is it permitted for the stubs above to refer to Object::null(), which is
+// allocated in the VM isolate and shared across all isolates.
+// However, in cases where a simple GC-safe placeholder is needed on the stack,
+// using Smi 0 instead of Object::null() is slightly more efficient, since a Smi
+// does not require relocation.
+
+// List of stubs created per isolate, these stubs could potentially contain
+// embedded objects and hence cannot be shared across isolates.
+#define STUB_CODE_LIST(V) \
+  V(CallClosureNoSuchMethod) \
+  V(AllocateContext) \
   V(Subtype1TestCache) \
   V(Subtype2TestCache) \
   V(Subtype3TestCache) \
-  V(Deoptimize) \
-  V(DeoptimizeLazy) \
-  V(ICCallBreakpoint) \
-  V(RuntimeCallBreakpoint) \
-  V(UnoptimizedIdenticalWithNumberCheck) \
-  V(OptimizedIdenticalWithNumberCheck) \
-  V(DebugStepCheck) \
-  V(MegamorphicLookup) \
-
-#define STUB_CODE_LIST(V) \
-  BOOTSTRAP_STUB_CODE_LIST(V) \
-  REST_STUB_CODE_LIST(V)
 
 // class StubEntry is used to describe stub methods generated in dart to
 // abstract out common code executed from generated dart code.
@@ -121,9 +114,6 @@ class StubCode {
   // Generate all stubs which are generated on a per isolate basis as they
   // have embedded objects which are isolate specific.
-  // Bootstrap stubs are needed before Object::Init to compile the bootstrap
-  // scripts.
-  static void InitBootstrapStubs(Isolate* isolate);
   static void Init(Isolate* isolate);
 
   static void VisitObjectPointers(ObjectPointerVisitor* visitor);
@@ -185,7 +175,6 @@ class StubCode {
       Assembler*, Register recv, Register cache, Register target);
 
  private:
-  void GenerateBootstrapStubsFor(Isolate* isolate);
   void GenerateStubsFor(Isolate* isolate);
 
   friend class MegamorphicCacheTable;
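
Both lists are consumed by the same X-macro, so moving a V(...) line between
VM_STUB_CODE_LIST and STUB_CODE_LIST is all it takes to change where a stub is
generated. The generator in stub_code.cc expands each entry roughly like this
(a sketch; the exact member names may differ):

    #define STUB_CODE_GENERATE(name) \
      code ^= Generate("_stub_"#name, StubCode::Generate##name##Stub); \
      name##_entry_ = new StubEntry(code);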

runtime/vm/stub_code_ia32.cc

@@ -1347,9 +1347,8 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
   Label stepping, done_stepping;
   if (FLAG_support_debugger && !optimized) {
     __ Comment("Check single stepping");
-    uword single_step_address = reinterpret_cast<uword>(Isolate::Current()) +
-        Isolate::single_step_offset();
-    __ cmpb(Address::Absolute(single_step_address), Immediate(0));
+    __ LoadIsolate(EAX);
+    __ cmpb(Address(EAX, Isolate::single_step_offset()), Immediate(0));
     __ j(NOT_EQUAL, &stepping);
     __ Bind(&done_stepping);
   }
@@ -1649,9 +1648,8 @@ void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) {
   // Check single stepping.
   Label stepping, done_stepping;
   if (FLAG_support_debugger) {
-    uword single_step_address = reinterpret_cast<uword>(Isolate::Current()) +
-        Isolate::single_step_offset();
-    __ cmpb(Address::Absolute(single_step_address), Immediate(0));
+    __ LoadIsolate(EAX);
+    __ cmpb(Address(EAX, Isolate::single_step_offset()), Immediate(0));
     __ j(NOT_EQUAL, &stepping, Assembler::kNearJump);
     __ Bind(&done_stepping);
   }
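
The ia32 change is the same isolate-independence fix in miniature: rather than
baking the generating isolate's address into the stub as an absolute
immediate, the isolate is loaded when the stub runs. Side by side:

    // Before: pinned to the isolate that generated the stub.
    uword single_step_address = reinterpret_cast<uword>(Isolate::Current()) +
        Isolate::single_step_offset();
    __ cmpb(Address::Absolute(single_step_address), Immediate(0));

    // After: correct in whichever isolate executes the stub.
    __ LoadIsolate(EAX);
    __ cmpb(Address(EAX, Isolate::single_step_offset()), Immediate(0));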