VM: Align implementations of assembler constant pools.

Add support for adding patchable/non-patchable entries on ARM/MIPS, as is
already done for x64/ARM64.

R=vegorov@google.com

Review URL: https://codereview.chromium.org//886173003

git-svn-id: https://dart.googlecode.com/svn/branches/bleeding_edge/dart@43589 260f80e4-7a28-3924-810f-c04153c831b5
This commit is contained in:
fschneider@google.com 2015-02-09 11:47:36 +00:00
parent c767597da3
commit df94b27666
11 changed files with 173 additions and 340 deletions

View file

@ -241,4 +241,63 @@ const Code::Comments& Assembler::GetCodeComments() const {
return comments;
}
// Appends |obj| to the pool and returns its index.  Non-patchable entries
// are also recorded in the index hash map so later FindObject() calls can
// reuse them; patchable entries are never deduplicated, since each
// reference may be patched independently.
intptr_t ObjectPool::AddObject(const Object& obj, Patchability patchable) {
// The object pool cannot be used in the vm isolate.
ASSERT(Isolate::Current() != Dart::vm_isolate());
if (object_pool_.IsNull()) {
// Lazily allocate the backing array in old space on first use.
object_pool_ = GrowableObjectArray::New(Heap::kOld);
}
object_pool_.Add(obj, Heap::kOld);
patchable_pool_entries_.Add(patchable);
if (patchable == kNotPatchable) {
// The object isn't patchable. Record the index for fast lookup.
object_pool_index_table_.Insert(
ObjIndexPair(&obj, object_pool_.Length() - 1));
}
return object_pool_.Length() - 1;
}
// Appends |label|'s target address to the pool (as a RawSmi) and returns
// its index.  No deduplication beyond what AddObject() performs.
intptr_t ObjectPool::AddExternalLabel(const ExternalLabel* label,
Patchability patchable) {
// The object pool cannot be used in the vm isolate.
ASSERT(Isolate::Current() != Dart::vm_isolate());
const uword address = label->address();
// 4-byte alignment guarantees the low (tag) bit is clear, so the raw
// address reinterprets as a valid Smi.
ASSERT(Utils::IsAligned(address, 4));
// The address is stored in the object array as a RawSmi.
const Smi& smi = Smi::Handle(reinterpret_cast<RawSmi*>(address));
return AddObject(smi, patchable);
}
// Returns the index of an existing non-patchable entry for |obj| if one is
// already pooled; otherwise appends a new entry via AddObject().
intptr_t ObjectPool::FindObject(const Object& obj, Patchability patchable) {
// The object pool cannot be used in the vm isolate.
ASSERT(Isolate::Current() != Dart::vm_isolate());
// If the object is not patchable, check if we've already got it in the
// object pool.
if (patchable == kNotPatchable && !object_pool_.IsNull()) {
intptr_t idx = object_pool_index_table_.Lookup(&obj);
if (idx != ObjIndexPair::kNoIndex) {
// Only non-patchable entries are ever recorded in the index table.
ASSERT(patchable_pool_entries_[idx] == kNotPatchable);
return idx;
}
}
return AddObject(obj, patchable);
}
// Returns the pool index for |label|'s target address, reusing an existing
// non-patchable entry when possible (via FindObject()).
intptr_t ObjectPool::FindExternalLabel(const ExternalLabel* label,
Patchability patchable) {
// The object pool cannot be used in the vm isolate.
ASSERT(Isolate::Current() != Dart::vm_isolate());
const uword address = label->address();
// 4-byte alignment keeps the Smi tag bit clear for the raw-address Smi.
ASSERT(Utils::IsAligned(address, 4));
// The address is stored in the object array as a RawSmi.
const Smi& smi = Smi::Handle(reinterpret_cast<RawSmi*>(address));
return FindObject(smi, patchable);
}
} // namespace dart

View file

@ -9,6 +9,7 @@
#include "vm/allocation.h"
#include "vm/globals.h"
#include "vm/growable_array.h"
#include "vm/hash_map.h"
#include "vm/object.h"
namespace dart {
@ -256,6 +257,37 @@ class ObjIndexPair {
Value value_;
};
// Whether a pool entry may be rewritten after code generation.  Patchable
// entries are never shared, since each reference may be patched
// independently; non-patchable entries are deduplicated.
enum Patchability {
kPatchable,
kNotPatchable,
};
// Assembler-side constant pool: objects and jump-target addresses
// referenced by generated code.  Add* appends unconditionally; Find*
// reuses an existing non-patchable entry when one is present.
class ObjectPool : public ValueObject {
public:
ObjectPool() : object_pool_(GrowableObjectArray::Handle()) { }
// Appends an entry and returns its index (no reuse of existing entries).
intptr_t AddObject(const Object& obj, Patchability patchable);
intptr_t AddExternalLabel(const ExternalLabel* label,
Patchability patchable);
// Returns the index of an existing non-patchable entry, or appends one.
intptr_t FindObject(const Object& obj, Patchability patchable);
intptr_t FindExternalLabel(const ExternalLabel* label,
Patchability patchable);
// Backing array; null until the first entry is added.
const GrowableObjectArray& data() const { return object_pool_; }
private:
// Objects and jump targets.
GrowableObjectArray& object_pool_;
// Patchability of pool entries.
GrowableArray<Patchability> patchable_pool_entries_;
// Hashmap for fast lookup in object pool.
DirectChainedHashMap<ObjIndexPair> object_pool_index_table_;
};
} // namespace dart

View file

@ -1587,8 +1587,8 @@ void Assembler::LoadObject(Register rd, const Object& object, Condition cond) {
// Make sure that class CallPattern is able to decode this load from the
// object pool.
const int32_t offset =
Array::data_offset() + 4*AddObject(object) - kHeapObjectTag;
LoadWordFromPoolOffset(rd, offset, cond);
Array::element_offset(object_pool_.FindObject(object, kNotPatchable));
LoadWordFromPoolOffset(rd, offset - kHeapObjectTag, cond);
}
}
@ -2673,8 +2673,8 @@ void Assembler::BranchLinkPatchable(const ExternalLabel* label) {
// For added code robustness, use 'blx lr' in a patchable sequence and
// use 'blx ip' in a non-patchable sequence (see other BranchLink flavors).
const int32_t offset =
Array::data_offset() + 4*AddExternalLabel(label) - kHeapObjectTag;
LoadWordFromPoolOffset(LR, offset);
Array::element_offset(object_pool_.FindExternalLabel(label, kPatchable));
LoadWordFromPoolOffset(LR, offset - kHeapObjectTag);
blx(LR); // Use blx instruction so that the return branch prediction works.
}
@ -2710,22 +2710,6 @@ void Assembler::LoadPatchableImmediate(
}
void Assembler::LoadDecodableImmediate(
Register rd, int32_t value, Condition cond) {
const ARMVersion version = TargetCPUFeatures::arm_version();
if ((version == ARMv5TE) || (version == ARMv6)) {
LoadPatchableImmediate(rd, value, cond);
} else {
ASSERT(version == ARMv7);
movw(rd, Utils::Low16Bits(value), cond);
const uint16_t value_high = Utils::High16Bits(value);
if (value_high != 0) {
movt(rd, value_high, cond);
}
}
}
void Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
Operand o;
if (Operand::CanHold(value, &o)) {
@ -2733,7 +2717,7 @@ void Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
} else if (Operand::CanHold(~value, &o)) {
mvn(rd, o, cond);
} else {
LoadDecodableImmediate(rd, value, cond);
LoadPatchableImmediate(rd, value, cond);
}
}
@ -3019,7 +3003,7 @@ void Assembler::AddImmediate(Register rd, Register rn, int32_t value,
mvn(IP, o, cond);
sub(rd, rn, Operand(IP), cond);
} else {
LoadDecodableImmediate(IP, value, cond);
LoadPatchableImmediate(IP, value, cond);
add(rd, rn, Operand(IP), cond);
}
}
@ -3045,7 +3029,7 @@ void Assembler::AddImmediateSetFlags(Register rd, Register rn, int32_t value,
mvn(IP, o, cond);
subs(rd, rn, Operand(IP), cond);
} else {
LoadDecodableImmediate(IP, value, cond);
LoadPatchableImmediate(IP, value, cond);
adds(rd, rn, Operand(IP), cond);
}
}
@ -3071,7 +3055,7 @@ void Assembler::SubImmediateSetFlags(Register rd, Register rn, int32_t value,
mvn(IP, o, cond);
adds(rd, rn, Operand(IP), cond);
} else {
LoadDecodableImmediate(IP, value, cond);
LoadPatchableImmediate(IP, value, cond);
subs(rd, rn, Operand(IP), cond);
}
}
@ -3543,44 +3527,6 @@ void Assembler::Stop(const char* message) {
}
// Pre-refactoring ARM implementation (removed by this change in favor of
// ObjectPool): adds |obj| to the pool, reusing an existing entry if the
// object was pooled before.
int32_t Assembler::AddObject(const Object& obj) {
// Pool entries outlive the current scope and must be GC-stable.
ASSERT(obj.IsNotTemporaryScopedHandle());
ASSERT(obj.IsOld());
if (object_pool_.IsNull()) {
// The object pool cannot be used in the vm isolate.
ASSERT(Isolate::Current() != Dart::vm_isolate());
object_pool_ = GrowableObjectArray::New(Heap::kOld);
}
// Reuse an existing entry when possible.
intptr_t index = object_pool_index_table_.Lookup(&obj);
if (index != ObjIndexPair::kNoIndex) {
return index;
}
object_pool_.Add(obj, Heap::kOld);
object_pool_index_table_.Insert(
ObjIndexPair(&obj, object_pool_.Length() - 1));
return object_pool_.Length() - 1;
}
// Pre-refactoring ARM implementation (removed by this change): appends
// |label|'s address to the pool as a Smi, always creating a new entry.
int32_t Assembler::AddExternalLabel(const ExternalLabel* label) {
if (object_pool_.IsNull()) {
// The object pool cannot be used in the vm isolate.
ASSERT(Isolate::Current() != Dart::vm_isolate());
object_pool_ = GrowableObjectArray::New(Heap::kOld);
}
const word address = label->address();
ASSERT(Utils::IsAligned(address, 4));
// The address is stored in the object array as a RawSmi.
const Smi& smi = Smi::Handle(Smi::New(address >> kSmiTagShift));
// Do not reuse an existing entry, since each reference may be patched
// independently.
object_pool_.Add(smi, Heap::kOld);
return object_pool_.Length() - 1;
}
Address Assembler::ElementAddressForIntIndex(bool is_load,
bool is_external,
intptr_t cid,

View file

@ -316,7 +316,6 @@ class Assembler : public ValueObject {
public:
explicit Assembler(bool use_far_branches = false)
: buffer_(),
object_pool_(GrowableObjectArray::Handle()),
prologue_offset_(-1),
use_far_branches_(use_far_branches),
comments_(),
@ -341,7 +340,7 @@ class Assembler : public ValueObject {
ASSERT(buffer_.pointer_offsets().length() == 0); // No pointers in code.
return buffer_.pointer_offsets();
}
const GrowableObjectArray& object_pool() const { return object_pool_; }
const GrowableObjectArray& object_pool() const { return object_pool_.data(); }
bool use_far_branches() const {
return FLAG_use_far_branches || use_far_branches_;
@ -646,7 +645,6 @@ class Assembler : public ValueObject {
// Load and Store.
// These three do not clobber IP.
void LoadPatchableImmediate(Register rd, int32_t value, Condition cond = AL);
void LoadDecodableImmediate(Register rd, int32_t value, Condition cond = AL);
void LoadImmediate(Register rd, int32_t value, Condition cond = AL);
// These two may clobber IP.
void LoadSImmediate(SRegister sd, float value, Condition cond = AL);
@ -959,10 +957,7 @@ class Assembler : public ValueObject {
private:
AssemblerBuffer buffer_; // Contains position independent code.
GrowableObjectArray& object_pool_; // Objects and patchable jump targets.
// Hashmap for fast lookup in object pool.
DirectChainedHashMap<ObjIndexPair> object_pool_index_table_;
ObjectPool object_pool_; // Objects and patchable jump targets.
int32_t prologue_offset_;
@ -973,9 +968,6 @@ class Assembler : public ValueObject {
void movw(Register rd, uint16_t imm16, Condition cond = AL);
void movt(Register rd, uint16_t imm16, Condition cond = AL);
int32_t AddObject(const Object& obj);
int32_t AddExternalLabel(const ExternalLabel* label);
void BindARMv6(Label* label);
void BindARMv7(Label* label);

View file

@ -27,73 +27,61 @@ DECLARE_FLAG(bool, inline_alloc);
Assembler::Assembler(bool use_far_branches)
: buffer_(),
object_pool_(GrowableObjectArray::Handle()),
patchable_pool_entries_(),
prologue_offset_(-1),
use_far_branches_(use_far_branches),
comments_(),
allow_constant_pool_(true) {
if (Isolate::Current() != Dart::vm_isolate()) {
object_pool_ = GrowableObjectArray::New(Heap::kOld);
// These objects and labels need to be accessible through every pool-pointer
// at the same index.
object_pool_.Add(Object::null_object(), Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_index_table_.Insert(ObjIndexPair(&Object::null_object(), 0));
intptr_t index =
object_pool_.AddObject(Object::null_object(), kNotPatchable);
ASSERT(index == 0);
object_pool_.Add(Bool::True(), Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_index_table_.Insert(ObjIndexPair(&Bool::True(), 1));
index = object_pool_.AddObject(Bool::True(), kNotPatchable);
ASSERT(index == 1);
object_pool_.Add(Bool::False(), Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_index_table_.Insert(ObjIndexPair(&Bool::False(), 2));
index = object_pool_.AddObject(Bool::False(), kNotPatchable);
ASSERT(index == 2);
const Smi& vacant = Smi::Handle(Smi::New(0xfa >> kSmiTagShift));
StubCode* stub_code = Isolate::Current()->stub_code();
if (stub_code->UpdateStoreBuffer_entry() != NULL) {
FindExternalLabel(&stub_code->UpdateStoreBufferLabel(), kNotPatchable);
object_pool_.AddExternalLabel(
&stub_code->UpdateStoreBufferLabel(), kNotPatchable);
} else {
object_pool_.Add(vacant, Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_.AddObject(vacant, kNotPatchable);
}
if (stub_code->CallToRuntime_entry() != NULL) {
FindExternalLabel(&stub_code->CallToRuntimeLabel(), kNotPatchable);
object_pool_.AddExternalLabel(
&stub_code->CallToRuntimeLabel(), kNotPatchable);
} else {
object_pool_.Add(vacant, Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_.AddObject(vacant, kNotPatchable);
}
// Create fixed object pool entries for debugger stubs.
if (stub_code->ICCallBreakpoint_entry() != NULL) {
intptr_t index =
FindExternalLabel(&stub_code->ICCallBreakpointLabel(),
kNotPatchable);
object_pool_.AddExternalLabel(&stub_code->ICCallBreakpointLabel(),
kNotPatchable);
ASSERT(index == kICCallBreakpointCPIndex);
} else {
object_pool_.Add(vacant, Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_.AddObject(vacant, kNotPatchable);
}
if (stub_code->ClosureCallBreakpoint_entry() != NULL) {
intptr_t index =
FindExternalLabel(&stub_code->ClosureCallBreakpointLabel(),
kNotPatchable);
intptr_t index = object_pool_.AddExternalLabel(
&stub_code->ClosureCallBreakpointLabel(), kNotPatchable);
ASSERT(index == kClosureCallBreakpointCPIndex);
} else {
object_pool_.Add(vacant, Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_.AddObject(vacant, kNotPatchable);
}
if (stub_code->RuntimeCallBreakpoint_entry() != NULL) {
intptr_t index =
FindExternalLabel(&stub_code->RuntimeCallBreakpointLabel(),
kNotPatchable);
intptr_t index = object_pool_.AddExternalLabel(
&stub_code->RuntimeCallBreakpointLabel(), kNotPatchable);
ASSERT(index == kRuntimeCallBreakpointCPIndex);
} else {
object_pool_.Add(vacant, Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_.AddObject(vacant, kNotPatchable);
}
}
}
@ -424,59 +412,10 @@ void Assembler::LoadWordFromPoolOffsetFixed(Register dst, Register pp,
}
// Pre-refactoring ARM64 implementation (removed by this change, its logic
// moved into ObjectPool::FindExternalLabel/AddObject).
intptr_t Assembler::FindExternalLabel(const ExternalLabel* label,
Patchability patchable) {
// The object pool cannot be used in the vm isolate.
ASSERT(Isolate::Current() != Dart::vm_isolate());
ASSERT(!object_pool_.IsNull());
const uword address = label->address();
// 4-byte alignment keeps the Smi tag bit clear for the raw-address Smi.
ASSERT(Utils::IsAligned(address, 4));
// The address is stored in the object array as a RawSmi.
const Smi& smi = Smi::Handle(reinterpret_cast<RawSmi*>(address));
if (patchable == kNotPatchable) {
// If the call site is not patchable, we can try to re-use an existing
// entry.
return FindObject(smi, kNotPatchable);
}
// If the call is patchable, do not reuse an existing entry since each
// reference may be patched independently.
object_pool_.Add(smi, Heap::kOld);
patchable_pool_entries_.Add(patchable);
return object_pool_.Length() - 1;
}
// Pre-refactoring ARM64 implementation (removed by this change, its logic
// moved into ObjectPool::FindObject/AddObject).
intptr_t Assembler::FindObject(const Object& obj, Patchability patchable) {
// The object pool cannot be used in the vm isolate.
ASSERT(Isolate::Current() != Dart::vm_isolate());
ASSERT(!object_pool_.IsNull());
// If the object is not patchable, check if we've already got it in the
// object pool.
if (patchable == kNotPatchable) {
intptr_t idx = object_pool_index_table_.Lookup(&obj);
if (idx != ObjIndexPair::kNoIndex) {
// Only non-patchable entries are ever recorded in the index table.
ASSERT(patchable_pool_entries_[idx] == kNotPatchable);
return idx;
}
}
object_pool_.Add(obj, Heap::kOld);
patchable_pool_entries_.Add(patchable);
if (patchable == kNotPatchable) {
// The object isn't patchable. Record the index for fast lookup.
object_pool_index_table_.Insert(
ObjIndexPair(&obj, object_pool_.Length() - 1));
}
return object_pool_.Length() - 1;
}
intptr_t Assembler::FindImmediate(int64_t imm) {
ASSERT(Isolate::Current() != Dart::vm_isolate());
ASSERT(!object_pool_.IsNull());
const Smi& smi = Smi::Handle(reinterpret_cast<RawSmi*>(imm));
return FindObject(smi, kNotPatchable);
return object_pool_.FindObject(smi, kNotPatchable);
}
@ -527,7 +466,7 @@ void Assembler::LoadExternalLabel(Register dst,
const int64_t target = static_cast<int64_t>(label->address());
if (CanLoadImmediateFromPool(target, pp)) {
const int32_t offset =
Array::element_offset(FindExternalLabel(label, patchable));
Array::element_offset(object_pool_.FindExternalLabel(label, patchable));
LoadWordFromPoolOffset(dst, pp, offset);
} else {
LoadImmediate(dst, target, kNoPP);
@ -540,7 +479,7 @@ void Assembler::LoadExternalLabelFixed(Register dst,
Patchability patchable,
Register pp) {
const int32_t offset =
Array::element_offset(FindExternalLabel(label, patchable));
Array::element_offset(object_pool_.FindExternalLabel(label, patchable));
LoadWordFromPoolOffsetFixed(dst, pp, offset);
}
@ -553,7 +492,7 @@ void Assembler::LoadIsolate(Register dst, Register pp) {
void Assembler::LoadObject(Register dst, const Object& object, Register pp) {
if (CanLoadObjectFromPool(object)) {
const int32_t offset =
Array::element_offset(FindObject(object, kNotPatchable));
Array::element_offset(object_pool_.FindObject(object, kNotPatchable));
LoadWordFromPoolOffset(dst, pp, offset);
} else {
ASSERT((Isolate::Current() == Dart::vm_isolate()) ||

View file

@ -475,7 +475,7 @@ class Assembler : public ValueObject {
ASSERT(buffer_.pointer_offsets().length() == 0); // No pointers in code.
return buffer_.pointer_offsets();
}
const GrowableObjectArray& object_pool() const { return object_pool_; }
const GrowableObjectArray& object_pool() const { return object_pool_.data(); }
bool use_far_branches() const {
return FLAG_use_far_branches || use_far_branches_;
@ -1299,11 +1299,6 @@ class Assembler : public ValueObject {
static const int kClosureCallBreakpointCPIndex = 6;
static const int kRuntimeCallBreakpointCPIndex = 7;
enum Patchability {
kPatchable,
kNotPatchable,
};
bool allow_constant_pool() const {
return allow_constant_pool_;
}
@ -1313,9 +1308,6 @@ class Assembler : public ValueObject {
void LoadWordFromPoolOffset(Register dst, Register pp, uint32_t offset);
void LoadWordFromPoolOffsetFixed(Register dst, Register pp, uint32_t offset);
intptr_t FindExternalLabel(const ExternalLabel* label,
Patchability patchable);
intptr_t FindObject(const Object& obj, Patchability patchable);
intptr_t FindImmediate(int64_t imm);
bool CanLoadObjectFromPool(const Object& object);
bool CanLoadImmediateFromPool(int64_t imm, Register pp);
@ -1425,13 +1417,7 @@ class Assembler : public ValueObject {
AssemblerBuffer buffer_; // Contains position independent code.
// Objects and patchable jump targets.
GrowableObjectArray& object_pool_;
// Patchability of pool entries.
GrowableArray<Patchability> patchable_pool_entries_;
// Hashmap for fast lookup in object pool.
DirectChainedHashMap<ObjIndexPair> object_pool_index_table_;
ObjectPool object_pool_;
int32_t prologue_offset_;

View file

@ -62,13 +62,13 @@ ASSEMBLER_TEST_GENERATE(MovImm16, assembler) {
#if defined(USING_SIMULATOR)
// ARMv7 is the default.
HostCPUFeatures::set_arm_version(ARMv6);
__ LoadDecodableImmediate(R0, 0x12345678 << 1);
__ LoadPatchableImmediate(R0, 0x12345678 << 1);
HostCPUFeatures::set_arm_version(ARMv7);
__ LoadDecodableImmediate(R1, 0x12345678);
__ LoadPatchableImmediate(R1, 0x12345678);
__ sub(R0, R0, Operand(R1));
__ bx(LR);
#else
__ LoadDecodableImmediate(R0, 0x12345678);
__ LoadPatchableImmediate(R0, 0x12345678);
__ bx(LR);
#endif
}

View file

@ -472,33 +472,12 @@ void Assembler::LoadObject(Register rd, const Object& object) {
// Make sure that class CallPattern is able to decode this load from the
// object pool.
const int32_t offset =
Array::data_offset() + 4*AddObject(object) - kHeapObjectTag;
LoadWordFromPoolOffset(rd, offset);
Array::element_offset(object_pool_.FindObject(object, kNotPatchable));
LoadWordFromPoolOffset(rd, offset - kHeapObjectTag);
}
}
// Pre-refactoring MIPS implementation (removed by this change in favor of
// ObjectPool): adds |obj| to the pool, reusing an existing entry if the
// object was pooled before.
int32_t Assembler::AddObject(const Object& obj) {
// Pool entries outlive the current scope and must be GC-stable.
ASSERT(obj.IsNotTemporaryScopedHandle());
ASSERT(obj.IsOld());
if (object_pool_.IsNull()) {
// The object pool cannot be used in the vm isolate.
ASSERT(Isolate::Current() != Dart::vm_isolate());
object_pool_ = GrowableObjectArray::New(Heap::kOld);
}
// Reuse an existing entry when possible.
intptr_t index = object_pool_index_table_.Lookup(&obj);
if (index != ObjIndexPair::kNoIndex) {
return index;
}
object_pool_.Add(obj, Heap::kOld);
object_pool_index_table_.Insert(
ObjIndexPair(&obj, object_pool_.Length() - 1));
return object_pool_.Length() - 1;
}
void Assembler::PushObject(const Object& object) {
ASSERT(!in_delay_slot_);
LoadObject(TMP, object);
@ -1156,23 +1135,6 @@ void Assembler::LeaveCallRuntimeFrame() {
}
// Pre-refactoring MIPS implementation (removed by this change): appends
// |label|'s address to the pool as a Smi, always creating a new entry.
int32_t Assembler::AddExternalLabel(const ExternalLabel* label) {
if (object_pool_.IsNull()) {
// The object pool cannot be used in the vm isolate.
ASSERT(Isolate::Current() != Dart::vm_isolate());
object_pool_ = GrowableObjectArray::New(Heap::kOld);
}
const word address = label->address();
ASSERT(Utils::IsAligned(address, 4));
// The address is stored in the object array as a RawSmi.
const Smi& smi = Smi::Handle(Smi::New(address >> kSmiTagShift));
// Do not reuse an existing entry, since each reference may be patched
// independently.
object_pool_.Add(smi, Heap::kOld);
return object_pool_.Length() - 1;
}
Address Assembler::ElementAddressForIntIndex(bool is_external,
intptr_t cid,
intptr_t index_scale,

View file

@ -236,7 +236,6 @@ class Assembler : public ValueObject {
public:
explicit Assembler(bool use_far_branches = false)
: buffer_(),
object_pool_(GrowableObjectArray::Handle()),
prologue_offset_(-1),
use_far_branches_(use_far_branches),
delay_slot_available_(false),
@ -263,7 +262,7 @@ class Assembler : public ValueObject {
const ZoneGrowableArray<intptr_t>& GetPointerOffsets() const {
return buffer_.pointer_offsets();
}
const GrowableObjectArray& object_pool() const { return object_pool_; }
const GrowableObjectArray& object_pool() const { return object_pool_.data(); }
void FinalizeInstructions(const MemoryRegion& region) {
buffer_.FinalizeInstructions(region);
}
@ -927,9 +926,9 @@ class Assembler : public ValueObject {
void BranchLinkPatchable(const ExternalLabel* label) {
ASSERT(!in_delay_slot_);
const int32_t offset =
Array::data_offset() + 4*AddExternalLabel(label) - kHeapObjectTag;
LoadWordFromPoolOffset(T9, offset);
const int32_t offset = Array::element_offset(
object_pool_.FindExternalLabel(label, kPatchable));
LoadWordFromPoolOffset(T9, offset - kHeapObjectTag);
jalr(T9);
delay_slot_available_ = false; // CodePatcher expects a nop.
}
@ -1613,10 +1612,7 @@ class Assembler : public ValueObject {
private:
AssemblerBuffer buffer_;
GrowableObjectArray& object_pool_; // Objects and patchable jump targets.
// Hashmap for fast lookup in object pool.
DirectChainedHashMap<ObjIndexPair> object_pool_index_table_;
ObjectPool object_pool_; // Objects and patchable jump targets.
intptr_t prologue_offset_;
@ -1624,9 +1620,6 @@ class Assembler : public ValueObject {
bool delay_slot_available_;
bool in_delay_slot_;
int32_t AddObject(const Object& obj);
int32_t AddExternalLabel(const ExternalLabel* label);
class CodeComment : public ZoneAllocated {
public:
CodeComment(intptr_t pc_offset, const String& comment)

View file

@ -23,8 +23,6 @@ DECLARE_FLAG(bool, inline_alloc);
Assembler::Assembler(bool use_far_branches)
: buffer_(),
object_pool_(GrowableObjectArray::Handle()),
patchable_pool_entries_(),
prologue_offset_(-1),
comments_(),
allow_constant_pool_(true) {
@ -32,66 +30,55 @@ Assembler::Assembler(bool use_far_branches)
ASSERT(!use_far_branches);
Isolate* isolate = Isolate::Current();
if (isolate != Dart::vm_isolate()) {
object_pool_ = GrowableObjectArray::New(Heap::kOld);
// These objects and labels need to be accessible through every pool-pointer
// at the same index.
object_pool_.Add(Object::null_object(), Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_index_table_.Insert(ObjIndexPair(&Object::null_object(), 0));
intptr_t index =
object_pool_.AddObject(Object::null_object(), kNotPatchable);
ASSERT(index == 0);
object_pool_.Add(Bool::True(), Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_index_table_.Insert(ObjIndexPair(&Bool::True(), 1));
index = object_pool_.AddObject(Bool::True(), kNotPatchable);
ASSERT(index == 1);
object_pool_.Add(Bool::False(), Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_index_table_.Insert(ObjIndexPair(&Bool::False(), 2));
index = object_pool_.AddObject(Bool::False(), kNotPatchable);
ASSERT(index == 2);
const Smi& vacant = Smi::Handle(Smi::New(0xfa >> kSmiTagShift));
StubCode* stub_code = isolate->stub_code();
if (stub_code->UpdateStoreBuffer_entry() != NULL) {
FindExternalLabel(&stub_code->UpdateStoreBufferLabel(), kNotPatchable);
object_pool_.AddExternalLabel(&stub_code->UpdateStoreBufferLabel(),
kNotPatchable);
} else {
object_pool_.Add(vacant, Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_.AddObject(vacant, kNotPatchable);
}
if (stub_code->CallToRuntime_entry() != NULL) {
FindExternalLabel(&stub_code->CallToRuntimeLabel(), kNotPatchable);
object_pool_.AddExternalLabel(&stub_code->CallToRuntimeLabel(),
kNotPatchable);
} else {
object_pool_.Add(vacant, Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_.AddObject(vacant, kNotPatchable);
}
// Create fixed object pool entries for debugger stubs.
if (stub_code->ICCallBreakpoint_entry() != NULL) {
intptr_t index =
FindExternalLabel(&stub_code->ICCallBreakpointLabel(),
kNotPatchable);
index = object_pool_.AddExternalLabel(
&stub_code->ICCallBreakpointLabel(), kNotPatchable);
ASSERT(index == kICCallBreakpointCPIndex);
} else {
object_pool_.Add(vacant, Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_.AddObject(vacant, kNotPatchable);
}
if (stub_code->ClosureCallBreakpoint_entry() != NULL) {
intptr_t index =
FindExternalLabel(&stub_code->ClosureCallBreakpointLabel(),
kNotPatchable);
index = object_pool_.AddExternalLabel(
&stub_code->ClosureCallBreakpointLabel(), kNotPatchable);
ASSERT(index == kClosureCallBreakpointCPIndex);
} else {
object_pool_.Add(vacant, Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_.AddObject(vacant, kNotPatchable);
}
if (stub_code->RuntimeCallBreakpoint_entry() != NULL) {
intptr_t index =
FindExternalLabel(&stub_code->RuntimeCallBreakpointLabel(),
kNotPatchable);
index = object_pool_.AddExternalLabel(
&stub_code->RuntimeCallBreakpointLabel(), kNotPatchable);
ASSERT(index == kRuntimeCallBreakpointCPIndex);
} else {
object_pool_.Add(vacant, Heap::kOld);
patchable_pool_entries_.Add(kNotPatchable);
object_pool_.AddObject(vacant, kNotPatchable);
}
}
}
@ -132,7 +119,7 @@ void Assembler::LoadExternalLabel(Register dst,
Patchability patchable,
Register pp) {
const int32_t offset =
Array::element_offset(FindExternalLabel(label, patchable));
Array::element_offset(object_pool_.FindExternalLabel(label, patchable));
LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag);
}
@ -152,7 +139,7 @@ void Assembler::CallPatchable(const ExternalLabel* label) {
ASSERT(allow_constant_pool());
intptr_t call_start = buffer_.GetPosition();
const int32_t offset =
Array::element_offset(FindExternalLabel(label, kPatchable));
Array::element_offset(object_pool_.FindExternalLabel(label, kPatchable));
call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag));
ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
}
@ -162,8 +149,8 @@ void Assembler::Call(const ExternalLabel* label, Register pp) {
if (Isolate::Current() == Dart::vm_isolate()) {
call(label);
} else {
const int32_t offset =
Array::element_offset(FindExternalLabel(label, kNotPatchable));
const int32_t offset = Array::element_offset(
object_pool_.FindExternalLabel(label, kNotPatchable));
call(Address::AddressBaseImm32(pp, offset - kHeapObjectTag));
}
}
@ -2620,7 +2607,7 @@ void Assembler::JmpPatchable(const ExternalLabel* label, Register pp) {
ASSERT(allow_constant_pool());
intptr_t call_start = buffer_.GetPosition();
const int32_t offset =
Array::element_offset(FindExternalLabel(label, kPatchable));
Array::element_offset(object_pool_.FindExternalLabel(label, kPatchable));
// Patchable jumps always use a 32-bit immediate encoding.
jmp(Address::AddressBaseImm32(pp, offset - kHeapObjectTag));
ASSERT((buffer_.GetPosition() - call_start) == JumpPattern::kLengthInBytes);
@ -2628,8 +2615,8 @@ void Assembler::JmpPatchable(const ExternalLabel* label, Register pp) {
void Assembler::Jmp(const ExternalLabel* label, Register pp) {
const int32_t offset =
Array::element_offset(FindExternalLabel(label, kNotPatchable));
const int32_t offset = Array::element_offset(
object_pool_.FindExternalLabel(label, kNotPatchable));
jmp(Address(pp, offset - kHeapObjectTag));
}
@ -2786,54 +2773,6 @@ void Assembler::Drop(intptr_t stack_elements, Register tmp) {
}
// Pre-refactoring x64 implementation (removed by this change, its logic
// moved into ObjectPool::FindObject/AddObject).
intptr_t Assembler::FindObject(const Object& obj, Patchability patchable) {
// The object pool cannot be used in the vm isolate.
ASSERT(Isolate::Current() != Dart::vm_isolate());
ASSERT(!object_pool_.IsNull());
// If the object is not patchable, check if we've already got it in the
// object pool.
if (patchable == kNotPatchable) {
intptr_t idx = object_pool_index_table_.Lookup(&obj);
if (idx != ObjIndexPair::kNoIndex) {
// Only non-patchable entries are ever recorded in the index table.
ASSERT(patchable_pool_entries_[idx] == kNotPatchable);
return idx;
}
}
object_pool_.Add(obj, Heap::kOld);
patchable_pool_entries_.Add(patchable);
if (patchable == kNotPatchable) {
// The object isn't patchable. Record the index for fast lookup.
object_pool_index_table_.Insert(
ObjIndexPair(&obj, object_pool_.Length() - 1));
}
return object_pool_.Length() - 1;
}
// Pre-refactoring x64 implementation (removed by this change, its logic
// moved into ObjectPool::FindExternalLabel/AddObject).
intptr_t Assembler::FindExternalLabel(const ExternalLabel* label,
Patchability patchable) {
// The object pool cannot be used in the vm isolate.
ASSERT(Isolate::Current() != Dart::vm_isolate());
ASSERT(!object_pool_.IsNull());
const uword address = label->address();
// 4-byte alignment keeps the Smi tag bit clear for the raw-address Smi.
ASSERT(Utils::IsAligned(address, 4));
// The address is stored in the object array as a RawSmi.
const Smi& smi = Smi::Handle(reinterpret_cast<RawSmi*>(address));
if (patchable == kNotPatchable) {
// If the call site is not patchable, we can try to re-use an existing
// entry.
return FindObject(smi, kNotPatchable);
}
// If the call is patchable, do not reuse an existing entry since each
// reference may be patched independently.
object_pool_.Add(smi, Heap::kOld);
patchable_pool_entries_.Add(patchable);
return object_pool_.Length() - 1;
}
// A set of VM objects that are present in every constant pool.
static bool IsAlwaysInConstantPool(const Object& object) {
// TODO(zra): Evaluate putting all VM heap objects into the pool.
@ -2877,7 +2816,7 @@ void Assembler::LoadIsolate(Register dst) {
void Assembler::LoadObject(Register dst, const Object& object, Register pp) {
if (CanLoadFromObjectPool(object)) {
const int32_t offset =
Array::element_offset(FindObject(object, kNotPatchable));
Array::element_offset(object_pool_.FindObject(object, kNotPatchable));
LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag);
} else {
ASSERT((Isolate::Current() == Dart::vm_isolate()) ||
@ -2912,7 +2851,7 @@ void Assembler::PushObject(const Object& object, Register pp) {
void Assembler::CompareObject(Register reg, const Object& object, Register pp) {
if (CanLoadFromObjectPool(object)) {
const int32_t offset =
Array::element_offset(FindObject(object, kNotPatchable));
Array::element_offset(object_pool_.FindObject(object, kNotPatchable));
cmpq(reg, Address(pp, offset-kHeapObjectTag));
} else {
CompareImmediate(
@ -2923,9 +2862,8 @@ void Assembler::CompareObject(Register reg, const Object& object, Register pp) {
intptr_t Assembler::FindImmediate(int64_t imm) {
ASSERT(Isolate::Current() != Dart::vm_isolate());
ASSERT(!object_pool_.IsNull());
const Smi& smi = Smi::Handle(reinterpret_cast<RawSmi*>(imm));
return FindObject(smi, kNotPatchable);
return object_pool_.FindObject(smi, kNotPatchable);
}

View file

@ -745,11 +745,6 @@ class Assembler : public ValueObject {
void Drop(intptr_t stack_elements, Register tmp = TMP);
enum Patchability {
kPatchable,
kNotPatchable,
};
bool allow_constant_pool() const {
return allow_constant_pool_;
}
@ -900,7 +895,7 @@ class Assembler : public ValueObject {
const ZoneGrowableArray<intptr_t>& GetPointerOffsets() const {
return buffer_.pointer_offsets();
}
const GrowableObjectArray& object_pool() const { return object_pool_; }
const GrowableObjectArray& object_pool() const { return object_pool_.data(); }
void FinalizeInstructions(const MemoryRegion& region) {
buffer_.FinalizeInstructions(region);
@ -1033,13 +1028,7 @@ class Assembler : public ValueObject {
AssemblerBuffer buffer_;
// Objects and jump targets.
GrowableObjectArray& object_pool_;
// Patchability of pool entries.
GrowableArray<Patchability> patchable_pool_entries_;
// Hashmap for fast lookup in object pool.
DirectChainedHashMap<ObjIndexPair> object_pool_index_table_;
ObjectPool object_pool_;
intptr_t prologue_offset_;
@ -1061,9 +1050,6 @@ class Assembler : public ValueObject {
GrowableArray<CodeComment*> comments_;
bool allow_constant_pool_;
intptr_t FindObject(const Object& obj, Patchability patchable);
intptr_t FindExternalLabel(const ExternalLabel* label,
Patchability patchable);
intptr_t FindImmediate(int64_t imm);
void LoadExternalLabel(Register dst,
const ExternalLabel* label,