Reland "[vm/isolates] Introduce fast isolate spawn in AOT."

This reverts commit 922ea3e9b6 (in patchset 1), with a fix for the assertion triggered in https://ci.chromium.org/b/8883214567628884960 (in patchset 2) and a fix for a deadlock around the symbols table mutex (in patchset 4).

Original commit description:

Speed-up is achieved by sharing most of the Dart code, the object store
and the class table between isolates in a single isolate group. Instead
of bootstrapping each new isolate from the snapshot, the isolate is
initialized by setting pointers to the data structures already set up
for the first isolate; only a few isolate-specific structures (moved to
the newly introduced isolate_object_store) are created.
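
For orientation, the AOT fast-spawn path boils down to the following condensed
sketch of the CloneIntoChildIsolateAOT helper added in the dart.cc diff further
down (error handling and timeline scopes omitted; not the exact code):

// Simplified sketch: reuse the donor isolate's shared structures and only
// create isolate-local state for the new isolate.
static bool CloneIntoChildIsolateAOT(Thread* T, Isolate* I,
                                     IsolateGroup* group) {
  Isolate* donor = group != nullptr ? group->FirstIsolate() : nullptr;
  if (donor == nullptr) return false;  // Fall back to snapshot bootstrap.
  I->isolate_object_store()->Init();
  I->isolate_object_store()->PreallocateObjects();
  // Start from the donor's saved initial field values.
  I->set_field_table(T, donor->saved_initial_field_table()->Clone());
  ReversePcLookupCache::BuildAndAttachToIsolate(I);
  return true;
}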

To allow safe cross-isolate mutation of switchable call sites and type
test caches, additional synchronization via RunWithStoppedMutators
(which relies on safepoints) was added.
Besides switchable call sites, no other mutation of the Dart code is
done in AOT, which is what allows such sharing.
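
The synchronization pattern is the one used in the code_patcher changes below:
the public patching entry points now wrap the actual patching in
IsolateGroup::RunWithStoppedMutators, so all mutator threads are parked at a
safepoint while a call site's data/target pair is rewritten. A condensed
sketch of that pattern:

void CodePatcher::PatchSwitchableCallAt(uword return_address,
                                        const Code& caller_code,
                                        const Object& data,
                                        const Code& target) {
  auto thread = Thread::Current();
  // Ensure all mutators are suspended while the data/target pair is updated.
  thread->isolate_group()->RunWithStoppedMutators([&]() {
    PatchSwitchableCallAtWithMutatorsStopped(thread, return_address,
                                             caller_code, data, target);
  });
}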

Bug: https://github.com/dart-lang/sdk/issues/37835
Bug: https://github.com/dart-lang/sdk/issues/36097
Change-Id: I655e337198214c9dfacbe76f7852b941b5a7e910
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/143462
Commit-Queue: Alexander Aprelev <aam@google.com>
Reviewed-by: Ryan Macnak <rmacnak@google.com>
Alexander Aprelev, 2020-04-17 03:02:27 +00:00, committed by commit-bot@chromium.org
parent cc8cc92c52
commit 9c4a322b08
57 changed files with 1269 additions and 563 deletions

View file

@ -167,7 +167,11 @@ class SpawnIsolateTask : public ThreadPool::Task {
return;
}
#if defined(DART_PRECOMPILED_RUNTIME)
isolate = CreateWithinExistingIsolateGroupAOT(group, name, &error);
#else
isolate = CreateWithinExistingIsolateGroup(group, name, &error);
#endif
parent_isolate_->DecrementSpawnCount();
parent_isolate_ = nullptr;
if (isolate == nullptr) {

View file

@ -196,7 +196,7 @@ var tests = <IsolateTest>[
'limit': 100,
};
var result = await isolate.invokeRpcNoUpgrade('getRetainingPath', params);
expect(result['gcRootType'], 'object store');
expect(result['gcRootType'], 'isolate_object store');
expect(result['elements'].length, 0);
},
];

View file

@ -1519,7 +1519,7 @@ void ClassFinalizer::RemapClassIds(intptr_t* old_to_new_cid) {
// The [HeapIterationScope] also safepoints all threads.
HeapIterationScope his(T);
IG->class_table()->Remap(old_to_new_cid);
IG->shared_class_table()->Remap(old_to_new_cid);
IG->ForEachIsolate(
[&](Isolate* I) {
I->set_remapping_cids(true);

View file

@ -31,7 +31,8 @@ SharedClassTable::SharedClassTable()
calloc(capacity_, sizeof(RelaxedAtomic<intptr_t>))));
} else {
// Duplicate the class table from the VM isolate.
auto vm_shared_class_table = Dart::vm_isolate()->group()->class_table();
auto vm_shared_class_table =
Dart::vm_isolate()->group()->shared_class_table();
capacity_ = vm_shared_class_table->capacity_;
// Note that [calloc] will zero-initialize the memory.
RelaxedAtomic<intptr_t>* table = reinterpret_cast<RelaxedAtomic<intptr_t>*>(
@ -71,6 +72,13 @@ SharedClassTable::~SharedClassTable() {
NOT_IN_PRODUCT(free(trace_allocation_table_.load()));
}
void ClassTable::set_table(RawClass** table) {
Isolate* isolate = Isolate::Current();
ASSERT(isolate != nullptr);
table_.store(table);
isolate->set_cached_class_table_table(table);
}
ClassTable::ClassTable(SharedClassTable* shared_class_table)
: top_(kNumPredefinedCids),
capacity_(0),
@ -81,6 +89,9 @@ ClassTable::ClassTable(SharedClassTable* shared_class_table)
ASSERT(kInitialCapacity >= kNumPredefinedCids);
capacity_ = kInitialCapacity;
// Note that [calloc] will zero-initialize the memory.
// Don't use set_table because the caller is supposed to set up the isolate's
// cached copy when constructing ClassTable. Isolate::Current might not
// be available at this point yet.
table_.store(static_cast<RawClass**>(calloc(capacity_, sizeof(RawClass*))));
} else {
// Duplicate the class table from the VM isolate.
@ -100,6 +111,9 @@ ClassTable::ClassTable(SharedClassTable* shared_class_table)
table[kDynamicCid] = vm_class_table->At(kDynamicCid);
table[kVoidCid] = vm_class_table->At(kVoidCid);
table[kNeverCid] = vm_class_table->At(kNeverCid);
// Don't use set_table because the caller is supposed to set up the isolate's
// cached copy when constructing ClassTable. Isolate::Current might not
// be available at this point yet.
table_.store(table);
}
}
@ -226,7 +240,7 @@ void ClassTable::Grow(intptr_t new_capacity) {
new_table[i] = 0;
}
old_class_tables_->Add(old_table);
table_.store(new_table);
set_table(new_table);
capacity_ = new_capacity;
}

View file

@ -355,14 +355,6 @@ class ClassTable {
void Print();
// Used by the generated code.
static intptr_t table_offset() { return OFFSET_OF(ClassTable, table_); }
// Used by the generated code.
static intptr_t shared_class_table_offset() {
return OFFSET_OF(ClassTable, shared_class_table_);
}
#ifndef PRODUCT
// Describes layout of heap stats for code generation. See offset_extractor.cc
struct ArrayLayout {
@ -387,9 +379,11 @@ class ClassTable {
friend class MarkingWeakVisitor;
friend class Scavenger;
friend class ScavengerWeakVisitor;
friend class Dart;
friend Isolate* CreateWithinExistingIsolateGroup(IsolateGroup* group,
const char* name,
char** error);
friend class Isolate; // for table()
static const int kInitialCapacity = SharedClassTable::kInitialCapacity;
static const int kCapacityIncrement = SharedClassTable::kCapacityIncrement;
@ -397,6 +391,9 @@ class ClassTable {
void Grow(intptr_t index);
RawClass** table() { return table_.load(); }
void set_table(RawClass** table);
intptr_t top_;
intptr_t capacity_;

View file

@ -2,6 +2,8 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#include <memory>
#include "vm/clustered_snapshot.h"
#include "platform/assert.h"
@ -235,7 +237,8 @@ class ClassSerializationCluster : public SerializationCluster {
UnboxedFieldBitmap CalculateTargetUnboxedFieldsBitmap(Serializer* s,
intptr_t class_id) {
const auto unboxed_fields_bitmap_host =
s->isolate()->group()->class_table()->GetUnboxedFieldsMapAt(class_id);
s->isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt(
class_id);
UnboxedFieldBitmap unboxed_fields_bitmap;
if (unboxed_fields_bitmap_host.IsEmpty() ||
@ -353,7 +356,7 @@ class ClassDeserializationCluster : public DeserializationCluster {
}
}
auto shared_class_table = d->isolate()->group()->class_table();
auto shared_class_table = d->isolate()->group()->shared_class_table();
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id));
Deserializer::InitializeHeader(cls, kClassCid, Class::InstanceSize());
@ -1205,6 +1208,10 @@ class FieldDeserializationCluster : public DeserializationCluster {
field.InitializeGuardedListLengthInObjectOffset();
}
}
Isolate* isolate = Isolate::Current();
isolate->set_saved_initial_field_table(
std::shared_ptr<FieldTable>(isolate->field_table()->Clone()));
}
};
@ -3043,7 +3050,8 @@ class InstanceSerializationCluster : public SerializationCluster {
const intptr_t next_field_offset = host_next_field_offset_in_words_
<< kWordSizeLog2;
const auto unboxed_fields_bitmap =
s->isolate()->group()->class_table()->GetUnboxedFieldsMapAt(cid_);
s->isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt(
cid_);
intptr_t offset = Instance::NextFieldOffset();
while (offset < next_field_offset) {
// Skips unboxed fields
@ -3079,7 +3087,8 @@ class InstanceSerializationCluster : public SerializationCluster {
<< kWordSizeLog2;
const intptr_t count = objects_.length();
const auto unboxed_fields_bitmap =
s->isolate()->group()->class_table()->GetUnboxedFieldsMapAt(cid_);
s->isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt(
cid_);
for (intptr_t i = 0; i < count; i++) {
RawInstance* instance = objects_[i];
AutoTraceObject(instance);
@ -3137,7 +3146,8 @@ class InstanceDeserializationCluster : public DeserializationCluster {
Object::RoundedAllocationSize(instance_size_in_words_ * kWordSize);
const auto unboxed_fields_bitmap =
d->isolate()->group()->class_table()->GetUnboxedFieldsMapAt(cid_);
d->isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt(
cid_);
for (intptr_t id = start_index_; id < stop_index_; id++) {
RawInstance* instance = reinterpret_cast<RawInstance*>(d->Ref(id));
bool is_canonical = d->Read<bool>();
@ -5542,7 +5552,7 @@ static const char* kObjectStoreFieldNames[] = {
#undef DECLARE_OBJECT_STORE_FIELD
};
void Serializer::WriteIsolateSnapshot(intptr_t num_base_objects,
void Serializer::WriteProgramSnapshot(intptr_t num_base_objects,
ObjectStore* object_store) {
NoSafepointScope no_safepoint;
@ -5553,7 +5563,7 @@ void Serializer::WriteIsolateSnapshot(intptr_t num_base_objects,
AddBaseObject(base_objects.At(i));
}
} else {
// Base objects carried over from WriteVMIsolateSnapshot.
// Base objects carried over from WriteVMSnapshot.
num_base_objects_ += num_base_objects;
next_ref_index_ += num_base_objects;
}
@ -5834,7 +5844,7 @@ void Deserializer::ReadDispatchTable() {
}
ASSERT(repeat_count == 0);
I->set_dispatch_table(table);
I->group()->set_dispatch_table(table);
#endif
}
@ -6217,7 +6227,7 @@ void Deserializer::ReadVMSnapshot() {
}
}
void Deserializer::ReadIsolateSnapshot(ObjectStore* object_store) {
void Deserializer::ReadProgramSnapshot(ObjectStore* object_store) {
Array& refs = Array::Handle();
Prepare();
@ -6255,8 +6265,8 @@ void Deserializer::ReadIsolateSnapshot(ObjectStore* object_store) {
thread()->isolate()->class_table()->CopySizesFromClassObjects();
heap_->old_space()->EvaluateAfterLoading();
#if defined(DEBUG)
Isolate* isolate = thread()->isolate();
#if defined(DEBUG)
isolate->ValidateClassTable();
isolate->heap()->Verify();
#endif
@ -6264,13 +6274,12 @@ void Deserializer::ReadIsolateSnapshot(ObjectStore* object_store) {
for (intptr_t i = 0; i < num_clusters_; i++) {
clusters_[i]->PostLoad(refs, kind_, zone_);
}
object_store->PostLoad();
isolate->isolate_object_store()->PreallocateObjects();
// Setup native resolver for bootstrap impl.
Bootstrap::SetupNativeResolver();
}
#if !defined(DART_PRECOMPILED_RUNTIME)
FullSnapshotWriter::FullSnapshotWriter(Snapshot::Kind kind,
uint8_t** vm_snapshot_data_buffer,
@ -6345,8 +6354,8 @@ intptr_t FullSnapshotWriter::WriteVMSnapshot() {
return num_objects;
}
void FullSnapshotWriter::WriteIsolateSnapshot(intptr_t num_base_objects) {
TIMELINE_DURATION(thread(), Isolate, "WriteIsolateSnapshot");
void FullSnapshotWriter::WriteProgramSnapshot(intptr_t num_base_objects) {
TIMELINE_DURATION(thread(), Isolate, "WriteProgramSnapshot");
Serializer serializer(thread(), kind_, isolate_snapshot_data_buffer_, alloc_,
kInitialSize, isolate_image_writer_, /*vm=*/false,
@ -6365,7 +6374,7 @@ void FullSnapshotWriter::WriteIsolateSnapshot(intptr_t num_base_objects) {
serializer.WriteVersionAndFeatures(false);
// Isolate snapshot roots are:
// - the object store
serializer.WriteIsolateSnapshot(num_base_objects, object_store);
serializer.WriteProgramSnapshot(num_base_objects, object_store);
serializer.FillHeader(serializer.kind());
clustered_isolate_size_ = serializer.bytes_written();
@ -6396,7 +6405,7 @@ void FullSnapshotWriter::WriteFullSnapshot() {
}
if (isolate_snapshot_data_buffer() != NULL) {
WriteIsolateSnapshot(num_base_objects);
WriteProgramSnapshot(num_base_objects);
}
if (FLAG_print_snapshot_sizes) {
@ -6559,7 +6568,7 @@ RawApiError* FullSnapshotReader::ReadVMSnapshot() {
return ApiError::null();
}
RawApiError* FullSnapshotReader::ReadIsolateSnapshot() {
RawApiError* FullSnapshotReader::ReadProgramSnapshot() {
SnapshotHeaderReader header_reader(kind_, buffer_, size_);
intptr_t offset = 0;
char* error =
@ -6585,7 +6594,7 @@ RawApiError* FullSnapshotReader::ReadIsolateSnapshot() {
}
auto object_store = thread_->isolate()->object_store();
deserializer.ReadIsolateSnapshot(object_store);
deserializer.ReadProgramSnapshot(object_store);
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_use_bare_instructions) {

View file

@ -157,7 +157,7 @@ class Serializer : public ThreadStackResource {
}
intptr_t WriteVMSnapshot(const Array& symbols);
void WriteIsolateSnapshot(intptr_t num_base_objects,
void WriteProgramSnapshot(intptr_t num_base_objects,
ObjectStore* object_store);
void AddVMIsolateBaseObjects();
@ -539,7 +539,7 @@ class Deserializer : public ThreadStackResource {
// message otherwise.
RawApiError* VerifyImageAlignment();
void ReadIsolateSnapshot(ObjectStore* object_store);
void ReadProgramSnapshot(ObjectStore* object_store);
void ReadVMSnapshot();
void AddVMIsolateBaseObjects();
@ -682,7 +682,7 @@ class FullSnapshotWriter {
Isolate* isolate() const { return thread_->isolate(); }
Heap* heap() const { return isolate()->heap(); }
// Writes a full snapshot of the Isolate.
// Writes a full snapshot of the program(VM isolate, regular isolate group).
void WriteFullSnapshot();
intptr_t VmIsolateSnapshotSize() const { return vm_isolate_snapshot_size_; }
@ -692,8 +692,8 @@ class FullSnapshotWriter {
// Writes a snapshot of the VM Isolate.
intptr_t WriteVMSnapshot();
// Writes a full snapshot of a regular Dart Isolate.
void WriteIsolateSnapshot(intptr_t num_base_objects);
// Writes a full snapshot of regular Dart isolate group.
void WriteProgramSnapshot(intptr_t num_base_objects);
Thread* thread_;
Snapshot::Kind kind_;
@ -724,7 +724,7 @@ class FullSnapshotReader {
~FullSnapshotReader() {}
RawApiError* ReadVMSnapshot();
RawApiError* ReadIsolateSnapshot();
RawApiError* ReadProgramSnapshot();
private:
RawApiError* ConvertToApiError(char* message);

View file

@ -61,6 +61,11 @@ class CodePatcher : public AllStatic {
const Code& caller_code,
const Object& data,
const Code& target);
static void PatchInstanceCallAtWithMutatorsStopped(Thread* thread,
uword return_address,
const Code& caller_code,
const Object& data,
const Code& target);
// Return target of an unoptimized static call and its ICData object
// (calls target via a stub).
@ -78,6 +83,11 @@ class CodePatcher : public AllStatic {
const Code& caller_code,
const Object& data,
const Code& target);
static void PatchSwitchableCallAtWithMutatorsStopped(Thread* thread,
uword return_address,
const Code& caller_code,
const Object& data,
const Code& target);
static RawObject* GetSwitchableCallDataAt(uword return_address,
const Code& caller_code);
static RawCode* GetSwitchableCallTargetAt(uword return_address,

View file

@ -46,6 +46,19 @@ void CodePatcher::PatchInstanceCallAt(uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
auto thread = Thread::Current();
thread->isolate_group()->RunWithStoppedMutators([&]() {
PatchInstanceCallAtWithMutatorsStopped(thread, return_address, caller_code,
data, target);
});
}
void CodePatcher::PatchInstanceCallAtWithMutatorsStopped(
Thread* thread,
uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
ASSERT(caller_code.ContainsInstructionAt(return_address));
ICCallPattern call(return_address, caller_code);
call.SetData(data);
@ -69,6 +82,20 @@ void CodePatcher::PatchSwitchableCallAt(uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
auto thread = Thread::Current();
// Ensure all threads are suspended as we update data and target pair.
thread->isolate_group()->RunWithStoppedMutators([&]() {
PatchSwitchableCallAtWithMutatorsStopped(thread, return_address,
caller_code, data, target);
});
}
void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
Thread* thread,
uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
ASSERT(caller_code.ContainsInstructionAt(return_address));
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
BareSwitchableCallPattern call(return_address, caller_code);
@ -109,10 +136,12 @@ void CodePatcher::PatchNativeCallAt(uword return_address,
const Code& code,
NativeFunction target,
const Code& trampoline) {
ASSERT(code.ContainsInstructionAt(return_address));
NativeCallPattern call(return_address, code);
call.set_target(trampoline);
call.set_native_function(target);
Thread::Current()->isolate_group()->RunWithStoppedMutators([&]() {
ASSERT(code.ContainsInstructionAt(return_address));
NativeCallPattern call(return_address, code);
call.set_target(trampoline);
call.set_native_function(target);
});
}
RawCode* CodePatcher::GetNativeCallAt(uword return_address,

View file

@ -82,6 +82,19 @@ void CodePatcher::PatchInstanceCallAt(uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
auto thread = Thread::Current();
thread->isolate_group()->RunWithStoppedMutators([&]() {
PatchInstanceCallAtWithMutatorsStopped(thread, return_address, caller_code,
data, target);
});
}
void CodePatcher::PatchInstanceCallAtWithMutatorsStopped(
Thread* thread,
uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
ASSERT(caller_code.ContainsInstructionAt(return_address));
ICCallPattern call(return_address, caller_code);
call.SetData(data);
@ -105,6 +118,20 @@ void CodePatcher::PatchSwitchableCallAt(uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
auto thread = Thread::Current();
// Ensure all threads are suspended as we update data and target pair.
thread->isolate_group()->RunWithStoppedMutators([&]() {
PatchSwitchableCallAtWithMutatorsStopped(thread, return_address,
caller_code, data, target);
});
}
void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
Thread* thread,
uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
ASSERT(caller_code.ContainsInstructionAt(return_address));
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
BareSwitchableCallPattern call(return_address, caller_code);
@ -145,10 +172,12 @@ void CodePatcher::PatchNativeCallAt(uword return_address,
const Code& caller_code,
NativeFunction target,
const Code& trampoline) {
ASSERT(caller_code.ContainsInstructionAt(return_address));
NativeCallPattern call(return_address, caller_code);
call.set_target(trampoline);
call.set_native_function(target);
Thread::Current()->isolate_group()->RunWithStoppedMutators([&]() {
ASSERT(caller_code.ContainsInstructionAt(return_address));
NativeCallPattern call(return_address, caller_code);
call.set_target(trampoline);
call.set_native_function(target);
});
}
RawCode* CodePatcher::GetNativeCallAt(uword return_address,

View file

@ -210,16 +210,26 @@ void CodePatcher::PatchInstanceCallAt(uword return_address,
const Object& data,
const Code& target) {
auto thread = Thread::Current();
thread->isolate_group()->RunWithStoppedMutators([&]() {
PatchInstanceCallAtWithMutatorsStopped(thread, return_address, caller_code,
data, target);
});
}
void CodePatcher::PatchInstanceCallAtWithMutatorsStopped(
Thread* thread,
uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
auto zone = thread->zone();
ASSERT(caller_code.ContainsInstructionAt(return_address));
const Instructions& instrs =
Instructions::Handle(zone, caller_code.instructions());
thread->isolate_group()->RunWithStoppedMutators([&]() {
WritableInstructionsScope writable(instrs.PayloadStart(), instrs.Size());
InstanceCall call(return_address);
call.set_data(data);
call.set_target(target);
});
WritableInstructionsScope writable(instrs.PayloadStart(), instrs.Size());
InstanceCall call(return_address);
call.set_data(data);
call.set_target(target);
}
RawFunction* CodePatcher::GetUnoptimizedStaticCallAt(uword return_address,
@ -243,6 +253,16 @@ void CodePatcher::PatchSwitchableCallAt(uword return_address,
UNREACHABLE();
}
void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
Thread* thread,
uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
// Switchable instance calls only generated for precompilation.
UNREACHABLE();
}
RawCode* CodePatcher::GetSwitchableCallTargetAt(uword return_address,
const Code& caller_code) {
// Switchable instance calls only generated for precompilation.

View file

@ -445,6 +445,19 @@ void CodePatcher::PatchInstanceCallAt(uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
auto thread = Thread::Current();
thread->isolate_group()->RunWithStoppedMutators([&]() {
PatchInstanceCallAtWithMutatorsStopped(thread, return_address, caller_code,
data, target);
});
}
void CodePatcher::PatchInstanceCallAtWithMutatorsStopped(
Thread* thread,
uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
ASSERT(caller_code.ContainsInstructionAt(return_address));
InstanceCall call(return_address, caller_code);
call.set_data(data);
@ -472,6 +485,20 @@ void CodePatcher::PatchSwitchableCallAt(uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
auto thread = Thread::Current();
// Ensure all threads are suspended as we update data and target pair.
thread->isolate_group()->RunWithStoppedMutators([&]() {
PatchSwitchableCallAtWithMutatorsStopped(thread, return_address,
caller_code, data, target);
});
}
void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
Thread* thread,
uword return_address,
const Code& caller_code,
const Object& data,
const Code& target) {
ASSERT(caller_code.ContainsInstructionAt(return_address));
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
BareSwitchableCall call(return_address, caller_code);
@ -512,10 +539,12 @@ void CodePatcher::PatchNativeCallAt(uword return_address,
const Code& caller_code,
NativeFunction target,
const Code& trampoline) {
ASSERT(caller_code.ContainsInstructionAt(return_address));
NativeCall call(return_address, caller_code);
call.set_target(trampoline);
call.set_native_function(target);
Thread::Current()->isolate_group()->RunWithStoppedMutators([&]() {
ASSERT(caller_code.ContainsInstructionAt(return_address));
NativeCall call(return_address, caller_code);
call.set_target(trampoline);
call.set_native_function(target);
});
}
RawCode* CodePatcher::GetNativeCallAt(uword return_address,

View file

@ -1583,21 +1583,24 @@ void AsmIntrinsifier::ObjectRuntimeType(Assembler* assembler,
__ b(&not_double, NE);
__ LoadIsolate(R0);
__ LoadFromOffset(kWord, R0, R0, target::Isolate::object_store_offset());
__ LoadFromOffset(kWord, R0, R0,
target::Isolate::cached_object_store_offset());
__ LoadFromOffset(kWord, R0, R0, target::ObjectStore::double_type_offset());
__ Ret();
__ Bind(&not_double);
JumpIfNotInteger(assembler, R1, R0, &not_integer);
__ LoadIsolate(R0);
__ LoadFromOffset(kWord, R0, R0, target::Isolate::object_store_offset());
__ LoadFromOffset(kWord, R0, R0,
target::Isolate::cached_object_store_offset());
__ LoadFromOffset(kWord, R0, R0, target::ObjectStore::int_type_offset());
__ Ret();
__ Bind(&not_integer);
JumpIfNotString(assembler, R1, R0, &use_declaration_type);
__ LoadIsolate(R0);
__ LoadFromOffset(kWord, R0, R0, target::Isolate::object_store_offset());
__ LoadFromOffset(kWord, R0, R0,
target::Isolate::cached_object_store_offset());
__ LoadFromOffset(kWord, R0, R0, target::ObjectStore::string_type_offset());
__ Ret();

View file

@ -1648,21 +1648,21 @@ void AsmIntrinsifier::ObjectRuntimeType(Assembler* assembler,
__ b(&not_double, NE);
__ LoadIsolate(R0);
__ LoadFromOffset(R0, R0, target::Isolate::object_store_offset());
__ LoadFromOffset(R0, R0, target::Isolate::cached_object_store_offset());
__ LoadFromOffset(R0, R0, target::ObjectStore::double_type_offset());
__ ret();
__ Bind(&not_double);
JumpIfNotInteger(assembler, R1, R0, &not_integer);
__ LoadIsolate(R0);
__ LoadFromOffset(R0, R0, target::Isolate::object_store_offset());
__ LoadFromOffset(R0, R0, target::Isolate::cached_object_store_offset());
__ LoadFromOffset(R0, R0, target::ObjectStore::int_type_offset());
__ ret();
__ Bind(&not_integer);
JumpIfNotString(assembler, R1, R0, &use_declaration_type);
__ LoadIsolate(R0);
__ LoadFromOffset(R0, R0, target::Isolate::object_store_offset());
__ LoadFromOffset(R0, R0, target::Isolate::cached_object_store_offset());
__ LoadFromOffset(R0, R0, target::ObjectStore::string_type_offset());
__ ret();

View file

@ -1682,7 +1682,7 @@ void AsmIntrinsifier::ObjectRuntimeType(Assembler* assembler,
__ j(NOT_EQUAL, &not_double);
__ LoadIsolate(EAX);
__ movl(EAX, Address(EAX, target::Isolate::object_store_offset()));
__ movl(EAX, Address(EAX, target::Isolate::cached_object_store_offset()));
__ movl(EAX, Address(EAX, target::ObjectStore::double_type_offset()));
__ ret();
@ -1692,7 +1692,7 @@ void AsmIntrinsifier::ObjectRuntimeType(Assembler* assembler,
JumpIfNotInteger(assembler, EAX, &not_integer);
__ LoadIsolate(EAX);
__ movl(EAX, Address(EAX, target::Isolate::object_store_offset()));
__ movl(EAX, Address(EAX, target::Isolate::cached_object_store_offset()));
__ movl(EAX, Address(EAX, target::ObjectStore::int_type_offset()));
__ ret();
@ -1703,7 +1703,7 @@ void AsmIntrinsifier::ObjectRuntimeType(Assembler* assembler,
JumpIfNotString(assembler, EAX, &use_declaration_type);
__ LoadIsolate(EAX);
__ movl(EAX, Address(EAX, target::Isolate::object_store_offset()));
__ movl(EAX, Address(EAX, target::Isolate::cached_object_store_offset()));
__ movl(EAX, Address(EAX, target::ObjectStore::string_type_offset()));
__ ret();

View file

@ -1594,7 +1594,7 @@ void AsmIntrinsifier::ObjectRuntimeType(Assembler* assembler,
__ j(NOT_EQUAL, &not_double);
__ LoadIsolate(RAX);
__ movq(RAX, Address(RAX, target::Isolate::object_store_offset()));
__ movq(RAX, Address(RAX, target::Isolate::cached_object_store_offset()));
__ movq(RAX, Address(RAX, target::ObjectStore::double_type_offset()));
__ ret();
@ -1604,7 +1604,7 @@ void AsmIntrinsifier::ObjectRuntimeType(Assembler* assembler,
JumpIfNotInteger(assembler, RAX, &not_integer);
__ LoadIsolate(RAX);
__ movq(RAX, Address(RAX, target::Isolate::object_store_offset()));
__ movq(RAX, Address(RAX, target::Isolate::cached_object_store_offset()));
__ movq(RAX, Address(RAX, target::ObjectStore::int_type_offset()));
__ ret();
@ -1615,7 +1615,7 @@ void AsmIntrinsifier::ObjectRuntimeType(Assembler* assembler,
JumpIfNotString(assembler, RAX, &use_declaration_type);
__ LoadIsolate(RAX);
__ movq(RAX, Address(RAX, target::Isolate::object_store_offset()));
__ movq(RAX, Address(RAX, target::Isolate::cached_object_store_offset()));
__ movq(RAX, Address(RAX, target::ObjectStore::string_type_offset()));
__ ret();

View file

@ -2004,8 +2004,8 @@ void Assembler::LoadClassId(Register result, Register object, Condition cond) {
void Assembler::LoadClassById(Register result, Register class_id) {
ASSERT(result != class_id);
const intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::table_offset();
const intptr_t table_offset =
target::Isolate::cached_class_table_table_offset();
LoadIsolate(result);
LoadFromOffset(kWord, result, result, table_offset);
@ -3529,8 +3529,7 @@ void Assembler::LoadAllocationStatsAddress(Register dest, intptr_t cid) {
ASSERT(cid > 0);
const intptr_t shared_table_offset =
target::Isolate::class_table_offset() +
target::ClassTable::shared_class_table_offset();
target::Isolate::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t class_offset = target::ClassTable::ClassOffsetFor(cid);

View file

@ -1155,8 +1155,8 @@ void Assembler::LoadClassId(Register result, Register object) {
void Assembler::LoadClassById(Register result, Register class_id) {
ASSERT(result != class_id);
const intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::table_offset();
const intptr_t table_offset =
target::Isolate::cached_class_table_table_offset();
LoadIsolate(result);
LoadFromOffset(result, result, table_offset);
@ -1631,8 +1631,7 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
ASSERT(cid > 0);
const intptr_t shared_table_offset =
target::Isolate::class_table_offset() +
target::ClassTable::shared_class_table_offset();
target::Isolate::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t class_offset = target::ClassTable::ClassOffsetFor(cid);

View file

@ -2409,8 +2409,7 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
Address state_address(kNoRegister, 0);
const intptr_t shared_table_offset =
target::Isolate::class_table_offset() +
target::ClassTable::shared_class_table_offset();
target::Isolate::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t class_offset = target::ClassTable::ClassOffsetFor(cid);
@ -2638,8 +2637,8 @@ void Assembler::LoadClassId(Register result, Register object) {
void Assembler::LoadClassById(Register result, Register class_id) {
ASSERT(result != class_id);
const intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::table_offset();
const intptr_t table_offset =
target::Isolate::cached_class_table_table_offset();
LoadIsolate(result);
movl(result, Address(result, table_offset));
movl(result, Address(result, class_id, TIMES_4, 0));

View file

@ -1864,8 +1864,7 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
bool near_jump) {
ASSERT(cid > 0);
const intptr_t shared_table_offset =
target::Isolate::class_table_offset() +
target::ClassTable::shared_class_table_offset();
target::Isolate::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t class_offset = target::ClassTable::ClassOffsetFor(cid);
@ -2139,8 +2138,8 @@ void Assembler::LoadClassId(Register result, Register object) {
void Assembler::LoadClassById(Register result, Register class_id) {
ASSERT(result != class_id);
const intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::table_offset();
const intptr_t table_offset =
target::Isolate::cached_class_table_table_offset();
LoadIsolate(result);
movq(result, Address(result, table_offset));

View file

@ -1075,11 +1075,12 @@ class ObjectStore : public AllStatic {
class Isolate : public AllStatic {
public:
static word object_store_offset();
static word cached_object_store_offset();
static word default_tag_offset();
static word current_tag_offset();
static word user_tag_offset();
static word class_table_offset();
static word cached_class_table_table_offset();
static word shared_class_table_offset();
static word ic_miss_code_offset();
#if !defined(PRODUCT)
static word single_step_offset();
@ -1093,8 +1094,6 @@ class SharedClassTable : public AllStatic {
class ClassTable : public AllStatic {
public:
static word table_offset();
static word shared_class_table_offset();
#if !defined(PRODUCT)
static word ClassOffsetFor(intptr_t cid);
static word SharedTableOffsetFor();

View file

@ -80,9 +80,6 @@ static constexpr dart::compiler::target::word Class_num_type_arguments_offset =
static constexpr dart::compiler::target::word Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 104;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word ClassTable_table_offset = 8;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word Closure_context_offset = 20;
@ -137,12 +134,16 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 20;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 28;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 16;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 36;
static constexpr dart::compiler::target::word
Isolate_shared_class_table_offset = 36;
static constexpr dart::compiler::target::word
Isolate_cached_class_table_table_offset = 40;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 20;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 24;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 28;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 32;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 60;
static constexpr dart::compiler::target::word
Isolate_cached_object_store_offset = 32;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 48;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 16;
static constexpr dart::compiler::target::word
@ -554,9 +555,6 @@ static constexpr dart::compiler::target::word Class_num_type_arguments_offset =
static constexpr dart::compiler::target::word Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 184;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 32;
static constexpr dart::compiler::target::word ClassTable_table_offset = 16;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word Closure_context_offset = 40;
@ -611,12 +609,16 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 40;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 52;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 32;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 72;
static constexpr dart::compiler::target::word
Isolate_shared_class_table_offset = 72;
static constexpr dart::compiler::target::word
Isolate_cached_class_table_table_offset = 80;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 40;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 48;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 56;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 64;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 120;
static constexpr dart::compiler::target::word
Isolate_cached_object_store_offset = 64;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 96;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 32;
static constexpr dart::compiler::target::word
@ -1031,9 +1033,6 @@ static constexpr dart::compiler::target::word Class_num_type_arguments_offset =
static constexpr dart::compiler::target::word Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 104;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word ClassTable_table_offset = 8;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word Closure_context_offset = 20;
@ -1088,12 +1087,16 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 20;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 28;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 16;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 36;
static constexpr dart::compiler::target::word
Isolate_shared_class_table_offset = 36;
static constexpr dart::compiler::target::word
Isolate_cached_class_table_table_offset = 40;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 20;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 24;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 28;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 32;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 60;
static constexpr dart::compiler::target::word
Isolate_cached_object_store_offset = 32;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 48;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 16;
static constexpr dart::compiler::target::word
@ -1502,9 +1505,6 @@ static constexpr dart::compiler::target::word Class_num_type_arguments_offset =
static constexpr dart::compiler::target::word Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 184;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 32;
static constexpr dart::compiler::target::word ClassTable_table_offset = 16;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word Closure_context_offset = 40;
@ -1559,12 +1559,16 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 40;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 52;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 32;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 72;
static constexpr dart::compiler::target::word
Isolate_shared_class_table_offset = 72;
static constexpr dart::compiler::target::word
Isolate_cached_class_table_table_offset = 80;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 40;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 48;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 56;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 64;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 120;
static constexpr dart::compiler::target::word
Isolate_cached_object_store_offset = 64;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 96;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 32;
static constexpr dart::compiler::target::word
@ -1982,9 +1986,6 @@ static constexpr dart::compiler::target::word Class_num_type_arguments_offset =
static constexpr dart::compiler::target::word Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 104;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word ClassTable_table_offset = 8;
static constexpr dart::compiler::target::word Closure_context_offset = 20;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 12;
@ -2037,11 +2038,15 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 20;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 28;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 16;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 36;
static constexpr dart::compiler::target::word
Isolate_shared_class_table_offset = 36;
static constexpr dart::compiler::target::word
Isolate_cached_class_table_table_offset = 40;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 20;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 24;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 28;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 32;
static constexpr dart::compiler::target::word
Isolate_cached_object_store_offset = 32;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 16;
static constexpr dart::compiler::target::word
@ -2450,9 +2455,6 @@ static constexpr dart::compiler::target::word Class_num_type_arguments_offset =
static constexpr dart::compiler::target::word Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 184;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 32;
static constexpr dart::compiler::target::word ClassTable_table_offset = 16;
static constexpr dart::compiler::target::word Closure_context_offset = 40;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 24;
@ -2505,11 +2507,15 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 40;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 52;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 32;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 72;
static constexpr dart::compiler::target::word
Isolate_shared_class_table_offset = 72;
static constexpr dart::compiler::target::word
Isolate_cached_class_table_table_offset = 80;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 40;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 48;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 56;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 64;
static constexpr dart::compiler::target::word
Isolate_cached_object_store_offset = 64;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 32;
static constexpr dart::compiler::target::word
@ -2921,9 +2927,6 @@ static constexpr dart::compiler::target::word Class_num_type_arguments_offset =
static constexpr dart::compiler::target::word Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 104;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word ClassTable_table_offset = 8;
static constexpr dart::compiler::target::word Closure_context_offset = 20;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 12;
@ -2976,11 +2979,15 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 20;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 28;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 16;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 36;
static constexpr dart::compiler::target::word
Isolate_shared_class_table_offset = 36;
static constexpr dart::compiler::target::word
Isolate_cached_class_table_table_offset = 40;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 20;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 24;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 28;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 32;
static constexpr dart::compiler::target::word
Isolate_cached_object_store_offset = 32;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 16;
static constexpr dart::compiler::target::word
@ -3386,9 +3393,6 @@ static constexpr dart::compiler::target::word Class_num_type_arguments_offset =
static constexpr dart::compiler::target::word Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 184;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 32;
static constexpr dart::compiler::target::word ClassTable_table_offset = 16;
static constexpr dart::compiler::target::word Closure_context_offset = 40;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 24;
@ -3441,11 +3445,15 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 40;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 52;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 32;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 72;
static constexpr dart::compiler::target::word
Isolate_shared_class_table_offset = 72;
static constexpr dart::compiler::target::word
Isolate_cached_class_table_table_offset = 80;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 40;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 48;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 56;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 64;
static constexpr dart::compiler::target::word
Isolate_cached_object_store_offset = 64;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 32;
static constexpr dart::compiler::target::word
@ -3867,9 +3875,6 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 104;
static constexpr dart::compiler::target::word
AOT_ClassTable_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word AOT_ClassTable_table_offset = 8;
static constexpr dart::compiler::target::word
AOT_SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 20;
@ -3913,18 +3918,20 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_ICData_NumArgsTestedMask = 3;
static constexpr dart::compiler::target::word AOT_ICData_NumArgsTestedShift = 0;
static constexpr dart::compiler::target::word AOT_ICData_entries_offset = 12;
static constexpr dart::compiler::target::word AOT_Isolate_class_table_offset =
36;
static constexpr dart::compiler::target::word
AOT_Isolate_shared_class_table_offset = 36;
static constexpr dart::compiler::target::word
AOT_Isolate_cached_class_table_table_offset = 40;
static constexpr dart::compiler::target::word AOT_Isolate_current_tag_offset =
20;
static constexpr dart::compiler::target::word AOT_Isolate_default_tag_offset =
24;
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
28;
static constexpr dart::compiler::target::word AOT_Isolate_object_store_offset =
32;
static constexpr dart::compiler::target::word
AOT_Isolate_cached_object_store_offset = 32;
static constexpr dart::compiler::target::word AOT_Isolate_single_step_offset =
60;
48;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_data_offset =
16;
@ -4385,9 +4392,6 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 184;
static constexpr dart::compiler::target::word
AOT_ClassTable_shared_class_table_offset = 32;
static constexpr dart::compiler::target::word AOT_ClassTable_table_offset = 16;
static constexpr dart::compiler::target::word
AOT_SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 40;
@ -4431,18 +4435,20 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_ICData_NumArgsTestedMask = 3;
static constexpr dart::compiler::target::word AOT_ICData_NumArgsTestedShift = 0;
static constexpr dart::compiler::target::word AOT_ICData_entries_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_class_table_offset =
72;
static constexpr dart::compiler::target::word
AOT_Isolate_shared_class_table_offset = 72;
static constexpr dart::compiler::target::word
AOT_Isolate_cached_class_table_table_offset = 80;
static constexpr dart::compiler::target::word AOT_Isolate_current_tag_offset =
40;
static constexpr dart::compiler::target::word AOT_Isolate_default_tag_offset =
48;
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
56;
static constexpr dart::compiler::target::word AOT_Isolate_object_store_offset =
64;
static constexpr dart::compiler::target::word
AOT_Isolate_cached_object_store_offset = 64;
static constexpr dart::compiler::target::word AOT_Isolate_single_step_offset =
120;
96;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_data_offset =
32;
@ -4909,9 +4915,6 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 184;
static constexpr dart::compiler::target::word
AOT_ClassTable_shared_class_table_offset = 32;
static constexpr dart::compiler::target::word AOT_ClassTable_table_offset = 16;
static constexpr dart::compiler::target::word
AOT_SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 40;
@ -4955,18 +4958,20 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_ICData_NumArgsTestedMask = 3;
static constexpr dart::compiler::target::word AOT_ICData_NumArgsTestedShift = 0;
static constexpr dart::compiler::target::word AOT_ICData_entries_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_class_table_offset =
72;
static constexpr dart::compiler::target::word
AOT_Isolate_shared_class_table_offset = 72;
static constexpr dart::compiler::target::word
AOT_Isolate_cached_class_table_table_offset = 80;
static constexpr dart::compiler::target::word AOT_Isolate_current_tag_offset =
40;
static constexpr dart::compiler::target::word AOT_Isolate_default_tag_offset =
48;
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
56;
static constexpr dart::compiler::target::word AOT_Isolate_object_store_offset =
64;
static constexpr dart::compiler::target::word
AOT_Isolate_cached_object_store_offset = 64;
static constexpr dart::compiler::target::word AOT_Isolate_single_step_offset =
120;
96;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_data_offset =
32;
@ -5433,9 +5438,6 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 104;
static constexpr dart::compiler::target::word
AOT_ClassTable_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word AOT_ClassTable_table_offset = 8;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 20;
static constexpr dart::compiler::target::word
AOT_Closure_delayed_type_arguments_offset = 12;
@ -5477,16 +5479,18 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_ICData_NumArgsTestedMask = 3;
static constexpr dart::compiler::target::word AOT_ICData_NumArgsTestedShift = 0;
static constexpr dart::compiler::target::word AOT_ICData_entries_offset = 12;
static constexpr dart::compiler::target::word AOT_Isolate_class_table_offset =
36;
static constexpr dart::compiler::target::word
AOT_Isolate_shared_class_table_offset = 36;
static constexpr dart::compiler::target::word
AOT_Isolate_cached_class_table_table_offset = 40;
static constexpr dart::compiler::target::word AOT_Isolate_current_tag_offset =
20;
static constexpr dart::compiler::target::word AOT_Isolate_default_tag_offset =
24;
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
28;
static constexpr dart::compiler::target::word AOT_Isolate_object_store_offset =
32;
static constexpr dart::compiler::target::word
AOT_Isolate_cached_object_store_offset = 32;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_data_offset =
16;
@ -5944,9 +5948,6 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 184;
static constexpr dart::compiler::target::word
AOT_ClassTable_shared_class_table_offset = 32;
static constexpr dart::compiler::target::word AOT_ClassTable_table_offset = 16;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 40;
static constexpr dart::compiler::target::word
AOT_Closure_delayed_type_arguments_offset = 24;
@ -5988,16 +5989,18 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_ICData_NumArgsTestedMask = 3;
static constexpr dart::compiler::target::word AOT_ICData_NumArgsTestedShift = 0;
static constexpr dart::compiler::target::word AOT_ICData_entries_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_class_table_offset =
72;
static constexpr dart::compiler::target::word
AOT_Isolate_shared_class_table_offset = 72;
static constexpr dart::compiler::target::word
AOT_Isolate_cached_class_table_table_offset = 80;
static constexpr dart::compiler::target::word AOT_Isolate_current_tag_offset =
40;
static constexpr dart::compiler::target::word AOT_Isolate_default_tag_offset =
48;
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
56;
static constexpr dart::compiler::target::word AOT_Isolate_object_store_offset =
64;
static constexpr dart::compiler::target::word
AOT_Isolate_cached_object_store_offset = 64;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_data_offset =
32;
@ -6461,9 +6464,6 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 184;
static constexpr dart::compiler::target::word
AOT_ClassTable_shared_class_table_offset = 32;
static constexpr dart::compiler::target::word AOT_ClassTable_table_offset = 16;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 40;
static constexpr dart::compiler::target::word
AOT_Closure_delayed_type_arguments_offset = 24;
@ -6505,16 +6505,18 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_ICData_NumArgsTestedMask = 3;
static constexpr dart::compiler::target::word AOT_ICData_NumArgsTestedShift = 0;
static constexpr dart::compiler::target::word AOT_ICData_entries_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_class_table_offset =
72;
static constexpr dart::compiler::target::word
AOT_Isolate_shared_class_table_offset = 72;
static constexpr dart::compiler::target::word
AOT_Isolate_cached_class_table_table_offset = 80;
static constexpr dart::compiler::target::word AOT_Isolate_current_tag_offset =
40;
static constexpr dart::compiler::target::word AOT_Isolate_default_tag_offset =
48;
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
56;
static constexpr dart::compiler::target::word AOT_Isolate_object_store_offset =
64;
static constexpr dart::compiler::target::word
AOT_Isolate_cached_object_store_offset = 64;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashMap_data_offset =
32;

View file

@ -66,8 +66,6 @@
FIELD(Class, num_type_arguments_offset) \
FIELD(Class, super_type_offset) \
FIELD(Class, host_type_arguments_field_offset_in_words_offset) \
FIELD(ClassTable, shared_class_table_offset) \
FIELD(ClassTable, table_offset) \
NOT_IN_PRODUCT(FIELD(SharedClassTable, class_heap_stats_table_offset)) \
FIELD(Closure, context_offset) \
FIELD(Closure, delayed_type_arguments_offset) \
@ -106,11 +104,12 @@
PRECOMP_NO_CHECK(FIELD(ICData, owner_offset)) \
PRECOMP_NO_CHECK(FIELD(ICData, state_bits_offset)) \
NOT_IN_PRECOMPILED_RUNTIME(FIELD(ICData, receivers_static_type_offset)) \
FIELD(Isolate, class_table_offset) \
FIELD(Isolate, shared_class_table_offset) \
FIELD(Isolate, cached_class_table_table_offset) \
FIELD(Isolate, current_tag_offset) \
FIELD(Isolate, default_tag_offset) \
FIELD(Isolate, ic_miss_code_offset) \
FIELD(Isolate, object_store_offset) \
FIELD(Isolate, cached_object_store_offset) \
NOT_IN_PRODUCT(FIELD(Isolate, single_step_offset)) \
FIELD(Isolate, user_tag_offset) \
FIELD(LinkedHashMap, data_offset) \

View file

@ -248,7 +248,9 @@ char* Dart::Init(const uint8_t* vm_isolate_snapshot,
std::unique_ptr<IsolateGroupSource> source(
new IsolateGroupSource(nullptr, kVmIsolateName, vm_isolate_snapshot,
instructions_snapshot, nullptr, -1, api_flags));
auto group = new IsolateGroup(std::move(source), /*embedder_data=*/nullptr);
// ObjectStore should be created later, after null objects are initialized.
auto group = new IsolateGroup(std::move(source), /*embedder_data=*/nullptr,
/*object_store=*/nullptr);
group->CreateHeap(/*is_vm_isolate=*/true,
/*is_service_or_kernel_isolate=*/false);
IsolateGroup::RegisterIsolateGroup(group);
@ -265,7 +267,8 @@ char* Dart::Init(const uint8_t* vm_isolate_snapshot,
StackZone zone(T);
HandleScope handle_scope(T);
Object::InitNullAndBool(vm_isolate_);
ObjectStore::Init(vm_isolate_);
vm_isolate_->set_object_store(new ObjectStore());
vm_isolate_->isolate_object_store()->Init();
TargetCPUFeatures::Init();
Object::Init(vm_isolate_);
ArgumentsDescriptor::Init();
@ -653,25 +656,35 @@ static bool IsSnapshotCompatible(Snapshot::Kind vm_kind,
return Snapshot::IsFull(isolate_kind);
}
RawError* Dart::InitializeIsolate(const uint8_t* snapshot_data,
const uint8_t* snapshot_instructions,
const uint8_t* kernel_buffer,
intptr_t kernel_buffer_size,
void* isolate_data) {
// Initialize the new isolate.
Thread* T = Thread::Current();
Isolate* I = T->isolate();
#if defined(SUPPORT_TIMELINE)
TimelineBeginEndScope tbes(T, Timeline::GetIsolateStream(),
"InitializeIsolate");
tbes.SetNumArguments(1);
tbes.CopyArgument(0, "isolateName", I->name());
#endif
ASSERT(I != NULL);
StackZone zone(T);
HandleScope handle_scope(T);
ObjectStore::Init(I);
#if defined(DART_PRECOMPILED_RUNTIME)
static bool CloneIntoChildIsolateAOT(Thread* T,
Isolate* I,
IsolateGroup* source_isolate_group) {
// In AOT we speed up isolate spawning by copying the donor's isolate structure.
Isolate* donor_isolate = source_isolate_group != nullptr
? source_isolate_group->FirstIsolate()
: nullptr;
if (donor_isolate == nullptr) {
return false;
}
I->isolate_object_store()->Init();
I->isolate_object_store()->PreallocateObjects();
// Initialize field_table with initial values.
I->set_field_table(T, donor_isolate->saved_initial_field_table()->Clone());
I->set_saved_initial_field_table(
donor_isolate->saved_initial_field_table_shareable());
ReversePcLookupCache::BuildAndAttachToIsolate(I);
return true;
}
#endif
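// --- A minimal standalone sketch (hypothetical names, not VM code) of the
// donor-based spawn in CloneIntoChildIsolateAOT above: group-wide state is
// shared by pointer, and only the mutable per-isolate field table is cloned.
#include <memory>
#include <vector>

struct GroupState {  // stand-in for the shared code/class table/object store
  std::vector<int> class_sizes;
};

struct IsolateState {
  std::shared_ptr<const GroupState> group;  // shared, never mutated after setup
  std::vector<int> field_values;            // per-isolate, cloned from the donor
};

// Spawning from a donor is O(#fields) instead of re-reading a snapshot.
IsolateState SpawnFromDonor(const IsolateState& donor) {
  IsolateState child;
  child.group = donor.group;                // share the pointer
  child.field_values = donor.field_values;  // clone the mutable initial values
  return child;
}
// --- End of sketch.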
RawError* Dart::InitIsolateFromSnapshot(Thread* T,
Isolate* I,
const uint8_t* snapshot_data,
const uint8_t* snapshot_instructions,
const uint8_t* kernel_buffer,
intptr_t kernel_buffer_size) {
Error& error = Error::Handle(T->zone());
error = Object::Init(I, kernel_buffer, kernel_buffer_size);
if (!error.IsNull()) {
@ -681,8 +694,8 @@ RawError* Dart::InitializeIsolate(const uint8_t* snapshot_data,
// Read the snapshot and setup the initial state.
#if defined(SUPPORT_TIMELINE)
TimelineBeginEndScope tbes(T, Timeline::GetIsolateStream(),
"ReadIsolateSnapshot");
#endif
"ReadProgramSnapshot");
#endif // defined(SUPPORT_TIMELINE)
// TODO(turnidge): Remove once length is not part of the snapshot.
const Snapshot* snapshot = Snapshot::SetupFromBuffer(snapshot_data);
if (snapshot == NULL) {
@ -700,7 +713,7 @@ RawError* Dart::InitializeIsolate(const uint8_t* snapshot_data,
OS::PrintErr("Size of isolate snapshot = %" Pd "\n", snapshot->length());
}
FullSnapshotReader reader(snapshot, snapshot_instructions, T);
const Error& error = Error::Handle(reader.ReadIsolateSnapshot());
const Error& error = Error::Handle(reader.ReadProgramSnapshot());
if (!error.IsNull()) {
return error.raw();
}
@ -714,7 +727,7 @@ RawError* Dart::InitializeIsolate(const uint8_t* snapshot_data,
tbes.FormatArgument(1, "heapSize", "%" Pd64,
I->heap()->UsedInWords(Heap::kOld) * kWordSize);
}
#endif // !defined(PRODUCT)
#endif // defined(SUPPORT_TIMELINE)
if (FLAG_trace_isolates) {
I->heap()->PrintSizes();
MegamorphicCacheTable::PrintSizes(I);
@ -727,6 +740,89 @@ RawError* Dart::InitializeIsolate(const uint8_t* snapshot_data,
}
}
return Error::null();
}
#if defined(DART_PRECOMPILED_RUNTIME)
static void PrintLLVMConstantPool(Thread* T, Isolate* I) {
StackZone printing_zone(T);
HandleScope printing_scope(T);
TextBuffer b(1000);
const auto& constants =
GrowableObjectArray::Handle(I->object_store()->llvm_constant_pool());
if (constants.IsNull()) {
b.AddString("No constant pool information in snapshot.\n\n");
} else {
auto const len = constants.Length();
b.Printf("Constant pool contents (length %" Pd "):\n", len);
auto& obj = Object::Handle();
for (intptr_t i = 0; i < len; i++) {
obj = constants.At(i);
b.Printf(" %5" Pd ": ", i);
if (obj.IsString()) {
b.AddChar('"');
b.AddEscapedString(obj.ToCString());
b.AddChar('"');
} else {
b.AddString(obj.ToCString());
}
b.AddChar('\n');
}
b.AddString("End of constant pool.\n\n");
}
const auto& functions =
GrowableObjectArray::Handle(I->object_store()->llvm_function_pool());
if (functions.IsNull()) {
b.AddString("No function pool information in snapshot.\n\n");
} else {
auto const len = functions.Length();
b.Printf("Function pool contents (length %" Pd "):\n", len);
auto& obj = Function::Handle();
for (intptr_t i = 0; i < len; i++) {
obj ^= functions.At(i);
ASSERT(!obj.IsNull());
b.Printf(" %5" Pd ": %s\n", i, obj.ToFullyQualifiedCString());
}
b.AddString("End of function pool.\n\n");
}
THR_Print("%s", b.buf());
}
#endif
RawError* Dart::InitializeIsolate(const uint8_t* snapshot_data,
const uint8_t* snapshot_instructions,
const uint8_t* kernel_buffer,
intptr_t kernel_buffer_size,
IsolateGroup* source_isolate_group,
void* isolate_data) {
// Initialize the new isolate.
Thread* T = Thread::Current();
Isolate* I = T->isolate();
#if defined(SUPPORT_TIMELINE)
TimelineBeginEndScope tbes(T, Timeline::GetIsolateStream(),
"InitializeIsolate");
tbes.SetNumArguments(1);
tbes.CopyArgument(0, "isolateName", I->name());
#endif
ASSERT(I != NULL);
StackZone zone(T);
HandleScope handle_scope(T);
bool was_child_cloned_into_existing_isolate = false;
#if defined(DART_PRECOMPILED_RUNTIME)
if (CloneIntoChildIsolateAOT(T, I, source_isolate_group)) {
was_child_cloned_into_existing_isolate = true;
} else {
#endif
const Error& error = Error::Handle(
InitIsolateFromSnapshot(T, I, snapshot_data, snapshot_instructions,
kernel_buffer, kernel_buffer_size));
if (!error.IsNull()) {
return error.raw();
}
#if defined(DART_PRECOMPILED_RUNTIME)
}
#endif
Object::VerifyBuiltinVtables();
DEBUG_ONLY(I->heap()->Verify(kForbidMarked));
@ -735,47 +831,7 @@ RawError* Dart::InitializeIsolate(const uint8_t* snapshot_data,
ASSERT(I->object_store()->megamorphic_call_miss_code() != Code::null());
ASSERT(I->object_store()->build_method_extractor_code() != Code::null());
if (FLAG_print_llvm_constant_pool) {
StackZone printing_zone(T);
HandleScope printing_scope(T);
TextBuffer b(1000);
const auto& constants =
GrowableObjectArray::Handle(I->object_store()->llvm_constant_pool());
if (constants.IsNull()) {
b.AddString("No constant pool information in snapshot.\n\n");
} else {
auto const len = constants.Length();
b.Printf("Constant pool contents (length %" Pd "):\n", len);
auto& obj = Object::Handle();
for (intptr_t i = 0; i < len; i++) {
obj = constants.At(i);
b.Printf(" %5" Pd ": ", i);
if (obj.IsString()) {
b.AddChar('"');
b.AddEscapedString(obj.ToCString());
b.AddChar('"');
} else {
b.AddString(obj.ToCString());
}
b.AddChar('\n');
}
b.AddString("End of constant pool.\n\n");
}
const auto& functions =
GrowableObjectArray::Handle(I->object_store()->llvm_function_pool());
if (functions.IsNull()) {
b.AddString("No function pool information in snapshot.\n\n");
} else {
auto const len = functions.Length();
b.Printf("Function pool contents (length %" Pd "):\n", len);
auto& obj = Function::Handle();
for (intptr_t i = 0; i < len; i++) {
obj ^= functions.At(i);
ASSERT(!obj.IsNull());
b.Printf(" %5" Pd ": %s\n", i, obj.ToFullyQualifiedCString());
}
b.AddString("End of function pool.\n\n");
}
THR_Print("%s", b.buf());
PrintLLVMConstantPool(T, I);
}
#else
// JIT: The megamorphic call miss function and code come from the snapshot in
@ -794,13 +850,20 @@ RawError* Dart::InitializeIsolate(const uint8_t* snapshot_data,
I->set_ic_miss_code(StubCode::SwitchableCallMiss());
if ((snapshot_data == NULL) || (kernel_buffer != NULL)) {
const Error& error = Error::Handle(I->object_store()->PreallocateObjects());
Error& error = Error::Handle();
error ^= I->object_store()->PreallocateObjects();
if (!error.IsNull()) {
return error.raw();
}
error ^= I->isolate_object_store()->PreallocateObjects();
if (!error.IsNull()) {
return error.raw();
}
}
I->heap()->InitGrowthControl();
if (!was_child_cloned_into_existing_isolate) {
I->heap()->InitGrowthControl();
}
I->set_init_callback_data(isolate_data);
if (FLAG_print_class_table) {
I->class_table()->Print();

View file

@ -59,7 +59,14 @@ class Dart : public AllStatic {
const uint8_t* snapshot_instructions,
const uint8_t* kernel_buffer,
intptr_t kernel_buffer_size,
IsolateGroup* source_isolate_group,
void* data);
static RawError* InitIsolateFromSnapshot(Thread* T,
Isolate* I,
const uint8_t* snapshot_data,
const uint8_t* snapshot_instructions,
const uint8_t* kernel_buffer,
intptr_t kernel_buffer_size);
static void RunShutdownCallback();
static void ShutdownIsolate(Isolate* isolate);
static void ShutdownIsolate();

View file

@ -1142,10 +1142,10 @@ static Dart_Isolate CreateIsolate(IsolateGroup* group,
// Api Handles when an error is encountered.
T->EnterApiScope();
const Error& error_obj = Error::Handle(
Z, Dart::InitializeIsolate(source->snapshot_data,
source->snapshot_instructions,
source->kernel_buffer,
source->kernel_buffer_size, isolate_data));
Z, Dart::InitializeIsolate(
source->snapshot_data, source->snapshot_instructions,
source->kernel_buffer, source->kernel_buffer_size,
is_new_group ? nullptr : group, isolate_data));
if (error_obj.IsNull()) {
#if defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_check_function_fingerprints && source->kernel_buffer == NULL) {
@ -1194,9 +1194,33 @@ static bool IsServiceOrKernelIsolateName(const char* name) {
return false;
}
Isolate* CreateWithinExistingIsolateGroupAOT(IsolateGroup* group,
const char* name,
char** error) {
#if defined(DART_PRECOMPILED_RUNTIME)
API_TIMELINE_DURATION(Thread::Current());
CHECK_NO_ISOLATE(Isolate::Current());
auto spawning_group = group;
Isolate* isolate = reinterpret_cast<Isolate*>(
CreateIsolate(spawning_group, /*is_new_group=*/false, name,
/*isolate_data=*/nullptr, error));
if (isolate == nullptr) return nullptr;
auto source = spawning_group->source();
ASSERT(isolate->source() == source);
return isolate;
#else
UNREACHABLE();
#endif
}
Isolate* CreateWithinExistingIsolateGroup(IsolateGroup* group,
const char* name,
char** error) {
#if !defined(DART_PRECOMPILED_RUNTIME)
API_TIMELINE_DURATION(Thread::Current());
CHECK_NO_ISOLATE(Isolate::Current());
@ -1219,9 +1243,6 @@ Isolate* CreateWithinExistingIsolateGroup(IsolateGroup* group,
ASSERT(isolate->source() == source);
if (source->script_kernel_buffer != nullptr) {
#if defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
#else
Dart_EnterScope();
{
Thread* T = Thread::Current();
@ -1251,7 +1272,6 @@ Isolate* CreateWithinExistingIsolateGroup(IsolateGroup* group,
isolate->object_store()->set_root_library(Library::Cast(tmp));
}
Dart_ExitScope();
#endif // defined(DART_PRECOMPILED_RUNTIME)
}
// If we are running in AppJIT training mode we'll have to remap class ids.
@ -1322,7 +1342,9 @@ Isolate* CreateWithinExistingIsolateGroup(IsolateGroup* group,
isolate->isolate_group_ = group;
group->RegisterIsolateLocked(isolate);
isolate->class_table()->shared_class_table_ = group->class_table();
isolate->class_table()->shared_class_table_ =
group->shared_class_table();
isolate->set_shared_class_table(group->shared_class_table());
// Even though the mutator thread was descheduled, it will still
// retain its [Thread] structure with valid isolate/isolate_group
@ -1354,6 +1376,9 @@ Isolate* CreateWithinExistingIsolateGroup(IsolateGroup* group,
ASSERT(Thread::Current()->isolate_group() == isolate->group());
return isolate;
#else
UNREACHABLE();
#endif
}
DART_EXPORT void Dart_IsolateFlagsInitialize(Dart_IsolateFlags* flags) {

View file

@ -353,6 +353,9 @@ class IsolateGroupSource;
Isolate* CreateWithinExistingIsolateGroup(IsolateGroup* group,
const char* name,
char** error);
Isolate* CreateWithinExistingIsolateGroupAOT(IsolateGroup* group,
const char* name,
char** error);
} // namespace dart.

View file

@ -69,8 +69,9 @@ class PreallocatedStackTraceBuilder : public StackTraceBuilder {
: stacktrace_(StackTrace::Cast(stacktrace)),
cur_index_(0),
dropped_frames_(0) {
ASSERT(stacktrace_.raw() ==
Isolate::Current()->object_store()->preallocated_stack_trace());
ASSERT(
stacktrace_.raw() ==
Isolate::Current()->isolate_object_store()->preallocated_stack_trace());
}
~PreallocatedStackTraceBuilder() {}
@ -815,11 +816,12 @@ static void ThrowExceptionHelper(Thread* thread,
ASSERT(incoming_exception.raw() ==
isolate->object_store()->out_of_memory());
const UnhandledException& error = UnhandledException::Handle(
zone, isolate->object_store()->preallocated_unhandled_exception());
zone,
isolate->isolate_object_store()->preallocated_unhandled_exception());
thread->long_jump_base()->Jump(1, error);
UNREACHABLE();
}
stacktrace = isolate->object_store()->preallocated_stack_trace();
stacktrace = isolate->isolate_object_store()->preallocated_stack_trace();
PreallocatedStackTraceBuilder frame_builder(stacktrace);
ASSERT(existing_stacktrace.IsNull() ||
(existing_stacktrace.raw() == stacktrace.raw()));

View file

@ -95,7 +95,24 @@ void FieldTable::Grow(intptr_t new_capacity) {
Thread::Current()->field_table_values_ = table_;
}
FieldTable* FieldTable::Clone() {
FieldTable* clone = new FieldTable();
auto new_table = static_cast<RawInstance**>(
malloc(capacity_ * sizeof(RawInstance*))); // NOLINT
memmove(new_table, table_, top_ * sizeof(RawInstance*));
ASSERT(clone->table_ == nullptr);
clone->table_ = new_table;
clone->capacity_ = capacity_;
clone->top_ = top_;
return clone;
}
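// --- A minimal standalone sketch (hypothetical names, not VM code) of the
// clone-by-copy pattern used by FieldTable::Clone above: the clone gets its
// own backing store of equal capacity, so later writes to one table are
// invisible to the other.
#include <cstdlib>
#include <cstring>

struct Table {
  int* slots = nullptr;
  int capacity = 0;
  int top = 0;  // number of slots in use

  Table Clone() const {
    Table copy;
    copy.slots = static_cast<int*>(malloc(capacity * sizeof(int)));
    if (top > 0) {
      memcpy(copy.slots, slots, top * sizeof(int));  // copy only the used prefix
    }
    copy.capacity = capacity;
    copy.top = top;
    return copy;
  }
};
// --- End of sketch.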
void FieldTable::VisitObjectPointers(ObjectPointerVisitor* visitor) {
// GC might try to visit the field table before its isolate is done setting it up.
if (table_ == nullptr) {
return;
}
ASSERT(visitor != NULL);
visitor->set_gc_root_type("static fields table");
visitor->VisitPointers(reinterpret_cast<RawObject**>(&table_[0]),

View file

@ -55,6 +55,8 @@ class FieldTable {
}
void SetAt(intptr_t index, RawInstance* raw_instance);
FieldTable* Clone();
void VisitObjectPointers(ObjectPointerVisitor* visitor);
static const int kInitialCapacity = 512;

View file

@ -207,6 +207,7 @@ class Handles {
friend class HandleScope;
friend class Dart;
friend class IsolateObjectStore;
friend class ObjectStore;
friend class ThreadState;
DISALLOW_ALLOCATION();

View file

@ -87,15 +87,16 @@ uword Heap::AllocateNew(intptr_t size) {
if (LIKELY(addr != 0)) {
return addr;
}
if (new_space_.GrowthControlState()) {
// This call to CollectGarbage might end up "reusing" a collection spawned
// from a different thread and will be racing to allocate the requested
// memory with other threads being released after the collection.
CollectGarbage(kNew);
// This call to CollectGarbage might end up "reusing" a collection spawned
// from a different thread and will be racing to allocate the requested
// memory with other threads being released after the collection.
CollectGarbage(kNew);
addr = new_space_.TryAllocate(thread, size);
if (LIKELY(addr != 0)) {
return addr;
addr = new_space_.TryAllocate(thread, size);
if (LIKELY(addr != 0)) {
return addr;
}
}
// It is possible a GC doesn't clear enough space.
@ -651,14 +652,17 @@ void Heap::UpdateGlobalMaxUsed() {
}
void Heap::InitGrowthControl() {
new_space_.InitGrowthControl();
old_space_.InitGrowthControl();
}
void Heap::SetGrowthControlState(bool state) {
new_space_.SetGrowthControlState(state);
old_space_.SetGrowthControlState(state);
}
bool Heap::GrowthControlState() {
ASSERT(new_space_.GrowthControlState() == old_space_.GrowthControlState());
return old_space_.GrowthControlState();
}
@ -720,6 +724,11 @@ void Heap::MergeOtherHeap(Heap* other) {
void Heap::CollectForDebugging() {
if (gc_on_nth_allocation_ == kNoForcedGarbageCollection) return;
if (Thread::Current()->IsAtSafepoint()) {
// CollectAllGarbage is not supported when we are at a safepoint.
// Allocating when at a safepoint is not a common case.
return;
}
gc_on_nth_allocation_--;
if (gc_on_nth_allocation_ == 0) {
CollectAllGarbage(kDebugging);

View file

@ -292,7 +292,7 @@ class MarkingWeakVisitor : public HandleVisitor {
public:
explicit MarkingWeakVisitor(Thread* thread)
: HandleVisitor(thread),
class_table_(thread->isolate_group()->class_table()) {}
class_table_(thread->isolate_group()->shared_class_table()) {}
void VisitHandle(uword addr) {
FinalizablePersistentHandle* handle =

View file

@ -1113,7 +1113,7 @@ void PageSpace::CollectGarbageAtSafepoint(bool compact,
const int64_t start = OS::GetCurrentMonotonicMicros();
// Perform various cleanup that relies on no tasks interfering.
isolate_group->class_table()->FreeOldTables();
isolate_group->shared_class_table()->FreeOldTables();
isolate_group->ForEachIsolate(
[&](Isolate* isolate) { isolate->field_table()->FreeOldTables(); },
/*at_safepoint=*/true);

View file

@ -51,6 +51,8 @@ class SafepointHandler {
void BlockForSafepoint(Thread* T);
bool IsOwnedByTheThread(Thread* thread) { return owner_ == thread; }
private:
void SafepointThreads(Thread* T);
void ResumeThreads(Thread* T);

View file

@ -449,7 +449,7 @@ class ScavengerWeakVisitor : public HandleVisitor {
ScavengerWeakVisitor(Thread* thread, Scavenger* scavenger)
: HandleVisitor(thread),
scavenger_(scavenger),
class_table_(thread->isolate_group()->class_table()) {
class_table_(thread->isolate_group()->shared_class_table()) {
ASSERT(scavenger->heap_->isolate_group() == thread->isolate_group());
}

View file

@ -202,6 +202,16 @@ class Scavenger {
void MakeNewSpaceIterable() const;
int64_t FreeSpaceInWords(Isolate* isolate) const;
void InitGrowthControl() {
growth_control_ = true;
}
void SetGrowthControlState(bool state) {
growth_control_ = state;
}
bool GrowthControlState() { return growth_control_; }
bool scavenging() const { return scavenging_; }
private:
@ -313,6 +323,8 @@ class Scavenger {
bool failed_to_promote_;
bool growth_control_;
// Protects new space during the allocation of new TLABs
mutable Mutex space_lock_;

View file

@ -135,7 +135,7 @@ void WeakTable::Rehash() {
void WeakTable::MergeOtherWeakTable(WeakTable* other) {
for (intptr_t i = 0; i < other->size(); i++) {
if (other->IsValidEntryAtExclusive(i)) {
SetValue(other->ObjectAtExclusive(i), ValueIndex(i));
SetValueExclusive(other->ObjectAtExclusive(i), ValueIndex(i));
}
}
}

View file

@ -273,7 +273,7 @@ DART_FORCE_INLINE static bool TryAllocate(Thread* thread,
const uword start = thread->top();
#ifndef PRODUCT
auto table = thread->isolate_group()->class_table();
auto table = thread->isolate_group()->shared_class_table();
if (UNLIKELY(table->TraceAllocationFor(class_id))) {
return false;
}

View file

@ -108,20 +108,20 @@ class IntrusiveDListEntry {
prev_ = nullptr;
}
bool IsEmpty() {
bool IsEmpty() const {
bool result = next_ == this;
ASSERT(result == (prev_ == this));
return result;
}
bool IsLinked() {
bool IsLinked() const {
ASSERT((next_ == nullptr) == (prev_ == nullptr));
return next_ != nullptr;
}
IntrusiveDListEntry<T, N>* Prev() { return prev_; }
IntrusiveDListEntry<T, N>* Prev() const { return prev_; }
IntrusiveDListEntry<T, N>* Next() { return next_; }
IntrusiveDListEntry<T, N>* Next() const { return next_; }
friend class IntrusiveDList<T, N>;
@ -143,9 +143,9 @@ class IntrusiveDList {
IntrusiveDListEntry<ContainerType, I>* entry)
: head_(head), entry_(entry) {}
inline ContainerType* operator->() { return entry_->container(); }
inline ContainerType* operator->() const { return entry_->container(); }
inline ContainerType* operator*() { return entry_->container(); }
inline ContainerType* operator*() const { return entry_->container(); }
inline bool operator==(const Iterator<ContainerType, I>& other) const {
return entry_ == other.entry_;
@ -180,18 +180,18 @@ class IntrusiveDList {
// NOTE: This function only checks whether [a] is linked inside *a*
// [IntrusiveDList].
inline bool IsInList(T* a) { return convert(a)->IsLinked(); }
inline bool IsInList(T* a) const { return convert(a)->IsLinked(); }
inline void Remove(T* a) { convert(a)->Remove(); }
inline bool IsEmpty() { return head_.IsEmpty(); }
inline bool IsEmpty() const { return head_.IsEmpty(); }
inline T* First() {
inline T* First() const {
ASSERT(!IsEmpty());
return head_.Next()->container();
}
inline T* Last() {
inline T* Last() const {
ASSERT(!IsEmpty());
return head_.Prev()->container();
}
@ -230,7 +230,7 @@ class IntrusiveDList {
private:
Entry head_;
Entry* convert(T* entry) { return static_cast<Entry*>(entry); }
Entry* convert(T* entry) const { return static_cast<Entry*>(entry); }
};
} // namespace dart.

View file

@ -221,7 +221,8 @@ class FinalizeWeakPersistentHandlesVisitor : public HandleVisitor {
};
IsolateGroup::IsolateGroup(std::shared_ptr<IsolateGroupSource> source,
void* embedder_data)
void* embedder_data,
ObjectStore* object_store)
: embedder_data_(embedder_data),
isolates_lock_(new SafepointRwLock()),
isolates_(),
@ -234,14 +235,37 @@ IsolateGroup::IsolateGroup(std::shared_ptr<IsolateGroupSource> source,
thread_registry_(new ThreadRegistry()),
safepoint_handler_(new SafepointHandler(this)),
shared_class_table_(new SharedClassTable()),
object_store_(object_store),
#if defined(DART_PRECOMPILED_RUNTIME)
class_table_(new ClassTable(shared_class_table_.get())),
#else
class_table_(nullptr),
#endif
symbols_lock_(new SafepointRwLock()),
store_buffer_(new StoreBuffer()),
heap_(nullptr) {
heap_(nullptr),
saved_unlinked_calls_(Array::null()) {
{
WriteRwLocker wl(ThreadState::Current(), isolate_groups_rwlock_);
id_ = isolate_group_random_->NextUInt64();
}
}
IsolateGroup::IsolateGroup(std::shared_ptr<IsolateGroupSource> source,
void* embedder_data)
: IsolateGroup(source,
embedder_data,
#if !defined(DART_PRECOMPILED_RUNTIME)
// In JIT, with --enable_isolate_groups, keep the object store
// on the isolate rather than on the isolate group.
FLAG_enable_isolate_groups ? nullptr :
#endif
new ObjectStore()) {
if (object_store() != nullptr) {
object_store()->InitStubs();
}
}
IsolateGroup::~IsolateGroup() {
// Finalize any weak persistent handles with a non-null referent.
FinalizeWeakPersistentHandlesVisitor visitor(this);
@ -337,6 +361,10 @@ void IsolateGroup::set_heap(std::unique_ptr<Heap> heap) {
heap_ = std::move(heap);
}
void IsolateGroup::set_saved_unlinked_calls(const Array& saved_unlinked_calls) {
saved_unlinked_calls_ = saved_unlinked_calls.raw();
}
Thread* IsolateGroup::ScheduleThreadLocked(MonitorLocker* ml,
Thread* existing_mutator_thread,
bool is_vm_isolate,
@ -694,6 +722,13 @@ void Isolate::SendInternalLibMessage(LibMsgId msg_id, uint64_t capability) {
writer.WriteMessage(msg, main_port(), Message::kOOBPriority));
}
void Isolate::set_object_store(ObjectStore* object_store) {
ASSERT(cached_object_store_ == nullptr);
object_store_shared_ptr_.reset(object_store);
cached_object_store_ = object_store;
isolate_object_store_->set_object_store(object_store);
}
class IsolateMessageHandler : public MessageHandler {
public:
explicit IsolateMessageHandler(Isolate* isolate);
@ -1329,9 +1364,17 @@ Isolate::Isolate(IsolateGroup* isolate_group,
current_tag_(UserTag::null()),
default_tag_(UserTag::null()),
ic_miss_code_(Code::null()),
class_table_(isolate_group->class_table()),
shared_class_table_(isolate_group->shared_class_table()),
field_table_(new FieldTable()),
isolate_group_(isolate_group),
isolate_object_store_(
new IsolateObjectStore(isolate_group->object_store())),
object_store_shared_ptr_(isolate_group->object_store_shared_ptr()),
#if defined(DART_PRECOMPILED_RUNTIME)
class_table_(isolate_group->class_table_shared_ptr()),
#else
class_table_(new ClassTable(shared_class_table_)),
#endif
#if !defined(DART_PRECOMPILED_RUNTIME)
native_callback_trampolines_(),
#endif
@ -1352,7 +1395,7 @@ Isolate::Isolate(IsolateGroup* isolate_group,
start_time_micros_(OS::GetCurrentMonotonicMicros()),
random_(),
mutex_(NOT_IN_PRODUCT("Isolate::mutex_")),
symbols_mutex_(NOT_IN_PRODUCT("Isolate::symbols_mutex_")),
symbols_lock_(new SafepointRwLock()),
type_canonicalization_mutex_(
NOT_IN_PRODUCT("Isolate::type_canonicalization_mutex_")),
constant_canonicalization_mutex_(
@ -1373,6 +1416,8 @@ Isolate::Isolate(IsolateGroup* isolate_group,
spawn_count_monitor_(),
handler_info_cache_(),
catch_entry_moves_cache_() {
cached_object_store_ = object_store_shared_ptr_.get();
cached_class_table_table_ = class_table_->table();
FlagsCopyFrom(api_flags);
SetErrorsFatal(true);
// TODO(asiva): A Thread is not available here, need to figure out
@ -1407,9 +1452,6 @@ Isolate::~Isolate() {
delete reverse_pc_lookup_cache_;
reverse_pc_lookup_cache_ = nullptr;
delete dispatch_table_;
dispatch_table_ = nullptr;
if (FLAG_enable_interpreter) {
delete background_compiler_;
background_compiler_ = nullptr;
@ -1428,7 +1470,6 @@ Isolate::~Isolate() {
#endif // !defined(PRODUCT)
free(name_);
delete object_store_;
delete field_table_;
#if defined(USING_SIMULATOR)
delete simulator_;
@ -1485,6 +1526,21 @@ Isolate* Isolate::InitIsolate(const char* name_prefix,
bool is_vm_isolate) {
Isolate* result = new Isolate(isolate_group, api_flags);
result->BuildName(name_prefix);
if (!is_vm_isolate) {
// The VM isolate's object store is initialized later, after the null
// instance is created (in Dart::Init).
// Non-VM isolates need the isolate object store initialized here because
// exit_listeners have to be null-initialized: they are used if we fail to
// create the isolate below and have to do a low-level shutdown.
if (result->object_store() == nullptr) {
// In JIT with --enable-isolate-groups each isolate still
// has to have its own object store.
result->set_object_store(new ObjectStore());
result->object_store()->InitStubs();
}
result->isolate_object_store()->Init();
}
ASSERT(result != nullptr);
#if !defined(PRODUCT)
@ -1608,6 +1664,17 @@ int64_t Isolate::UptimeMicros() const {
return OS::GetCurrentMonotonicMicros() - start_time_micros_;
}
Dart_Port Isolate::origin_id() {
MutexLocker ml(&origin_id_mutex_);
return origin_id_;
}
void Isolate::set_origin_id(Dart_Port id) {
MutexLocker ml(&origin_id_mutex_);
ASSERT((id == main_port_ && origin_id_ == 0) || (origin_id_ == main_port_));
origin_id_ = id;
}
bool Isolate::IsPaused() const {
#if defined(PRODUCT)
return false;
@ -1664,7 +1731,7 @@ bool IsolateGroup::ReloadSources(JSONStream* js,
RELEASE_ASSERT(isolates_.First() == isolates_.Last());
RELEASE_ASSERT(isolates_.First() == Isolate::Current());
auto shared_class_table = IsolateGroup::Current()->class_table();
auto shared_class_table = IsolateGroup::Current()->shared_class_table();
std::shared_ptr<IsolateGroupReloadContext> group_reload_context(
new IsolateGroupReloadContext(this, shared_class_table, js));
group_reload_context_ = group_reload_context;
@ -1697,7 +1764,7 @@ bool IsolateGroup::ReloadKernel(JSONStream* js,
RELEASE_ASSERT(isolates_.First() == isolates_.Last());
RELEASE_ASSERT(isolates_.First() == Isolate::Current());
auto shared_class_table = IsolateGroup::Current()->class_table();
auto shared_class_table = IsolateGroup::Current()->shared_class_table();
std::shared_ptr<IsolateGroupReloadContext> group_reload_context(
new IsolateGroupReloadContext(this, shared_class_table, js));
group_reload_context_ = group_reload_context;
@ -1802,7 +1869,7 @@ bool Isolate::AddResumeCapability(const Capability& capability) {
compiler::target::kSmiMax / (6 * kWordSize);
const GrowableObjectArray& caps = GrowableObjectArray::Handle(
current_zone(), object_store()->resume_capabilities());
current_zone(), isolate_object_store()->resume_capabilities());
Capability& current = Capability::Handle(current_zone());
intptr_t insertion_index = -1;
for (intptr_t i = 0; i < caps.Length(); i++) {
@ -1831,7 +1898,7 @@ bool Isolate::AddResumeCapability(const Capability& capability) {
bool Isolate::RemoveResumeCapability(const Capability& capability) {
const GrowableObjectArray& caps = GrowableObjectArray::Handle(
current_zone(), object_store()->resume_capabilities());
current_zone(), isolate_object_store()->resume_capabilities());
Capability& current = Capability::Handle(current_zone());
for (intptr_t i = 0; i < caps.Length(); i++) {
current ^= caps.At(i);
@ -1854,7 +1921,7 @@ void Isolate::AddExitListener(const SendPort& listener,
compiler::target::kSmiMax / (12 * kWordSize);
const GrowableObjectArray& listeners = GrowableObjectArray::Handle(
current_zone(), object_store()->exit_listeners());
current_zone(), isolate_object_store()->exit_listeners());
SendPort& current = SendPort::Handle(current_zone());
intptr_t insertion_index = -1;
for (intptr_t i = 0; i < listeners.Length(); i += 2) {
@ -1885,7 +1952,7 @@ void Isolate::AddExitListener(const SendPort& listener,
void Isolate::RemoveExitListener(const SendPort& listener) {
const GrowableObjectArray& listeners = GrowableObjectArray::Handle(
current_zone(), object_store()->exit_listeners());
current_zone(), isolate_object_store()->exit_listeners());
SendPort& current = SendPort::Handle(current_zone());
for (intptr_t i = 0; i < listeners.Length(); i += 2) {
current ^= listeners.At(i);
@ -1901,7 +1968,7 @@ void Isolate::RemoveExitListener(const SendPort& listener) {
void Isolate::NotifyExitListeners() {
const GrowableObjectArray& listeners = GrowableObjectArray::Handle(
current_zone(), this->object_store()->exit_listeners());
current_zone(), isolate_object_store()->exit_listeners());
if (listeners.IsNull()) return;
SendPort& listener = SendPort::Handle(current_zone());
@ -1922,7 +1989,7 @@ void Isolate::AddErrorListener(const SendPort& listener) {
compiler::target::kSmiMax / (6 * kWordSize);
const GrowableObjectArray& listeners = GrowableObjectArray::Handle(
current_zone(), object_store()->error_listeners());
current_zone(), isolate_object_store()->error_listeners());
SendPort& current = SendPort::Handle(current_zone());
intptr_t insertion_index = -1;
for (intptr_t i = 0; i < listeners.Length(); i++) {
@ -1950,7 +2017,7 @@ void Isolate::AddErrorListener(const SendPort& listener) {
void Isolate::RemoveErrorListener(const SendPort& listener) {
const GrowableObjectArray& listeners = GrowableObjectArray::Handle(
current_zone(), object_store()->error_listeners());
current_zone(), isolate_object_store()->error_listeners());
SendPort& current = SendPort::Handle(current_zone());
for (intptr_t i = 0; i < listeners.Length(); i++) {
current ^= listeners.At(i);
@ -1966,7 +2033,7 @@ void Isolate::RemoveErrorListener(const SendPort& listener) {
bool Isolate::NotifyErrorListeners(const String& msg,
const String& stacktrace) {
const GrowableObjectArray& listeners = GrowableObjectArray::Handle(
current_zone(), this->object_store()->error_listeners());
current_zone(), isolate_object_store()->error_listeners());
if (listeners.IsNull()) return false;
const Array& arr = Array::Handle(current_zone(), Array::New(2));
@ -2388,16 +2455,23 @@ void Isolate::VisitObjectPointers(ObjectPointerVisitor* visitor,
ValidationPolicy validate_frames) {
ASSERT(visitor != nullptr);
// Visit objects in the object store.
if (object_store() != nullptr) {
// Visit objects in the object store if there is no isolate group object store.
if (group()->object_store() == nullptr && object_store() != nullptr) {
object_store()->VisitObjectPointers(visitor);
}
// Visit objects in the isolate object store.
if (isolate_object_store() != nullptr) {
isolate_object_store()->VisitObjectPointers(visitor);
}
// Visit objects in the class table.
class_table()->VisitObjectPointers(visitor);
// Visit objects in the field table.
field_table()->VisitObjectPointers(visitor);
if (saved_initial_field_table() != nullptr) {
saved_initial_field_table()->VisitObjectPointers(visitor);
}
visitor->clear_gc_root_type();
// Visit the objects directly referenced from the isolate structure.
@ -2511,6 +2585,11 @@ void IsolateGroup::ForEachIsolate(
}
}
Isolate* IsolateGroup::FirstIsolate() const {
SafepointWriteRwLocker ml(Thread::Current(), isolates_lock_.get());
return isolates_.IsEmpty() ? nullptr : isolates_.First();
}
void IsolateGroup::RunWithStoppedMutators(
std::function<void()> single_current_mutator,
std::function<void()> otherwise,
@ -2522,6 +2601,12 @@ void IsolateGroup::RunWithStoppedMutators(
return;
}
if (thread->IsAtSafepoint() &&
safepoint_handler()->IsOwnedByTheThread(thread)) {
single_current_mutator();
return;
}
{
SafepointReadRwLocker ml(thread, isolates_lock_.get());
const bool only_one_isolate = isolates_.First() == isolates_.Last();
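// --- A minimal standalone sketch (hypothetical names, not VM code) of the
// re-entrancy check added to RunWithStoppedMutators above: if the current
// thread already owns the "world stopped" state, run the callback directly
// instead of trying to stop the world again, which would self-deadlock. The
// VM's real mechanism is the safepoint handler.
#include <atomic>
#include <functional>
#include <mutex>
#include <thread>

class WorldStopper {
 public:
  void RunStopped(const std::function<void()>& fn) {
    if (owner_.load() == std::this_thread::get_id()) {
      fn();  // already stopped by this thread: run directly
      return;
    }
    std::lock_guard<std::mutex> lock(mutex_);
    owner_.store(std::this_thread::get_id());
    fn();
    owner_.store(std::thread::id());
  }

 private:
  std::mutex mutex_;
  std::atomic<std::thread::id> owner_{};
};
// --- End of sketch.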
@ -2551,6 +2636,11 @@ void IsolateGroup::VisitObjectPointers(ObjectPointerVisitor* visitor,
},
/*at_safepoint=*/true);
api_state()->VisitObjectPointersUnlocked(visitor);
// Visit objects in the object store.
if (object_store() != nullptr) {
object_store()->VisitObjectPointers(visitor);
}
visitor->VisitPointer(reinterpret_cast<RawObject**>(&saved_unlinked_calls_));
VisitStackPointers(visitor, validate_frames);
}
@ -2624,10 +2714,10 @@ intptr_t IsolateGroup::GetClassSizeForHeapWalkAt(intptr_t cid) {
if (IsReloading()) {
return group_reload_context_->GetClassSizeForHeapWalkAt(cid);
} else {
return class_table()->SizeAt(cid);
return shared_class_table()->SizeAt(cid);
}
#else
return class_table()->SizeAt(cid);
return shared_class_table()->SizeAt(cid);
#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
}

View file

@ -19,6 +19,7 @@
#include "vm/base_isolate.h"
#include "vm/class_table.h"
#include "vm/constants_kbc.h"
#include "vm/dispatch_table.h"
#include "vm/exceptions.h"
#include "vm/field_table.h"
#include "vm/fixed_cache.h"
@ -49,7 +50,6 @@ class Capability;
class CodeIndexTable;
class Debugger;
class DeoptContext;
class DispatchTable;
class ExternalTypedData;
class HandleScope;
class HandleVisitor;
@ -58,6 +58,7 @@ class ICData;
#if !defined(DART_PRECOMPILED_RUNTIME)
class Interpreter;
#endif
class IsolateObjectStore;
class IsolateProfilerData;
class IsolateReloadContext;
class IsolateSpawnState;
@ -278,6 +279,9 @@ class DisableIdleTimerScope : public ValueObject {
// Represents an isolate group and is shared among all isolates within a group.
class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
public:
IsolateGroup(std::shared_ptr<IsolateGroupSource> source,
void* embedder_data,
ObjectStore* object_store);
IsolateGroup(std::shared_ptr<IsolateGroupSource> source, void* embedder_data);
~IsolateGroup();
@ -336,8 +340,18 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
}
#endif // !defined(PRODUCT)
SharedClassTable* class_table() const { return shared_class_table_.get(); }
DispatchTable* dispatch_table() const { return dispatch_table_.get(); }
void set_dispatch_table(DispatchTable* table) {
dispatch_table_.reset(table);
}
SharedClassTable* shared_class_table() const {
return shared_class_table_.get();
}
StoreBuffer* store_buffer() const { return store_buffer_.get(); }
ClassTable* class_table() const { return class_table_.get(); }
ObjectStore* object_store() const { return object_store_.get(); }
SafepointRwLock* symbols_lock() { return symbols_lock_.get(); }
static inline IsolateGroup* Current() {
Thread* thread = Thread::Current();
@ -389,6 +403,7 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
// adding/removing isolates, so no locks will be held.
void ForEachIsolate(std::function<void(Isolate* isolate)> function,
bool at_safepoint = false);
Isolate* FirstIsolate() const;
// Ensures mutators are stopped during execution of the provided function.
//
@ -404,8 +419,9 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
std::function<void()> otherwise,
bool use_force_growth_in_otherwise = false);
void RunWithStoppedMutators(std::function<void()> function) {
RunWithStoppedMutators(function, function);
void RunWithStoppedMutators(std::function<void()> function,
bool use_force_growth = false) {
RunWithStoppedMutators(function, function, use_force_growth);
}
#ifndef PRODUCT
@ -486,9 +502,14 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
void RememberLiveTemporaries();
void DeferredMarkLiveTemporaries();
RawArray* saved_unlinked_calls() const { return saved_unlinked_calls_; }
void set_saved_unlinked_calls(const Array& saved_unlinked_calls);
private:
friend class Heap;
friend class StackFrame; // For `[isolates_].First()`.
// For `object_store_shared_ptr()`, `class_table_shared_ptr()`
friend class Isolate;
#define ISOLATE_GROUP_FLAG_BITS(V) V(CompactionInProgress)
@ -506,6 +527,13 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
void set_heap(std::unique_ptr<Heap> value);
const std::shared_ptr<ClassTable>& class_table_shared_ptr() const {
return class_table_;
}
const std::shared_ptr<ObjectStore>& object_store_shared_ptr() const {
return object_store_;
}
bool is_vm_isolate_heap_ = false;
void* embedder_data_ = nullptr;
@ -547,8 +575,17 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
uint64_t id_ = 0;
std::unique_ptr<SharedClassTable> shared_class_table_;
std::shared_ptr<ObjectStore> object_store_; // nullptr in JIT mode
std::shared_ptr<ClassTable> class_table_; // nullptr in JIT mode
// The symbols_lock_ on Isolate is only used when the IsolateGroup does
// not have an object_store.
std::unique_ptr<SafepointRwLock>
symbols_lock_; // Protects concurrent access to the symbol table.
std::unique_ptr<StoreBuffer> store_buffer_;
std::unique_ptr<Heap> heap_;
std::unique_ptr<DispatchTable> dispatch_table_;
RawArray* saved_unlinked_calls_;
IdleTimeHandler idle_time_handler_;
uint32_t isolate_group_flags_ = 0;
};
@ -607,12 +644,53 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
return group()->safepoint_handler();
}
ClassTable* class_table() { return &class_table_; }
static intptr_t class_table_offset() {
return OFFSET_OF(Isolate, class_table_);
ClassTable* class_table() { return class_table_.get(); }
RawClass** cached_class_table_table() { return cached_class_table_table_; }
void set_cached_class_table_table(RawClass** cached_class_table_table) {
cached_class_table_table_ = cached_class_table_table;
}
static intptr_t cached_class_table_table_offset() {
return OFFSET_OF(Isolate, cached_class_table_table_);
}
SharedClassTable* shared_class_table() const { return shared_class_table_; }
// Used during isolate creation to re-register the isolate with the right group.
void set_shared_class_table(SharedClassTable* table) {
shared_class_table_ = table;
}
// Used by the generated code.
static intptr_t shared_class_table_offset() {
return OFFSET_OF(Isolate, shared_class_table_);
}
ObjectStore* object_store() const { return object_store_shared_ptr_.get(); }
void set_object_store(ObjectStore* object_store);
static intptr_t cached_object_store_offset() {
return OFFSET_OF(Isolate, cached_object_store_);
}
SafepointRwLock* symbols_lock() { return symbols_lock_.get(); }
FieldTable* field_table() const { return field_table_; }
void set_field_table(Thread* T, FieldTable* field_table) {
delete field_table_;
field_table_ = field_table;
T->field_table_values_ = field_table->table();
}
FieldTable* saved_initial_field_table() const {
return saved_initial_field_table_.get();
}
std::shared_ptr<FieldTable> saved_initial_field_table_shareable() {
return saved_initial_field_table_;
}
void set_saved_initial_field_table(std::shared_ptr<FieldTable> field_table) {
saved_initial_field_table_ = field_table;
}
IsolateObjectStore* isolate_object_store() const {
return isolate_object_store_.get();
}
// Prefers old classes when we are in the middle of a reload.
RawClass* GetClassForHeapWalkAt(intptr_t cid);
@ -646,11 +724,8 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
ASSERT(main_port_ == 0); // Only set main port once.
main_port_ = port;
}
Dart_Port origin_id() const { return origin_id_; }
void set_origin_id(Dart_Port id) {
ASSERT((id == main_port_ && origin_id_ == 0) || (origin_id_ == main_port_));
origin_id_ = id;
}
Dart_Port origin_id();
void set_origin_id(Dart_Port id);
void set_pause_capability(uint64_t value) { pause_capability_ = value; }
uint64_t pause_capability() const { return pause_capability_; }
void set_terminate_capability(uint64_t value) {
@ -662,12 +737,6 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
Heap* heap() const { return isolate_group_->heap(); }
ObjectStore* object_store() const { return object_store_; }
void set_object_store(ObjectStore* value) { object_store_ = value; }
static intptr_t object_store_offset() {
return OFFSET_OF(Isolate, object_store_);
}
void set_init_callback_data(void* value) { init_callback_data_ = value; }
void* init_callback_data() const { return init_callback_data_; }
@ -717,7 +786,6 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
}
Mutex* mutex() { return &mutex_; }
Mutex* symbols_mutex() { return &symbols_mutex_; }
Mutex* type_canonicalization_mutex() { return &type_canonicalization_mutex_; }
Mutex* constant_canonicalization_mutex() {
return &constant_canonicalization_mutex_;
@ -735,7 +803,6 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
#if !defined(PRODUCT)
Debugger* debugger() const {
ASSERT(debugger_ != nullptr);
return debugger_;
}
@ -1048,8 +1115,9 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
void set_obfuscation_map(const char** map) { obfuscation_map_ = map; }
const char** obfuscation_map() const { return obfuscation_map_; }
const DispatchTable* dispatch_table() const { return dispatch_table_; }
void set_dispatch_table(DispatchTable* table) { dispatch_table_ = table; }
const DispatchTable* dispatch_table() const {
return group()->dispatch_table();
}
// Returns the pc -> code lookup cache object for this isolate.
ReversePcLookupCache* reverse_pc_lookup_cache() const {
@ -1233,14 +1301,23 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
RawUserTag* current_tag_;
RawUserTag* default_tag_;
RawCode* ic_miss_code_;
ObjectStore* object_store_ = nullptr;
ClassTable class_table_;
// Cached value of object_store_shared_ptr_, here for generated code access
ObjectStore* cached_object_store_ = nullptr;
SharedClassTable* shared_class_table_ = nullptr;
// Cached value of class_table_->table_, here for generated code access
RawClass** cached_class_table_table_ = nullptr;
FieldTable* field_table_ = nullptr;
bool single_step_ = false;
// End accessed from generated code.
IsolateGroup* isolate_group_;
IdleTimeHandler idle_time_handler_;
std::shared_ptr<FieldTable> saved_initial_field_table_;
std::unique_ptr<IsolateObjectStore> isolate_object_store_;
// Shared in AOT (same pointer as on IsolateGroup), not shared in JIT.
std::shared_ptr<ObjectStore> object_store_shared_ptr_;
// Shared in AOT (same pointer as on IsolateGroup), not shared in JIT.
std::shared_ptr<ClassTable> class_table_;
#if !defined(DART_PRECOMPILED_RUNTIME)
NativeCallbackTrampolines native_callback_trampolines_;
@ -1328,6 +1405,7 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
Dart_Port main_port_ = 0;
// Isolates created by Isolate.spawn have the same origin id.
Dart_Port origin_id_ = 0;
Mutex origin_id_mutex_;
uint64_t pause_capability_ = 0;
uint64_t terminate_capability_ = 0;
void* init_callback_data_ = nullptr;
@ -1335,7 +1413,8 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
Random random_;
Simulator* simulator_ = nullptr;
Mutex mutex_; // Protects compiler stats.
Mutex symbols_mutex_; // Protects concurrent access to the symbol table.
std::unique_ptr<SafepointRwLock>
symbols_lock_; // Protects concurrent access to the symbol table.
Mutex type_canonicalization_mutex_; // Protects type canonicalization.
Mutex constant_canonicalization_mutex_; // Protects const canonicalization.
Mutex megamorphic_mutex_; // Protects the table of megamorphic caches and

View file

@ -310,9 +310,14 @@ void NativeEntry::LinkNativeCall(Dart_NativeArguments args) {
const Code& current_trampoline =
Code::Handle(zone, CodePatcher::GetNativeCallAt(
caller_frame->pc(), code, &current_function));
// Some other isolate (with code being shared in AOT) might have updated
// the target function/trampoline already.
ASSERT(current_function ==
reinterpret_cast<NativeFunction>(LinkNativeCall));
ASSERT(current_trampoline.raw() == StubCode::CallBootstrapNative().raw());
reinterpret_cast<NativeFunction>(LinkNativeCall) ||
current_function == target_function);
ASSERT(current_trampoline.raw() ==
StubCode::CallBootstrapNative().raw() ||
current_function == target_function);
}
#endif
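// --- A minimal standalone sketch (hypothetical names, not VM code) of why the
// assertions above accept "already linked": with code shared across isolates
// in AOT, another isolate can patch the call site first, so linking treats
// "the site already points at the final target" as success.
#include <atomic>
#include <cassert>

using Fn = void (*)();
void Stub() {}    // stand-in for the link-on-first-call trampoline
void Target() {}  // stand-in for the resolved native function

void Link(std::atomic<Fn>& site) {  // call sites start out as {&Stub}
  Fn expected = &Stub;
  // Either we win the race and install Target, or someone already did.
  if (!site.compare_exchange_strong(expected, &Target)) {
    assert(expected == &Target);
  }
}
// --- End of sketch.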

View file

@ -1636,6 +1636,8 @@ RawError* Object::Init(Isolate* isolate,
#if !defined(DART_PRECOMPILED_RUNTIME)
// Object::Init version when we are bootstrapping from source or from a
// Kernel binary.
// This will initialize the isolate group's object_store, shared by all
// isolates running in the isolate group.
ObjectStore* object_store = isolate->object_store();
Class& cls = Class::Handle(zone);
@ -1647,6 +1649,7 @@ RawError* Object::Init(Isolate* isolate,
// All RawArray fields will be initialized to an empty array, therefore
// initialize array class first.
cls = Class::New<Array, RTN::Array>(isolate);
ASSERT(object_store->array_class() == Class::null());
object_store->set_array_class(cls);
// VM classes that are parameterized (Array, ImmutableArray,
@ -2634,7 +2637,7 @@ RawObject* Object::Allocate(intptr_t cls_id, intptr_t size, Heap::Space space) {
}
}
#ifndef PRODUCT
auto class_table = thread->isolate_group()->class_table();
auto class_table = thread->isolate_group()->shared_class_table();
if (class_table->TraceAllocationFor(cls_id)) {
Profiler::SampleAllocation(thread, cls_id);
}
@ -3291,9 +3294,10 @@ UnboxedFieldBitmap Class::CalculateFieldOffsets() const {
set_num_native_fields(super.num_native_fields());
if (FLAG_precompiled_mode) {
host_bitmap =
Isolate::Current()->group()->class_table()->GetUnboxedFieldsMapAt(
super.id());
host_bitmap = Isolate::Current()
->group()
->shared_class_table()
->GetUnboxedFieldsMapAt(super.id());
}
}
// If the super class is parameterized, use the same type_arguments field,
@ -3756,8 +3760,8 @@ void Class::Finalize() const {
// Sets the new size in the class table.
isolate->class_table()->SetAt(id(), raw());
if (FLAG_precompiled_mode) {
isolate->group()->class_table()->SetUnboxedFieldsMapAt(id(),
host_bitmap);
isolate->group()->shared_class_table()->SetUnboxedFieldsMapAt(
id(), host_bitmap);
}
}
}
@ -3852,7 +3856,7 @@ void Class::DisableAllCHAOptimizedCode() {
bool Class::TraceAllocation(Isolate* isolate) const {
#ifndef PRODUCT
auto class_table = isolate->group()->class_table();
auto class_table = isolate->group()->shared_class_table();
return class_table->TraceAllocationFor(id());
#else
return false;
@ -3864,7 +3868,7 @@ void Class::SetTraceAllocation(bool trace_allocation) const {
Isolate* isolate = Isolate::Current();
const bool changed = trace_allocation != this->TraceAllocation(isolate);
if (changed) {
auto class_table = isolate->group()->class_table();
auto class_table = isolate->group()->shared_class_table();
class_table->SetTraceAllocationFor(id(), trace_allocation);
DisableAllocationStub();
}
@ -17553,7 +17557,7 @@ uint32_t Instance::CanonicalizeHash() const {
Instance& member = Instance::Handle();
const auto unboxed_fields_bitmap =
thread->isolate()->group()->class_table()->GetUnboxedFieldsMapAt(
thread->isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt(
GetClassId());
for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
@ -17605,7 +17609,7 @@ bool Instance::CheckAndCanonicalizeFields(Thread* thread,
const intptr_t instance_size = SizeFromClass();
ASSERT(instance_size != 0);
const auto unboxed_fields_bitmap =
thread->isolate()->group()->class_table()->GetUnboxedFieldsMapAt(
thread->isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt(
GetClassId());
for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
offset += kWordSize) {

View file

@ -848,7 +848,8 @@ bool Class::CanReloadFinalized(const Class& replacement,
// Make sure the declaration types argument count matches for the two classes.
// ex. class A<int,B> {} cannot be replace with class A<B> {}.
auto group_context = context->group_reload_context();
auto shared_class_table = group_context->isolate_group()->class_table();
auto shared_class_table =
group_context->isolate_group()->shared_class_table();
if (NumTypeArguments() != replacement.NumTypeArguments()) {
group_context->AddReasonForCancelling(
new (context->zone())

View file

@ -16,6 +16,91 @@
namespace dart {
IsolateObjectStore::IsolateObjectStore(ObjectStore* object_store)
: object_store_(object_store) {}
IsolateObjectStore::~IsolateObjectStore() {}
void IsolateObjectStore::VisitObjectPointers(ObjectPointerVisitor* visitor) {
ASSERT(visitor != NULL);
visitor->set_gc_root_type("isolate_object store");
visitor->VisitPointers(from(), to());
visitor->clear_gc_root_type();
}
void IsolateObjectStore::Init() {
#define INIT_FIELD(Type, name) name##_ = Type::null();
ISOLATE_OBJECT_STORE_FIELD_LIST(INIT_FIELD, INIT_FIELD)
#undef INIT_FIELD
for (RawObject** current = from(); current <= to(); current++) {
ASSERT(*current == Object::null());
}
}
#ifndef PRODUCT
void IsolateObjectStore::PrintToJSONObject(JSONObject* jsobj) {
jsobj->AddProperty("type", "_IsolateObjectStore");
{
JSONObject fields(jsobj, "fields");
Object& value = Object::Handle();
#define PRINT_OBJECT_STORE_FIELD(type, name) \
value = name##_; \
fields.AddProperty(#name "_", value);
ISOLATE_OBJECT_STORE_FIELD_LIST(PRINT_OBJECT_STORE_FIELD,
PRINT_OBJECT_STORE_FIELD);
#undef PRINT_OBJECT_STORE_FIELD
}
}
#endif // !PRODUCT
static RawUnhandledException* CreatePreallocatedUnandledException(
Zone* zone,
const Object& out_of_memory) {
// Allocate pre-allocated unhandled exception object initialized with the
// pre-allocated OutOfMemoryError.
const UnhandledException& unhandled_exception =
UnhandledException::Handle(UnhandledException::New(
Instance::Cast(out_of_memory), StackTrace::Handle(zone)));
return unhandled_exception.raw();
}
static RawStackTrace* CreatePreallocatedStackTrace(Zone* zone) {
const Array& code_array = Array::Handle(
zone, Array::New(StackTrace::kPreallocatedStackdepth, Heap::kOld));
const Array& pc_offset_array = Array::Handle(
zone, Array::New(StackTrace::kPreallocatedStackdepth, Heap::kOld));
const StackTrace& stack_trace =
StackTrace::Handle(zone, StackTrace::New(code_array, pc_offset_array));
// Expansion of inlined functions requires additional memory at run time,
// avoid it.
stack_trace.set_expand_inlined(false);
return stack_trace.raw();
}
RawError* IsolateObjectStore::PreallocateObjects() {
Thread* thread = Thread::Current();
Isolate* isolate = thread->isolate();
Zone* zone = thread->zone();
ASSERT(isolate != NULL && isolate->isolate_object_store() == this);
ASSERT(preallocated_stack_trace() == StackTrace::null());
resume_capabilities_ = GrowableObjectArray::New();
exit_listeners_ = GrowableObjectArray::New();
error_listeners_ = GrowableObjectArray::New();
// Allocate pre-allocated unhandled exception object initialized with the
// pre-allocated OutOfMemoryError.
const Object& out_of_memory =
Object::Handle(zone, object_store_->out_of_memory());
set_preallocated_unhandled_exception(UnhandledException::Handle(
CreatePreallocatedUnandledException(zone, out_of_memory)));
set_preallocated_stack_trace(
StackTrace::Handle(CreatePreallocatedStackTrace(zone)));
return Error::null();
}
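// --- A minimal standalone sketch (hypothetical names, not VM code) of why the
// unhandled exception and stack trace above are preallocated per isolate: the
// out-of-memory reporting path must not allocate, so the objects it uses are
// created up front while allocation still succeeds.
#include <memory>
#include <string>

struct Failure {
  std::string message;
};

class Reporter {
 public:
  Reporter()  // runs at isolate setup, when allocation is still possible
      : preallocated_oom_(std::make_unique<Failure>(Failure{"out of memory"})) {}

  const Failure& OutOfMemory() const {  // safe even when the heap is exhausted
    return *preallocated_oom_;
  }

 private:
  std::unique_ptr<Failure> preallocated_oom_;
};
// --- End of sketch.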
ObjectStore::ObjectStore() {
#define INIT_FIELD(Type, name) name##_ = Type::null();
OBJECT_STORE_FIELD_LIST(INIT_FIELD, INIT_FIELD)
@ -35,16 +120,10 @@ void ObjectStore::VisitObjectPointers(ObjectPointerVisitor* visitor) {
visitor->clear_gc_root_type();
}
void ObjectStore::Init(Isolate* isolate) {
ASSERT(isolate->object_store() == NULL);
ObjectStore* store = new ObjectStore();
isolate->set_object_store(store);
if (!Dart::VmIsolateNameEquals(isolate->name())) {
#define DO(member, name) store->set_##member(StubCode::name());
OBJECT_STORE_STUB_CODE_LIST(DO)
void ObjectStore::InitStubs() {
#define DO(member, name) set_##member(StubCode::name());
OBJECT_STORE_STUB_CODE_LIST(DO)
#undef DO
}
}
#ifndef PRODUCT
@ -72,22 +151,22 @@ static RawInstance* AllocateObjectByClassName(const Library& library,
RawError* ObjectStore::PreallocateObjects() {
Thread* thread = Thread::Current();
IsolateGroup* isolate_group = thread->isolate_group();
Isolate* isolate = thread->isolate();
Zone* zone = thread->zone();
ASSERT(isolate != NULL && isolate->object_store() == this);
// Either we are the object store on isolate group, or isolate group has no
// object store and we are the object store on the isolate.
ASSERT(isolate_group != NULL && (isolate_group->object_store() == this ||
(isolate_group->object_store() == nullptr &&
isolate->object_store() == this)));
if (this->stack_overflow() != Instance::null()) {
ASSERT(this->out_of_memory() != Instance::null());
ASSERT(this->preallocated_stack_trace() != StackTrace::null());
return Error::null();
}
ASSERT(this->stack_overflow() == Instance::null());
ASSERT(this->out_of_memory() == Instance::null());
ASSERT(this->preallocated_stack_trace() == StackTrace::null());
this->closure_functions_ = GrowableObjectArray::New();
this->resume_capabilities_ = GrowableObjectArray::New();
this->exit_listeners_ = GrowableObjectArray::New();
this->error_listeners_ = GrowableObjectArray::New();
Object& result = Object::Handle();
const Library& library = Library::Handle(Library::CoreLibrary());
@ -104,24 +183,6 @@ RawError* ObjectStore::PreallocateObjects() {
}
set_out_of_memory(Instance::Cast(result));
// Allocate pre-allocated unhandled exception object initialized with the
// pre-allocated OutOfMemoryError.
const UnhandledException& unhandled_exception =
UnhandledException::Handle(UnhandledException::New(
Instance::Cast(result), StackTrace::Handle(zone)));
set_preallocated_unhandled_exception(unhandled_exception);
const Array& code_array = Array::Handle(
zone, Array::New(StackTrace::kPreallocatedStackdepth, Heap::kOld));
const Array& pc_offset_array = Array::Handle(
zone, Array::New(StackTrace::kPreallocatedStackdepth, Heap::kOld));
const StackTrace& stack_trace =
StackTrace::Handle(zone, StackTrace::New(code_array, pc_offset_array));
// Expansion of inlined functions requires additional memory at run time,
// avoid it.
stack_trace.set_expand_inlined(false);
set_preallocated_stack_trace(stack_trace);
return Error::null();
}
@ -272,10 +333,4 @@ void ObjectStore::InitKnownObjects() {
#endif
}
void ObjectStore::PostLoad() {
resume_capabilities_ = GrowableObjectArray::New();
exit_listeners_ = GrowableObjectArray::New();
error_listeners_ = GrowableObjectArray::New();
}
} // namespace dart

View file

@ -34,6 +34,9 @@ class ObjectPointerVisitor;
// TODO(liama): Once NNBD is enabled, *_type will be deleted and all uses will
// be replaced with *_type_non_nullable. Later, once we drop support for opted
// out code, *_type_legacy will be deleted.
//
// R_ - needs getter only
// RW - needs getter and setter
#define OBJECT_STORE_FIELD_LIST(R_, RW) \
RW(Class, object_class) \
RW(Type, object_type) \
@ -149,8 +152,6 @@ class ObjectPointerVisitor;
RW(GrowableObjectArray, pending_classes) \
RW(Instance, stack_overflow) \
RW(Instance, out_of_memory) \
RW(UnhandledException, preallocated_unhandled_exception) \
RW(StackTrace, preallocated_stack_trace) \
RW(Function, lookup_port_handler) \
RW(Function, handle_message_function) \
RW(Function, growable_list_factory) \
@ -163,7 +164,6 @@ class ObjectPointerVisitor;
RW(Function, complete_on_async_return) \
RW(Class, async_star_stream_controller) \
RW(Array, bytecode_attributes) \
RW(Array, saved_unlinked_calls) \
RW(GrowableObjectArray, llvm_constant_pool) \
RW(GrowableObjectArray, llvm_function_pool) \
RW(Array, llvm_constant_hash_table) \
@ -198,12 +198,10 @@ class ObjectPointerVisitor;
RW(Code, call_closure_no_such_method_stub) \
R_(Code, megamorphic_call_miss_code) \
R_(Function, megamorphic_call_miss_function) \
R_(GrowableObjectArray, resume_capabilities) \
R_(GrowableObjectArray, exit_listeners) \
R_(GrowableObjectArray, error_listeners) \
RW(Array, dispatch_table_code_entries) \
RW(Array, code_order_table) \
RW(Array, obfuscation_map) \
RW(Array, saved_initial_field_values) \
RW(Class, ffi_pointer_class) \
RW(Class, ffi_native_type_class) \
RW(Class, ffi_struct_class) \
@ -238,8 +236,80 @@ class ObjectPointerVisitor;
DO(init_static_field_stub, InitStaticField) \
DO(instance_of_stub, InstanceOf)
// The isolate object store is a per-isolate instance which stores references
// to objects used by the VM that are specific to a single isolate.
#define ISOLATE_OBJECT_STORE_FIELD_LIST(R_, RW) \
RW(UnhandledException, preallocated_unhandled_exception) \
RW(StackTrace, preallocated_stack_trace) \
R_(GrowableObjectArray, resume_capabilities) \
R_(GrowableObjectArray, exit_listeners) \
R_(GrowableObjectArray, error_listeners)
// Please remember that the last entry must be referenced in the 'to' function below.
class IsolateObjectStore {
public:
explicit IsolateObjectStore(ObjectStore* object_store);
~IsolateObjectStore();
#define DECLARE_GETTER(Type, name) \
Raw##Type* name() const { return name##_; } \
static intptr_t name##_offset() { \
return OFFSET_OF(IsolateObjectStore, name##_); \
}
#define DECLARE_GETTER_AND_SETTER(Type, name) \
DECLARE_GETTER(Type, name) \
void set_##name(const Type& value) { name##_ = value.raw(); }
ISOLATE_OBJECT_STORE_FIELD_LIST(DECLARE_GETTER, DECLARE_GETTER_AND_SETTER)
#undef DECLARE_GETTER
#undef DECLARE_GETTER_AND_SETTER
// Visit all object pointers.
void VisitObjectPointers(ObjectPointerVisitor* visitor);
// Called to initialize objects required by the VM but which invoke
// Dart code. If an error occurs the error object is returned; otherwise
// a null object is returned.
RawError* PreallocateObjects();
void Init();
void PostLoad();
ObjectStore* object_store() const { return object_store_; }
void set_object_store(ObjectStore* object_store) {
ASSERT(object_store_ == nullptr);
object_store_ = object_store;
}
static intptr_t object_store_offset() {
return OFFSET_OF(IsolateObjectStore, object_store_);
}
#ifndef PRODUCT
void PrintToJSONObject(JSONObject* jsobj);
#endif
private:
// Finds a core library private method in Object.
RawFunction* PrivateObjectLookup(const String& name);
RawObject** from() {
return reinterpret_cast<RawObject**>(&preallocated_unhandled_exception_);
}
#define DECLARE_OBJECT_STORE_FIELD(type, name) Raw##type* name##_;
ISOLATE_OBJECT_STORE_FIELD_LIST(DECLARE_OBJECT_STORE_FIELD,
DECLARE_OBJECT_STORE_FIELD)
#undef DECLARE_OBJECT_STORE_FIELD
RawObject** to() { return reinterpret_cast<RawObject**>(&error_listeners_); }
ObjectStore* object_store_;
friend class Serializer;
friend class Deserializer;
DISALLOW_COPY_AND_ASSIGN(IsolateObjectStore);
};
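
For a single RW entry, the field-list and declaration macros above expand to a getter, an offset helper, a setter and a backing slot. For example, RW(StackTrace, preallocated_stack_trace) produces roughly:

RawStackTrace* preallocated_stack_trace() const {
  return preallocated_stack_trace_;
}
static intptr_t preallocated_stack_trace_offset() {
  return OFFSET_OF(IsolateObjectStore, preallocated_stack_trace_);
}
void set_preallocated_stack_trace(const StackTrace& value) {
  preallocated_stack_trace_ = value.raw();
}
// ...plus, via DECLARE_OBJECT_STORE_FIELD, the backing slot itself:
RawStackTrace* preallocated_stack_trace_;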
// The object store is a per-isolate-group instance which stores references to
// objects used by the VM that are shared by all isolates in a group.
class ObjectStore {
public:
enum BootstrapLibraryId {
@ -249,6 +319,7 @@ class ObjectStore {
#undef MAKE_ID
};
ObjectStore();
~ObjectStore();
#define DECLARE_GETTER(Type, name) \
@ -306,17 +377,13 @@ class ObjectStore {
void InitKnownObjects();
void PostLoad();
static void Init(Isolate* isolate);
void InitStubs();
#ifndef PRODUCT
void PrintToJSONObject(JSONObject* jsobj);
#endif
private:
ObjectStore();
// Finds a core library private method in Object.
RawFunction* PrivateObjectLookup(const String& name);

View file

@ -64,11 +64,11 @@ void RawObject::Validate(IsolateGroup* isolate_group) const {
}
}
const intptr_t class_id = ClassIdTag::decode(tags);
if (!isolate_group->class_table()->IsValidIndex(class_id)) {
if (!isolate_group->shared_class_table()->IsValidIndex(class_id)) {
FATAL1("Invalid class id encountered %" Pd "\n", class_id);
}
if (class_id == kNullCid &&
isolate_group->class_table()->HasValidClassAt(class_id)) {
isolate_group->shared_class_table()->HasValidClassAt(class_id)) {
// Null class not yet initialized; skip.
return;
}
@ -241,7 +241,7 @@ intptr_t RawObject::HeapSizeFromClass(uint32_t tags) const {
const bool use_saved_class_table = false;
#endif
auto class_table = isolate_group->class_table();
auto class_table = isolate_group->shared_class_table();
ASSERT(use_saved_class_table || class_table->SizeAt(class_id) > 0);
if (!class_table->IsValidIndex(class_id) ||
(!class_table->HasValidClassAt(class_id) && !use_saved_class_table)) {

View file

@ -418,10 +418,14 @@ DEFINE_RUNTIME_ENTRY(InstantiateTypeArguments, 3) {
// Code inlined in the caller should have optimized the case where the
// instantiator can be reused as type argument vector.
ASSERT(!type_arguments.IsUninstantiatedIdentity());
type_arguments = type_arguments.InstantiateAndCanonicalizeFrom(
instantiator_type_arguments, function_type_arguments);
ASSERT(type_arguments.IsNull() || type_arguments.IsInstantiated());
arguments.SetReturn(type_arguments);
thread->isolate_group()->RunWithStoppedMutators(
[&]() {
type_arguments = type_arguments.InstantiateAndCanonicalizeFrom(
instantiator_type_arguments, function_type_arguments);
ASSERT(type_arguments.IsNull() || type_arguments.IsInstantiated());
arguments.SetReturn(type_arguments);
},
/*use_force_growth=*/true);
}
// Instantiate type.
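
Wrapping the canonicalization in RunWithStoppedMutators is the synchronization the commit message refers to: in AOT the canonical TypeArguments table is shared by all isolates in a group, so it is mutated only while every other mutator thread is parked at a safepoint. The general shape of the pattern, sketched with hypothetical helper names:

thread->isolate_group()->RunWithStoppedMutators(
    [&]() {
      // Every other mutator in this isolate group is stopped at a safepoint
      // here, so the shared structure can be re-checked and updated without
      // racing against them.
      if (StillNeedsUpdate()) {  // hypothetical re-check
        ApplyUpdate();           // hypothetical mutation of shared state
      }
    },
    /*use_force_growth=*/true);  // presumably lets allocations in the closure
                                 // grow the heap instead of waiting for a GC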
@ -598,6 +602,7 @@ static void PrintTypeCheck(const char* message,
// This operation is currently very slow (lookup of code is not efficient yet).
static void UpdateTypeTestCache(
Zone* zone,
Thread* thread,
const Instance& instance,
const AbstractType& type,
const TypeArguments& instantiator_type_arguments,
@ -641,94 +646,103 @@ static void UpdateTypeTestCache(
instance_type_arguments = instance.GetTypeArguments();
}
}
const intptr_t len = new_cache.NumberOfChecks();
if (len >= FLAG_max_subtype_cache_entries) {
if (FLAG_trace_type_checks) {
OS::PrintErr("Not updating subtype test cache as its length reached %d\n",
FLAG_max_subtype_cache_entries);
}
return;
}
thread->isolate_group()->RunWithStoppedMutators(
[&]() {
const intptr_t len = new_cache.NumberOfChecks();
if (len >= FLAG_max_subtype_cache_entries) {
if (FLAG_trace_type_checks) {
OS::PrintErr(
"Not updating subtype test cache as its length reached %d\n",
FLAG_max_subtype_cache_entries);
}
return;
}
#if defined(DEBUG)
ASSERT(instance_type_arguments.IsNull() ||
instance_type_arguments.IsCanonical());
ASSERT(instantiator_type_arguments.IsNull() ||
instantiator_type_arguments.IsCanonical());
ASSERT(function_type_arguments.IsNull() ||
function_type_arguments.IsCanonical());
ASSERT(instance_parent_function_type_arguments.IsNull() ||
instance_parent_function_type_arguments.IsCanonical());
ASSERT(instance_delayed_type_arguments.IsNull() ||
instance_delayed_type_arguments.IsCanonical());
auto& last_instance_class_id_or_function = Object::Handle(zone);
auto& last_instance_type_arguments = TypeArguments::Handle(zone);
auto& last_instantiator_type_arguments = TypeArguments::Handle(zone);
auto& last_function_type_arguments = TypeArguments::Handle(zone);
auto& last_instance_parent_function_type_arguments =
TypeArguments::Handle(zone);
auto& last_instance_delayed_type_arguments = TypeArguments::Handle(zone);
Bool& last_result = Bool::Handle(zone);
for (intptr_t i = 0; i < len; ++i) {
new_cache.GetCheck(
i, &last_instance_class_id_or_function, &last_instance_type_arguments,
&last_instantiator_type_arguments, &last_function_type_arguments,
&last_instance_parent_function_type_arguments,
&last_instance_delayed_type_arguments, &last_result);
if ((last_instance_class_id_or_function.raw() ==
instance_class_id_or_function.raw()) &&
(last_instance_type_arguments.raw() == instance_type_arguments.raw()) &&
(last_instantiator_type_arguments.raw() ==
instantiator_type_arguments.raw()) &&
(last_function_type_arguments.raw() == function_type_arguments.raw()) &&
(last_instance_parent_function_type_arguments.raw() ==
instance_parent_function_type_arguments.raw()) &&
(last_instance_delayed_type_arguments.raw() ==
instance_delayed_type_arguments.raw())) {
OS::PrintErr(" Error in test cache %p ix: %" Pd ",", new_cache.raw(), i);
PrintTypeCheck(" duplicate cache entry", instance, type,
instantiator_type_arguments, function_type_arguments,
result);
UNREACHABLE();
return;
}
}
ASSERT(instance_type_arguments.IsNull() ||
instance_type_arguments.IsCanonical());
ASSERT(instantiator_type_arguments.IsNull() ||
instantiator_type_arguments.IsCanonical());
ASSERT(function_type_arguments.IsNull() ||
function_type_arguments.IsCanonical());
ASSERT(instance_parent_function_type_arguments.IsNull() ||
instance_parent_function_type_arguments.IsCanonical());
ASSERT(instance_delayed_type_arguments.IsNull() ||
instance_delayed_type_arguments.IsCanonical());
auto& last_instance_class_id_or_function = Object::Handle(zone);
auto& last_instance_type_arguments = TypeArguments::Handle(zone);
auto& last_instantiator_type_arguments = TypeArguments::Handle(zone);
auto& last_function_type_arguments = TypeArguments::Handle(zone);
auto& last_instance_parent_function_type_arguments =
TypeArguments::Handle(zone);
auto& last_instance_delayed_type_arguments =
TypeArguments::Handle(zone);
Bool& last_result = Bool::Handle(zone);
for (intptr_t i = 0; i < len; ++i) {
new_cache.GetCheck(
i, &last_instance_class_id_or_function,
&last_instance_type_arguments, &last_instantiator_type_arguments,
&last_function_type_arguments,
&last_instance_parent_function_type_arguments,
&last_instance_delayed_type_arguments, &last_result);
if ((last_instance_class_id_or_function.raw() ==
instance_class_id_or_function.raw()) &&
(last_instance_type_arguments.raw() ==
instance_type_arguments.raw()) &&
(last_instantiator_type_arguments.raw() ==
instantiator_type_arguments.raw()) &&
(last_function_type_arguments.raw() ==
function_type_arguments.raw()) &&
(last_instance_parent_function_type_arguments.raw() ==
instance_parent_function_type_arguments.raw()) &&
(last_instance_delayed_type_arguments.raw() ==
instance_delayed_type_arguments.raw())) {
// Some other isolate might have updated the cache between when the
// entry was found missing and now.
return;
}
}
#endif
new_cache.AddCheck(instance_class_id_or_function, instance_type_arguments,
instantiator_type_arguments, function_type_arguments,
instance_parent_function_type_arguments,
instance_delayed_type_arguments, result);
if (FLAG_trace_type_checks) {
AbstractType& test_type = AbstractType::Handle(zone, type.raw());
if (!test_type.IsInstantiated()) {
test_type = type.InstantiateFrom(instantiator_type_arguments,
function_type_arguments, kAllFree, NULL,
Heap::kNew);
}
const auto& type_class = Class::Handle(zone, test_type.type_class());
const auto& instance_class_name =
String::Handle(zone, instance_class.Name());
OS::PrintErr(
" Updated test cache %p ix: %" Pd
" with "
"(cid-or-fun: %p, type-args: %p, i-type-args: %p, f-type-args: %p, "
"p-type-args: %p, d-type-args: %p, result: %s)\n"
" instance [class: (%p '%s' cid: %" Pd
"), type-args: %p %s]\n"
" test-type [class: (%p '%s' cid: %" Pd
"), i-type-args: %p %s, f-type-args: %p %s]\n",
new_cache.raw(), len, instance_class_id_or_function.raw(),
instance_type_arguments.raw(), instantiator_type_arguments.raw(),
function_type_arguments.raw(),
instance_parent_function_type_arguments.raw(),
instance_delayed_type_arguments.raw(), result.ToCString(),
instance_class.raw(), instance_class_name.ToCString(),
instance_class.id(), instance_type_arguments.raw(),
instance_type_arguments.ToCString(), type_class.raw(),
String::Handle(zone, type_class.Name()).ToCString(), type_class.id(),
instantiator_type_arguments.raw(),
instantiator_type_arguments.ToCString(), function_type_arguments.raw(),
function_type_arguments.ToCString());
}
new_cache.AddCheck(instance_class_id_or_function,
instance_type_arguments, instantiator_type_arguments,
function_type_arguments,
instance_parent_function_type_arguments,
instance_delayed_type_arguments, result);
if (FLAG_trace_type_checks) {
AbstractType& test_type = AbstractType::Handle(zone, type.raw());
if (!test_type.IsInstantiated()) {
test_type = type.InstantiateFrom(instantiator_type_arguments,
function_type_arguments, kAllFree,
NULL, Heap::kNew);
}
const auto& type_class = Class::Handle(zone, test_type.type_class());
const auto& instance_class_name =
String::Handle(zone, instance_class.Name());
OS::PrintErr(
" Updated test cache %p ix: %" Pd
" with "
"(cid-or-fun: %p, type-args: %p, i-type-args: %p, f-type-args: "
"%p, "
"p-type-args: %p, d-type-args: %p, result: %s)\n"
" instance [class: (%p '%s' cid: %" Pd
"), type-args: %p %s]\n"
" test-type [class: (%p '%s' cid: %" Pd
"), i-type-args: %p %s, f-type-args: %p %s]\n",
new_cache.raw(), len, instance_class_id_or_function.raw(),
instance_type_arguments.raw(), instantiator_type_arguments.raw(),
function_type_arguments.raw(),
instance_parent_function_type_arguments.raw(),
instance_delayed_type_arguments.raw(), result.ToCString(),
instance_class.raw(), instance_class_name.ToCString(),
instance_class.id(), instance_type_arguments.raw(),
instance_type_arguments.ToCString(), type_class.raw(),
String::Handle(zone, type_class.Name()).ToCString(),
type_class.id(), instantiator_type_arguments.raw(),
instantiator_type_arguments.ToCString(),
function_type_arguments.raw(),
function_type_arguments.ToCString());
}
},
/*use_force_growth=*/true);
}
// Check that the given instance is an instance of the given type.
@ -758,7 +772,7 @@ DEFINE_RUNTIME_ENTRY(Instanceof, 5) {
PrintTypeCheck("InstanceOf", instance, type, instantiator_type_arguments,
function_type_arguments, result);
}
UpdateTypeTestCache(zone, instance, type, instantiator_type_arguments,
UpdateTypeTestCache(zone, thread, instance, type, instantiator_type_arguments,
function_type_arguments, result, cache);
arguments.SetReturn(result);
}
@ -922,18 +936,26 @@ DEFINE_RUNTIME_ENTRY(TypeCheck, 7) {
TypeTestingStubCallPattern tts_pattern(caller_frame->pc());
const intptr_t stc_pool_idx = tts_pattern.GetSubtypeTestCachePoolIndex();
// The pool entry must be initialized to `null` when we patch it.
ASSERT(pool.ObjectAt(stc_pool_idx) == Object::null());
cache = SubtypeTestCache::New();
pool.SetObjectAt(stc_pool_idx, cache);
thread->isolate_group()->RunWithStoppedMutators(
[&]() {
// If nobody has updated the pool since the check, we are
// updating it now.
if (pool.ObjectAt(stc_pool_idx) == Object::null()) {
cache = SubtypeTestCache::New();
pool.SetObjectAt(stc_pool_idx, cache);
}
},
/*use_force_growth=*/true);
#else
UNREACHABLE();
#endif
}
UpdateTypeTestCache(zone, src_instance, dst_type,
instantiator_type_arguments, function_type_arguments,
Bool::True(), cache);
if (!cache.IsNull()) {  // We might have lost the race to set up a new cache.
UpdateTypeTestCache(zone, thread, src_instance, dst_type,
instantiator_type_arguments, function_type_arguments,
Bool::True(), cache);
}
}
arguments.SetReturn(src_instance);
@ -1460,36 +1482,45 @@ static void SaveUnlinkedCall(Zone* zone,
Isolate* isolate,
uword frame_pc,
const UnlinkedCall& unlinked_call) {
auto object_store = isolate->object_store();
if (object_store->saved_unlinked_calls() == Array::null()) {
IsolateGroup* isolate_group = isolate->group();
if (isolate_group->saved_unlinked_calls() == Array::null()) {
const auto& initial_map =
Array::Handle(zone, HashTables::New<UnlinkedCallMap>(16, Heap::kOld));
object_store->set_saved_unlinked_calls(initial_map);
isolate_group->set_saved_unlinked_calls(initial_map);
}
UnlinkedCallMap unlinked_call_map(zone, object_store->saved_unlinked_calls());
UnlinkedCallMap unlinked_call_map(zone,
isolate_group->saved_unlinked_calls());
const auto& pc = Integer::Handle(Integer::NewFromUint64(frame_pc));
const bool was_present = unlinked_call_map.UpdateOrInsert(pc, unlinked_call);
// We transition at most once out of UnlinkedCall state.
RELEASE_ASSERT(!was_present);
object_store->set_saved_unlinked_calls(unlinked_call_map.Release());
// Some other isolate might have updated unlinked_call_map[pc] too, but
// their update should be identical to ours.
UnlinkedCall& new_or_old_value = UnlinkedCall::Handle(
zone, UnlinkedCall::RawCast(
unlinked_call_map.InsertOrGetValue(pc, unlinked_call)));
RELEASE_ASSERT(new_or_old_value.raw() == unlinked_call.raw());
isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
}
#if defined(DART_PRECOMPILED_RUNTIME)
static RawUnlinkedCall* LoadUnlinkedCall(Zone* zone,
Isolate* isolate,
uword pc) {
auto object_store = isolate->object_store();
ASSERT(object_store->saved_unlinked_calls() != Array::null());
uword pc,
bool is_monomorphic_hit) {
IsolateGroup* isolate_group = isolate->group();
ASSERT(isolate_group->saved_unlinked_calls() != Array::null());
UnlinkedCallMap unlinked_call_map(zone,
isolate_group->saved_unlinked_calls());
UnlinkedCallMap unlinked_call_map(zone, object_store->saved_unlinked_calls());
const auto& pc_integer = Integer::Handle(Integer::NewFromUint64(pc));
const auto& unlinked_call = UnlinkedCall::Cast(
Object::Handle(zone, unlinked_call_map.GetOrDie(pc_integer)));
// Since we transition out of the monomorphic state only once, we should not
// need the saved unlinked call anymore.
unlinked_call_map.Remove(pc_integer);
object_store->set_saved_unlinked_calls(unlinked_call_map.Release());
// Only remove entry from unlinked_call_map if we are actually transitioning
// out of monomorphic state.
if (!is_monomorphic_hit) {
unlinked_call_map.Remove(pc_integer);
isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
}
return unlinked_call.raw();
}
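
SaveUnlinkedCall switching from UpdateOrInsert plus a was-present assertion to InsertOrGetValue reflects that, with several isolates running the same code, two of them can record the same pc -> UnlinkedCall mapping; the second writer just verifies that the value already stored is identical to its own. Reduced to plain C++ outside the VM (names and types are illustrative, not the VM's API), the idiom is:

#include <cassert>
#include <cstdint>
#include <map>
#include <mutex>

static std::map<uintptr_t, int> shared_calls;  // stands in for the shared map
static std::mutex shared_calls_mutex;          // stands in for stopped mutators

void SaveCall(uintptr_t pc, int call_data) {
  std::lock_guard<std::mutex> lock(shared_calls_mutex);
  // Another isolate may have inserted this entry already; that is fine as
  // long as the value it stored is identical to ours.
  auto inserted = shared_calls.emplace(pc, call_data);
  assert(inserted.first->second == call_data);
}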
@ -1626,8 +1657,8 @@ void SwitchableCallHandler::DoUnlinkedCall(const UnlinkedCall& unlinked) {
code = StubCode::MonomorphicSmiableCheck().raw();
}
}
CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, object,
code);
CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
thread_, caller_frame_->pc(), caller_code_, object, code);
// Return the ICData. The miss stub will jump to continue in the IC lookup
// stub.
@ -1694,11 +1725,16 @@ void SwitchableCallHandler::DoMonomorphicMiss(const Object& data) {
UNREACHABLE();
}
// The site might have just been updated to monomorphic state with the same
// class id, in which case we stay in the monomorphic state.
bool is_monomorphic_hit = old_expected_cid == receiver_.GetClassId();
String& name = String::Handle(zone_);
Array& descriptor = Array::Handle(zone_);
if (FLAG_use_bare_instructions && FLAG_dedup_instructions) {
const UnlinkedCall& unlinked_call = UnlinkedCall::Handle(
zone_, LoadUnlinkedCall(zone_, isolate_, caller_frame_->pc()));
zone_, LoadUnlinkedCall(zone_, isolate_, caller_frame_->pc(),
is_monomorphic_hit));
name = unlinked_call.target_name();
descriptor = unlinked_call.args_descriptor();
@ -1743,6 +1779,14 @@ void SwitchableCallHandler::DoMonomorphicMiss(const Object& data) {
ic_data.AddReceiverCheck(old_expected_cid, old_target);
}
if (is_monomorphic_hit) {
// The site has just been updated to monomorphic state with the same
// class id - do nothing in that case: the stub will call through the ICData.
arguments_.SetArgAt(0, StubCode::ICCallThroughCode());
arguments_.SetReturn(ic_data);
return;
}
const Function& target_function = Function::Handle(
zone_, ResolveAndAddReceiverCheck(name, descriptor, ic_data));
@ -1758,8 +1802,8 @@ void SwitchableCallHandler::DoMonomorphicMiss(const Object& data) {
cache.set_lower_limit(lower);
cache.set_upper_limit(upper);
const Code& stub = StubCode::SingleTargetCall();
CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, cache,
stub);
CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
thread_, caller_frame_->pc(), caller_code_, cache, stub);
// Return the ICData. The miss stub will jump to continue in the IC call
// stub.
arguments_.SetArgAt(0, StubCode::ICCallThroughCode());
@ -1769,8 +1813,8 @@ void SwitchableCallHandler::DoMonomorphicMiss(const Object& data) {
// Patch to call through stub.
const Code& stub = StubCode::ICCallThroughCode();
CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, ic_data,
stub);
CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
thread_, caller_frame_->pc(), caller_code_, ic_data, stub);
// Return the ICData. The miss stub will jump to continue in the IC lookup
// stub.
@ -1786,8 +1830,8 @@ void SwitchableCallHandler::DoMonomorphicMiss(const Object& data) {
const Code& stub = ic_data.is_tracking_exactness()
? StubCode::OneArgCheckInlineCacheWithExactnessCheck()
: StubCode::OneArgCheckInlineCache();
CodePatcher::PatchInstanceCallAt(caller_frame_->pc(), caller_code_, ic_data,
stub);
CodePatcher::PatchInstanceCallAtWithMutatorsStopped(
thread_, caller_frame_->pc(), caller_code_, ic_data, stub);
if (FLAG_trace_ic) {
OS::PrintErr("Instance call at %" Px
" switching to polymorphic dispatch, %s\n",
@ -1846,8 +1890,8 @@ void SwitchableCallHandler::DoSingleTargetMiss(const SingleTargetCache& data) {
// Call site is not single target, switch to call using ICData.
const Code& stub = StubCode::ICCallThroughCode();
CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, ic_data,
stub);
CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
thread_, caller_frame_->pc(), caller_code_, ic_data, stub);
// Return the ICData. The single target stub will jump to continue in the
// IC call stub.
@ -1894,20 +1938,25 @@ void SwitchableCallHandler::DoICDataMiss(const ICData& ic_data) {
Code::Handle(zone_, target_function.EnsureHasCode());
const Smi& expected_cid =
Smi::Handle(zone_, Smi::New(receiver_.GetClassId()));
CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_,
expected_cid, target_code);
CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
thread_, caller_frame_->pc(), caller_code_, expected_cid, target_code);
arguments_.SetArgAt(0, target_code);
arguments_.SetReturn(expected_cid);
} else {
ic_data.AddReceiverCheck(receiver_.GetClassId(), target_function);
// An IC entry might have been added while we waited to get into the runtime.
GrowableArray<intptr_t> class_ids(1);
class_ids.Add(receiver_.GetClassId());
if (ic_data.FindCheck(class_ids) == -1) {
ic_data.AddReceiverCheck(receiver_.GetClassId(), target_function);
}
if (number_of_checks > FLAG_max_polymorphic_checks) {
// Switch to megamorphic call.
const MegamorphicCache& cache = MegamorphicCache::Handle(
zone_, MegamorphicCacheTable::Lookup(thread_, name, descriptor));
const Code& stub = StubCode::MegamorphicCall();
CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_,
cache, stub);
CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
thread_, caller_frame_->pc(), caller_code_, cache, stub);
arguments_.SetArgAt(0, stub);
arguments_.SetReturn(cache);
} else {
@ -1948,14 +1997,14 @@ void SwitchableCallHandler::DoMegamorphicMiss(const MegamorphicCache& data) {
}
void SwitchableCallHandler::HandleMiss(const Object& old_data,
const Code& old_target) {
const Code& old_code) {
switch (old_data.GetClassId()) {
case kUnlinkedCallCid:
ASSERT(old_target.raw() == StubCode::SwitchableCallMiss().raw());
ASSERT(old_code.raw() == StubCode::SwitchableCallMiss().raw());
DoUnlinkedCall(UnlinkedCall::Cast(old_data));
break;
case kMonomorphicSmiableCallCid:
ASSERT(old_target.raw() == StubCode::MonomorphicSmiableCheck().raw());
ASSERT(old_code.raw() == StubCode::MonomorphicSmiableCheck().raw());
FALL_THROUGH;
#if defined(DART_PRECOMPILED_RUNTIME)
case kSmiCid:
@ -1967,15 +2016,15 @@ void SwitchableCallHandler::HandleMiss(const Object& old_data,
DoMonomorphicMiss(old_data);
break;
case kSingleTargetCacheCid:
ASSERT(old_target.raw() == StubCode::SingleTargetCall().raw());
ASSERT(old_code.raw() == StubCode::SingleTargetCall().raw());
DoSingleTargetMiss(SingleTargetCache::Cast(old_data));
break;
case kICDataCid:
ASSERT(old_target.raw() == StubCode::ICCallThroughCode().raw());
ASSERT(old_code.raw() == StubCode::ICCallThroughCode().raw());
DoICDataMiss(ICData::Cast(old_data));
break;
case kMegamorphicCacheCid:
ASSERT(old_target.raw() == StubCode::MegamorphicCall().raw());
ASSERT(old_code.raw() == StubCode::MegamorphicCall().raw());
DoMegamorphicMiss(MegamorphicCache::Cast(old_data));
break;
default:
@ -2008,20 +2057,24 @@ DEFINE_RUNTIME_ENTRY(SwitchableCallMiss, 2) {
Object& old_data = Object::Handle(zone);
Code& old_code = Code::Handle(zone);
thread->isolate_group()->RunWithStoppedMutators(
[&]() {
#if defined(DART_PRECOMPILED_RUNTIME)
old_data =
CodePatcher::GetSwitchableCallDataAt(caller_frame->pc(), caller_code);
old_data = CodePatcher::GetSwitchableCallDataAt(caller_frame->pc(),
caller_code);
#if defined(DEBUG)
old_code ^=
CodePatcher::GetSwitchableCallTargetAt(caller_frame->pc(), caller_code);
old_code ^= CodePatcher::GetSwitchableCallTargetAt(caller_frame->pc(),
caller_code);
#endif
#else
old_code ^= CodePatcher::GetInstanceCallAt(caller_frame->pc(), caller_code,
&old_data);
old_code ^= CodePatcher::GetInstanceCallAt(caller_frame->pc(),
caller_code, &old_data);
#endif
SwitchableCallHandler handler(thread, receiver, arguments, caller_frame,
caller_code, caller_function);
handler.HandleMiss(old_data, old_code);
SwitchableCallHandler handler(thread, receiver, arguments, caller_frame,
caller_code, caller_function);
handler.HandleMiss(old_data, old_code);
},
/*use_force_growth=*/true);
}
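
Because the whole miss path now executes inside RunWithStoppedMutators, the renamed Patch...WithMutatorsStopped entry points can rewrite a call site in shared AOT code while no other isolate in the group is running Dart code. The calling discipline, condensed (the data/stub names are illustrative):

thread->isolate_group()->RunWithStoppedMutators(
    [&]() {
      // Safe: all other mutators are stopped, so the switchable call site in
      // shared code can be repointed atomically from their point of view.
      CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
          thread, caller_frame->pc(), caller_code, new_data, new_stub);
    },
    /*use_force_growth=*/true);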
// Handles interpreted interface call cache miss.

View file

@ -4324,6 +4324,17 @@ static bool GetObjectStore(Thread* thread, JSONStream* js) {
return true;
}
static const MethodParameter* get_isolate_object_store_params[] = {
RUNNABLE_ISOLATE_PARAMETER,
NULL,
};
static bool GetIsolateObjectStore(Thread* thread, JSONStream* js) {
JSONObject jsobj(js);
thread->isolate()->isolate_object_store()->PrintToJSONObject(&jsobj);
return true;
}
static const MethodParameter* get_class_list_params[] = {
RUNNABLE_ISOLATE_PARAMETER, NULL,
};
@ -4820,6 +4831,8 @@ static const ServiceMethodDescriptor service_methods_[] = {
get_instances_params },
{ "getIsolate", GetIsolate,
get_isolate_params },
{ "_getIsolateObjectStore", GetIsolateObjectStore,
get_isolate_object_store_params },
{ "getIsolateGroup", GetIsolateGroup,
get_isolate_group_params },
{ "getMemoryUsage", GetMemoryUsage,

View file

@ -133,7 +133,7 @@ bool ServiceIsolate::IsServiceIsolate(const Isolate* isolate) {
return isolate != nullptr && isolate == isolate_;
}
bool ServiceIsolate::IsServiceIsolateDescendant(const Isolate* isolate) {
bool ServiceIsolate::IsServiceIsolateDescendant(Isolate* isolate) {
MonitorLocker ml(monitor_);
return isolate->origin_id() == origin_;
}

View file

@ -26,7 +26,7 @@ class ServiceIsolate : public AllStatic {
static bool Exists();
static bool IsRunning();
static bool IsServiceIsolate(const Isolate* isolate);
static bool IsServiceIsolateDescendant(const Isolate* isolate);
static bool IsServiceIsolateDescendant(Isolate* isolate);
static Dart_Port Port();
static void WaitForServiceIsolateStartup();
@ -97,9 +97,7 @@ class ServiceIsolate : public AllStatic {
static bool Exists() { return false; }
static bool IsRunning() { return false; }
static bool IsServiceIsolate(const Isolate* isolate) { return false; }
static bool IsServiceIsolateDescendant(const Isolate* isolate) {
return false;
}
static bool IsServiceIsolateDescendant(Isolate* isolate) { return false; }
static void Run() {}
static bool SendIsolateStartupMessage() { return false; }
static bool SendIsolateShutdownMessage() { return false; }

View file

@ -621,7 +621,8 @@ RawObject* SnapshotReader::ReadInstance(intptr_t object_id,
intptr_t result_cid = result->GetClassId();
const auto unboxed_fields =
isolate()->group()->class_table()->GetUnboxedFieldsMapAt(result_cid);
isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt(
result_cid);
while (offset < next_field_offset) {
if (unboxed_fields.Get(offset / kWordSize)) {
@ -1464,7 +1465,7 @@ void SnapshotWriter::WriteInstance(RawObject* raw,
WriteObjectImpl(cls, kAsInlinedObject);
const auto unboxed_fields =
isolate()->group()->class_table()->GetUnboxedFieldsMapAt(
isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt(
cls->ptr()->id_);
// Write out all the fields for the object.

View file

@ -581,12 +581,31 @@ RawString* Symbols::NewSymbol(Thread* thread, const StringType& str) {
table.Release();
}
if (symbol.IsNull()) {
IsolateGroup* group = thread->isolate_group();
Isolate* isolate = thread->isolate();
SafepointMutexLocker ml(isolate->symbols_mutex());
data = isolate->object_store()->symbol_table();
SymbolTable table(&key, &value, &data);
symbol ^= table.InsertNewOrGet(str);
isolate->object_store()->set_symbol_table(table.Release());
// In JIT mode the object_store lives on the isolate, not on the isolate group.
ObjectStore* object_store = group->object_store() == nullptr
? isolate->object_store()
: group->object_store();
// In AOT there is no need to worry about the background compiler, only
// about other mutators.
#if defined(DART_PRECOMPILED_RUNTIME)
group->RunWithStoppedMutators(
[&]() {
#else
SafepointRwLock* symbols_lock = group->object_store() == nullptr
? isolate->symbols_lock()
: group->symbols_lock();
SafepointWriteRwLocker sl(thread, symbols_lock);
#endif
data = object_store->symbol_table();
SymbolTable table(&key, &value, &data);
symbol ^= table.InsertNewOrGet(str);
object_store->set_symbol_table(table.Release());
#if defined(DART_PRECOMPILED_RUNTIME)
},
/*use_force_growth=*/true);
#endif
}
ASSERT(symbol.IsSymbol());
ASSERT(symbol.HasHash());
@ -610,9 +629,21 @@ RawString* Symbols::Lookup(Thread* thread, const StringType& str) {
table.Release();
}
if (symbol.IsNull()) {
IsolateGroup* group = thread->isolate_group();
Isolate* isolate = thread->isolate();
SafepointMutexLocker ml(isolate->symbols_mutex());
data = isolate->object_store()->symbol_table();
// In JIT mode the object_store lives on the isolate, not on the isolate group.
ObjectStore* object_store = group->object_store() == nullptr
? isolate->object_store()
: group->object_store();
// In AOT there is no need to worry about the background compiler, only
// about other mutators.
#if !defined(DART_PRECOMPILED_RUNTIME)
SafepointRwLock* symbols_lock = group->object_store() == nullptr
? isolate->symbols_lock()
: group->symbols_lock();
SafepointReadRwLocker sl(thread, symbols_lock);
#endif
data = object_store->symbol_table();
SymbolTable table(&key, &value, &data);
symbol ^= table.GetOrNull(str);
table.Release();

View file

@ -117,11 +117,12 @@ Dart_Isolate TestCase::CreateIsolate(const uint8_t* data_buffer,
Isolate::FlagsInitialize(&api_flags);
Dart_Isolate isolate = NULL;
if (len == 0) {
isolate =
Dart_CreateIsolateGroup(name, NULL, data_buffer, instr_buffer,
&api_flags, group_data, isolate_data, &err);
isolate = Dart_CreateIsolateGroup(
/*script_uri=*/name, /*name=*/name, data_buffer, instr_buffer,
&api_flags, group_data, isolate_data, &err);
} else {
isolate = Dart_CreateIsolateGroupFromKernel(name, NULL, data_buffer, len,
isolate = Dart_CreateIsolateGroupFromKernel(/*script_uri=*/name,
/*name=*/name, data_buffer, len,
&api_flags, group_data,
isolate_data, &err);
}
@ -157,6 +158,26 @@ void SetupCoreLibrariesForUnitTest() {
RELEASE_ASSERT(!Dart_IsError(result));
}
Dart_Isolate TestCase::CreateTestIsolateInGroup(const char* name,
Dart_Isolate parent,
void* group_data,
void* isolate_data) {
char* error;
#if defined(DART_PRECOMPILED_RUNTIME)
Isolate* result = CreateWithinExistingIsolateGroupAOT(
reinterpret_cast<Isolate*>(parent)->group(), name, &error);
#else
Isolate* result = CreateWithinExistingIsolateGroup(
reinterpret_cast<Isolate*>(parent)->group(), name, &error);
#endif
if (error != nullptr) {
OS::PrintErr("CreateTestIsolateInGroup failed: %s\n", error);
free(error);
}
EXPECT(result != nullptr);
return Api::CastIsolate(result);
}
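
The new helper lets unit tests spawn a second isolate into an existing group without going through the full Dart_CreateIsolateGroup path. A hypothetical use in a VM unit test (the test name and call sequence are illustrative, not from the commit):

VM_UNIT_TEST_CASE(IsolateGroupSpawn_Sketch) {  // hypothetical test
  Dart_Isolate parent = TestCase::CreateTestIsolate("parent");
  EXPECT(parent != nullptr);
  Dart_ExitIsolate();  // leave the parent before creating the child
  Dart_Isolate child = TestCase::CreateTestIsolateInGroup("child", parent);
  EXPECT(child != nullptr);
}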
struct TestLibEntry {
const char* url;
const char* source;

View file

@ -361,6 +361,11 @@ class TestCase : TestCaseBase {
static Dart_Isolate CreateTestIsolate(const char* name = nullptr,
void* isolate_group_data = nullptr,
void* isolate_data = nullptr);
static Dart_Isolate CreateTestIsolateInGroup(const char* name,
Dart_Isolate parent,
void* group_data = nullptr,
void* isolate_data = nullptr);
static Dart_Handle library_handler(Dart_LibraryTag tag,
Dart_Handle library,
Dart_Handle url);

View file

@ -11,6 +11,6 @@ namespace dart {
ObjectPointerVisitor::ObjectPointerVisitor(IsolateGroup* isolate_group)
: isolate_group_(isolate_group),
gc_root_type_("unknown"),
shared_class_table_(isolate_group->class_table()) {}
shared_class_table_(isolate_group->shared_class_table()) {}
} // namespace dart