[vm] Clean up ClassTable

* Merge ClassTable and SharedClassTable back together;
* Simplify handling of multiple arrays growing in sync (see the
  sketch after this list);
* Refactor how reload deals with ClassTable.
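
As an illustration of the second point: the new CidIndexedTable
template added to runtime/vm/class_table.h in this change stores
several parallel columns indexed by cid and grows them all together.
A minimal usage sketch, where |cls| and |instance_size| are
hypothetical locals and the column indices come from the enum in the
diff below:

  ClassTableAllocator allocator;
  // Two columns indexed by cid: class pointer and instance size.
  CidIndexedTable<ClassPtr, uint32_t> table(&allocator);

  bool did_grow = false;
  const intptr_t cid = table.AddRow(&did_grow);  // grows every column
  table.At<kClassIndex>(cid) = cls.ptr();        // column 0: ClassPtr
  table.At<kSizeIndex>(cid) = instance_size;     // column 1: uint32_t
  // If did_grow is true, the old backing arrays were handed to the
  // allocator, which frees them later when the world is stopped.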

The last change is the most important because it makes it
much easier to reason about the code. We move away from
copying bits and pieces of the class table and shared
class table into reload contexts.

Having two class table fields in the isolate group makes the
code easier to reason about. One field contains the program
class table (the one modified by the kernel loader and accessed
by various program structure cid lookups), the other the heap
walk class table (used by GC visitors). Normally these two
fields point to the same class table, but during hot reload we
temporarily split them apart: the original class table is kept
as the heap walk class table, while the program class table is
replaced by a clone, which the reload then updates.

If the reload succeeds, we drop the original class table and
install the program class table as the heap walk one.

If the reload fails, we drop the program class table and
restore the original one from the heap walk table.
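
Sketched in code (simplified: the real methods are
IsolateGroup::CloneClassTableForReload, DropOriginalClassTable and
RestoreOriginalClassTable, referenced from the class_table.h comments
below; the field and allocator names here are assumptions):

  void IsolateGroup::CloneClassTableForReload() {
    // GC keeps walking the original table; the program gets a clone.
    heap_walk_class_table_ = class_table_;
    class_table_ = class_table_->Clone();
  }

  void IsolateGroup::DropOriginalClassTable() {  // reload succeeded
    class_table_allocator_.Free(heap_walk_class_table_);  // deferred
    heap_walk_class_table_ = class_table_;
  }

  void IsolateGroup::RestoreOriginalClassTable() {  // reload failed
    class_table_allocator_.Free(class_table_);  // deferred
    class_table_ = heap_walk_class_table_;
  }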

TEST=ci

Cq-Include-Trybots: luci.dart.try:vm-kernel-reload-linux-release-x64-try,vm-kernel-reload-linux-debug-x64-try,vm-kernel-reload-rollback-linux-debug-x64-try,vm-kernel-reload-rollback-linux-release-x64-try,vm-kernel-linux-debug-x64-try,vm-kernel-precomp-tsan-linux-release-x64-try,vm-kernel-tsan-linux-release-x64-try,vm-kernel-precomp-asan-linux-release-x64-try,vm-kernel-asan-linux-release-x64-try
Change-Id: I8b66259fcc474dea7dd2af063e4772df99be06c4
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/258361
Commit-Queue: Slava Egorov <vegorov@google.com>
Reviewed-by: Ryan Macnak <rmacnak@google.com>

@@ -8,6 +8,14 @@
namespace dart {
void* calloc(size_t n, size_t size) {
void* result = ::calloc(n, size);
if (result == nullptr) {
OUT_OF_MEMORY();
}
return result;
}
void* malloc(size_t size) {
void* result = ::malloc(size);
if (result == nullptr) {

@@ -10,6 +10,10 @@
namespace dart {
void* calloc(size_t n, size_t size);
void* malloc(size_t size);
void* realloc(void* ptr, size_t size);
// Stack allocated objects subclass from this base class. Objects of this type
// cannot be allocated on either the C or object heaps. Destructors for objects
// of this type will not be run unless the stack is unwound through normal
@@ -41,19 +45,11 @@ class MallocAllocated {
// check malloc/new/new[] are paired with free/delete/delete[] respectively.
#if !defined(USING_ADDRESS_SANITIZER)
void* operator new(size_t size) {
void* result = ::malloc(size);
if (result == nullptr) {
OUT_OF_MEMORY();
}
return result;
return dart::malloc(size);
}
void* operator new[](size_t size) {
void* result = ::malloc(size);
if (result == nullptr) {
OUT_OF_MEMORY();
}
return result;
return dart::malloc(size);
}
void operator delete(void* pointer) { ::free(pointer); }
@@ -62,9 +58,6 @@ class MallocAllocated {
#endif
};
void* malloc(size_t size);
void* realloc(void* ptr, size_t size);
} // namespace dart
#endif // RUNTIME_PLATFORM_ALLOCATION_H_

@@ -218,7 +218,7 @@ static UnboxedFieldBitmap CalculateTargetUnboxedFieldsBitmap(
Serializer* s,
intptr_t class_id) {
const auto unboxed_fields_bitmap_host =
s->isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(class_id);
s->isolate_group()->class_table()->GetUnboxedFieldsMapAt(class_id);
UnboxedFieldBitmap unboxed_fields_bitmap;
if (unboxed_fields_bitmap_host.IsEmpty() ||
@@ -438,7 +438,7 @@ class ClassDeserializationCluster : public DeserializationCluster {
ClassTable* table = d_->isolate_group()->class_table();
#if defined(DART_PRECOMPILED_RUNTIME)
auto shared_class_table = d_->isolate_group()->shared_class_table();
auto class_table = d_->isolate_group()->class_table();
#endif
for (intptr_t id = start_index_, n = stop_index_; id < n; id++) {
ClassPtr cls = static_cast<ClassPtr>(d.Ref(id));
@@ -481,7 +481,7 @@ class ClassDeserializationCluster : public DeserializationCluster {
#if defined(DART_PRECOMPILED_RUNTIME)
if (!ClassTable::IsTopLevelCid(class_id)) {
const UnboxedFieldBitmap unboxed_fields_map(d.ReadUnsigned64());
shared_class_table->SetUnboxedFieldsMapAt(class_id, unboxed_fields_map);
class_table->SetUnboxedFieldsMapAt(class_id, unboxed_fields_map);
}
#endif
}
@@ -3823,7 +3823,7 @@ class InstanceSerializationCluster : public SerializationCluster {
const intptr_t next_field_offset = host_next_field_offset_in_words_
<< kCompressedWordSizeLog2;
const auto unboxed_fields_bitmap =
s->isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(cid_);
s->isolate_group()->class_table()->GetUnboxedFieldsMapAt(cid_);
intptr_t offset = Instance::NextFieldOffset();
while (offset < next_field_offset) {
// Skips unboxed fields
@@ -3861,7 +3861,7 @@ class InstanceSerializationCluster : public SerializationCluster {
const intptr_t count = objects_.length();
s->WriteUnsigned64(CalculateTargetUnboxedFieldsBitmap(s, cid_).Value());
const auto unboxed_fields_bitmap =
s->isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(cid_);
s->isolate_group()->class_table()->GetUnboxedFieldsMapAt(cid_);
for (intptr_t i = 0; i < count; i++) {
InstancePtr instance = objects_[i];

@@ -1551,12 +1551,10 @@ void ClassFinalizer::RemapClassIds(intptr_t* old_to_new_cid) {
// The [HeapIterationScope] also safepoints all threads.
HeapIterationScope his(T);
IG->shared_class_table()->Remap(old_to_new_cid);
IG->set_remapping_cids(true);
// Update the class table. Do it before rewriting cids in headers, as
// the heap walkers load an object's size *after* calling the visitor.
IG->class_table()->Remap(old_to_new_cid);
IG->set_remapping_cids(true);
// Rewrite cids in headers and cids in Classes, Fields, Types and
// TypeParameters.

@@ -20,461 +20,154 @@ namespace dart {
DEFINE_FLAG(bool, print_class_table, false, "Print initial class table.");
SharedClassTable::SharedClassTable()
: top_(kNumPredefinedCids),
capacity_(0),
old_tables_(new MallocGrowableArray<void*>()) {
ClassTable::ClassTable(ClassTableAllocator* allocator)
: allocator_(allocator),
classes_(allocator),
top_level_classes_(allocator) {
if (Dart::vm_isolate() == NULL) {
ASSERT(kInitialCapacity >= kNumPredefinedCids);
capacity_ = kInitialCapacity;
// Note that [calloc] will zero-initialize the memory.
table_.store(reinterpret_cast<RelaxedAtomic<intptr_t>*>(
calloc(capacity_, sizeof(RelaxedAtomic<intptr_t>))));
} else {
// Duplicate the class table from the VM isolate.
auto vm_shared_class_table = Dart::vm_isolate_group()->shared_class_table();
capacity_ = vm_shared_class_table->capacity_;
// Note that [calloc] will zero-initialize the memory.
RelaxedAtomic<intptr_t>* table = reinterpret_cast<RelaxedAtomic<intptr_t>*>(
calloc(capacity_, sizeof(RelaxedAtomic<intptr_t>)));
// The following cids don't have a corresponding class object in Dart code.
// We therefore need to initialize them eagerly.
COMPILE_ASSERT(kFirstInternalOnlyCid == kObjectCid + 1);
for (intptr_t i = kObjectCid; i <= kLastInternalOnlyCid; i++) {
table[i] = vm_shared_class_table->SizeAt(i);
}
table[kTypeArgumentsCid] = vm_shared_class_table->SizeAt(kTypeArgumentsCid);
table[kFreeListElement] = vm_shared_class_table->SizeAt(kFreeListElement);
table[kForwardingCorpse] = vm_shared_class_table->SizeAt(kForwardingCorpse);
table[kDynamicCid] = vm_shared_class_table->SizeAt(kDynamicCid);
table[kVoidCid] = vm_shared_class_table->SizeAt(kVoidCid);
table_.store(table);
}
#if defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
// Note that [calloc] will zero-initialize the memory.
unboxed_fields_map_ = static_cast<UnboxedFieldBitmap*>(
calloc(capacity_, sizeof(UnboxedFieldBitmap)));
#endif // defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
#ifndef PRODUCT
// Note that [calloc] will zero-initialize the memory.
trace_allocation_table_.store(
static_cast<uint8_t*>(calloc(capacity_, sizeof(uint8_t))));
#endif // !PRODUCT
}
SharedClassTable::~SharedClassTable() {
if (old_tables_ != NULL) {
FreeOldTables();
delete old_tables_;
}
free(table_.load());
free(unboxed_fields_map_);
NOT_IN_PRODUCT(free(trace_allocation_table_.load()));
}
void ClassTable::set_table(ClassPtr* table) {
// We don't have to stop mutators, since the old table is the prefix of the
// new table. But we should ensure that all writes to the current table are
// visible once the new table is visible.
table_.store(table);
IsolateGroup::Current()->set_cached_class_table_table(table);
}
ClassTable::ClassTable(SharedClassTable* shared_class_table)
: top_(kNumPredefinedCids),
capacity_(0),
tlc_top_(0),
tlc_capacity_(0),
table_(nullptr),
tlc_table_(nullptr),
old_class_tables_(new MallocGrowableArray<ClassPtr*>()),
shared_class_table_(shared_class_table) {
if (Dart::vm_isolate() == NULL) {
ASSERT(kInitialCapacity >= kNumPredefinedCids);
capacity_ = kInitialCapacity;
// Note that [calloc] will zero-initialize the memory.
// Don't use set_table because caller is supposed to set up isolates
// cached copy when constructing ClassTable. Isolate::Current might not
// be available at this point yet.
table_.store(static_cast<ClassPtr*>(calloc(capacity_, sizeof(ClassPtr))));
classes_.SetNumCidsAndCapacity(kNumPredefinedCids, kInitialCapacity);
} else {
// Duplicate the class table from the VM isolate.
ClassTable* vm_class_table = Dart::vm_isolate_group()->class_table();
capacity_ = vm_class_table->capacity_;
// Note that [calloc] will zero-initialize the memory.
ClassPtr* table =
static_cast<ClassPtr*>(calloc(capacity_, sizeof(ClassPtr)));
classes_.SetNumCidsAndCapacity(kNumPredefinedCids,
vm_class_table->classes_.capacity());
const auto copy_info_for_cid = [&](intptr_t cid) {
classes_.At<kClassIndex>(cid) = vm_class_table->At(cid);
classes_.At<kSizeIndex>(cid) = vm_class_table->SizeAt(cid);
};
// The following cids don't have a corresponding class object in Dart code.
// We therefore need to initialize them eagerly.
COMPILE_ASSERT(kFirstInternalOnlyCid == kObjectCid + 1);
for (intptr_t i = kObjectCid; i <= kLastInternalOnlyCid; i++) {
table[i] = vm_class_table->At(i);
copy_info_for_cid(i);
}
table[kTypeArgumentsCid] = vm_class_table->At(kTypeArgumentsCid);
table[kFreeListElement] = vm_class_table->At(kFreeListElement);
table[kForwardingCorpse] = vm_class_table->At(kForwardingCorpse);
table[kDynamicCid] = vm_class_table->At(kDynamicCid);
table[kVoidCid] = vm_class_table->At(kVoidCid);
// Don't use set_table because caller is supposed to set up isolates
// cached copy when constructing ClassTable. Isolate::Current might not
// be available at this point yet.
table_.store(table);
copy_info_for_cid(kTypeArgumentsCid);
copy_info_for_cid(kFreeListElement);
copy_info_for_cid(kForwardingCorpse);
copy_info_for_cid(kDynamicCid);
copy_info_for_cid(kVoidCid);
}
UpdateCachedAllocationTracingStateTablePointer();
}
ClassTable::~ClassTable() {
if (old_class_tables_ != nullptr) {
FreeOldTables();
delete old_class_tables_;
}
free(table_.load());
free(tlc_table_.load());
}
void ClassTable::AddOldTable(ClassPtr* old_class_table) {
ASSERT(Thread::Current()->IsMutatorThread());
old_class_tables_->Add(old_class_table);
}
void ClassTable::FreeOldTables() {
while (old_class_tables_->length() > 0) {
free(old_class_tables_->RemoveLast());
}
}
void SharedClassTable::AddOldTable(intptr_t* old_table) {
ASSERT(Thread::Current()->IsMutatorThread());
old_tables_->Add(old_table);
}
void SharedClassTable::FreeOldTables() {
while (old_tables_->length() > 0) {
free(old_tables_->RemoveLast());
}
}
void ClassTable::Register(const Class& cls) {
ASSERT(Thread::Current()->IsMutatorThread());
const classid_t cid = cls.id();
ASSERT(cls.id() == kIllegalCid || cls.id() < kNumPredefinedCids);
bool did_grow = false;
const classid_t cid =
cls.id() != kIllegalCid ? cls.id() : classes_.AddRow(&did_grow);
ASSERT(!IsTopLevelCid(cid));
// During the transition period we would like [SharedClassTable] to operate in
// parallel to [ClassTable].
const intptr_t instance_size =
cls.is_abstract() ? 0 : Class::host_instance_size(cls.ptr());
const intptr_t expected_cid =
shared_class_table_->Register(cid, instance_size);
cls.set_id(cid);
classes_.At<kClassIndex>(cid) = cls.ptr();
classes_.At<kSizeIndex>(cid) = static_cast<int32_t>(instance_size);
if (cid != kIllegalCid) {
ASSERT(cid > 0 && cid < kNumPredefinedCids && cid < top_);
ASSERT(table_.load()[cid] == nullptr);
table_.load()[cid] = cls.ptr();
if (did_grow) {
IsolateGroup::Current()->set_cached_class_table_table(
classes_.GetColumn<kClassIndex>());
UpdateCachedAllocationTracingStateTablePointer();
} else {
if (top_ == capacity_) {
const intptr_t new_capacity = capacity_ + kCapacityIncrement;
Grow(new_capacity);
}
ASSERT(top_ < capacity_);
cls.set_id(top_);
table_.load()[top_] = cls.ptr();
top_++; // Increment next index.
std::atomic_thread_fence(std::memory_order_release);
}
ASSERT(expected_cid == cls.id());
}
void ClassTable::RegisterTopLevel(const Class& cls) {
if (top_ >= std::numeric_limits<classid_t>::max()) {
if (top_level_classes_.num_cids() >= std::numeric_limits<classid_t>::max()) {
FATAL1("Fatal error in ClassTable::RegisterTopLevel: invalid index %" Pd
"\n",
top_);
top_level_classes_.num_cids());
}
ASSERT(Thread::Current()->IsMutatorThread());
ASSERT(cls.id() == kIllegalCid);
const intptr_t index = cls.id();
ASSERT(index == kIllegalCid);
if (tlc_top_ == tlc_capacity_) {
const intptr_t new_capacity = tlc_capacity_ + kCapacityIncrement;
GrowTopLevel(new_capacity);
}
ASSERT(tlc_top_ < tlc_capacity_);
cls.set_id(ClassTable::CidFromTopLevelIndex(tlc_top_));
tlc_table_.load()[tlc_top_] = cls.ptr();
tlc_top_++; // Increment next index.
}
intptr_t SharedClassTable::Register(intptr_t index, intptr_t size) {
if (!Class::is_valid_id(top_)) {
FATAL1("Fatal error in SharedClassTable::Register: invalid index %" Pd "\n",
top_);
}
ASSERT(Thread::Current()->IsMutatorThread());
if (index != kIllegalCid) {
// We are registring the size of a predefined class.
ASSERT(index > 0 && index < kNumPredefinedCids);
SetSizeAt(index, size);
return index;
} else {
ASSERT(size == 0);
if (top_ == capacity_) {
const intptr_t new_capacity = capacity_ + kCapacityIncrement;
Grow(new_capacity);
}
ASSERT(top_ < capacity_);
table_.load()[top_] = size;
return top_++; // Increment next index.
}
bool did_grow = false;
const intptr_t index = top_level_classes_.AddRow(&did_grow);
cls.set_id(ClassTable::CidFromTopLevelIndex(index));
top_level_classes_.At<kClassIndex>(index) = cls.ptr();
}
void ClassTable::AllocateIndex(intptr_t index) {
bool did_grow = false;
if (IsTopLevelCid(index)) {
AllocateTopLevelIndex(index);
top_level_classes_.AllocateIndex(IndexFromTopLevelCid(index), &did_grow);
return;
}
// This is called by a snapshot reader.
shared_class_table_->AllocateIndex(index);
ASSERT(Class::is_valid_id(index));
if (index >= capacity_) {
const intptr_t new_capacity = index + kCapacityIncrement;
Grow(new_capacity);
classes_.AllocateIndex(index, &did_grow);
if (did_grow) {
IsolateGroup::Current()->set_cached_class_table_table(table());
UpdateCachedAllocationTracingStateTablePointer();
}
ASSERT(table_.load()[index] == nullptr);
if (index >= top_) {
top_ = index + 1;
}
ASSERT(top_ == shared_class_table_->top_);
ASSERT(capacity_ == shared_class_table_->capacity_);
}
void ClassTable::AllocateTopLevelIndex(intptr_t cid) {
ASSERT(IsTopLevelCid(cid));
const intptr_t tlc_index = IndexFromTopLevelCid(cid);
if (tlc_index >= tlc_capacity_) {
const intptr_t new_capacity = tlc_index + kCapacityIncrement;
GrowTopLevel(new_capacity);
}
ASSERT(tlc_table_.load()[tlc_index] == nullptr);
if (tlc_index >= tlc_top_) {
tlc_top_ = tlc_index + 1;
}
}
void ClassTable::Grow(intptr_t new_capacity) {
ASSERT(new_capacity > capacity_);
auto old_table = table_.load();
auto new_table = static_cast<ClassPtr*>(
malloc(new_capacity * sizeof(ClassPtr))); // NOLINT
intptr_t i;
for (i = 0; i < capacity_; i++) {
// Don't use memmove, which changes this from a relaxed atomic operation
// to a non-atomic operation.
new_table[i] = old_table[i];
}
for (; i < new_capacity; i++) {
// Don't use memset, which changes this from a relaxed atomic operation
// to a non-atomic operation.
new_table[i] = 0;
}
old_class_tables_->Add(old_table);
set_table(new_table);
capacity_ = new_capacity;
}
void ClassTable::GrowTopLevel(intptr_t new_capacity) {
ASSERT(new_capacity > tlc_capacity_);
auto old_table = tlc_table_.load();
auto new_table = static_cast<ClassPtr*>(
malloc(new_capacity * sizeof(ClassPtr))); // NOLINT
intptr_t i;
for (i = 0; i < tlc_capacity_; i++) {
// Don't use memmove, which changes this from a relaxed atomic operation
// to a non-atomic operation.
new_table[i] = old_table[i];
}
for (; i < new_capacity; i++) {
// Don't use memset, which changes this from a relaxed atomic operation
// to a non-atomic operation.
new_table[i] = 0;
}
old_class_tables_->Add(old_table);
tlc_table_.store(new_table);
tlc_capacity_ = new_capacity;
}
void SharedClassTable::AllocateIndex(intptr_t index) {
// This is called by a snapshot reader.
ASSERT(Class::is_valid_id(index));
if (index >= capacity_) {
const intptr_t new_capacity = index + kCapacityIncrement;
Grow(new_capacity);
}
ASSERT(table_.load()[index] == 0);
if (index >= top_) {
top_ = index + 1;
}
}
void SharedClassTable::Grow(intptr_t new_capacity) {
ASSERT(new_capacity >= capacity_);
RelaxedAtomic<intptr_t>* old_table = table_.load();
RelaxedAtomic<intptr_t>* new_table =
reinterpret_cast<RelaxedAtomic<intptr_t>*>(
malloc(new_capacity * sizeof(RelaxedAtomic<intptr_t>))); // NOLINT
intptr_t i;
for (i = 0; i < capacity_; i++) {
// Don't use memmove, which changes this from a relaxed atomic operation
// to a non-atomic operation.
new_table[i] = old_table[i];
}
for (; i < new_capacity; i++) {
// Don't use memset, which changes this from a relaxed atomic operation
// to a non-atomic operation.
new_table[i] = 0;
}
#if !defined(PRODUCT)
auto old_trace_table = trace_allocation_table_.load();
auto new_trace_table =
static_cast<uint8_t*>(malloc(new_capacity * sizeof(uint8_t))); // NOLINT
for (i = 0; i < capacity_; i++) {
// Don't use memmove, which changes this from a relaxed atomic operation
// to a non-atomic operation.
new_trace_table[i] = old_trace_table[i];
}
for (; i < new_capacity; i++) {
// Don't use memset, which changes this from a relaxed atomic operation
// to a non-atomic operation.
new_trace_table[i] = 0;
}
#endif
old_tables_->Add(old_table);
table_.store(new_table);
NOT_IN_PRODUCT(old_tables_->Add(old_trace_table));
NOT_IN_PRODUCT(trace_allocation_table_.store(new_trace_table));
#if defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
auto old_unboxed_fields_map = unboxed_fields_map_;
auto new_unboxed_fields_map = static_cast<UnboxedFieldBitmap*>(
malloc(new_capacity * sizeof(UnboxedFieldBitmap)));
for (i = 0; i < capacity_; i++) {
// Don't use memmove, which changes this from a relaxed atomic operation
// to a non-atomic operation.
new_unboxed_fields_map[i] = old_unboxed_fields_map[i];
}
for (; i < new_capacity; i++) {
// Don't use memset, which changes this from a relaxed atomic operation
// to a non-atomic operation.
new_unboxed_fields_map[i] = UnboxedFieldBitmap(0);
}
old_tables_->Add(old_unboxed_fields_map);
unboxed_fields_map_ = new_unboxed_fields_map;
#endif // defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
capacity_ = new_capacity;
}
void ClassTable::Unregister(intptr_t cid) {
ASSERT(!IsTopLevelCid(cid));
shared_class_table_->Unregister(cid);
table_.load()[cid] = nullptr;
}
void ClassTable::UnregisterTopLevel(intptr_t cid) {
ASSERT(IsTopLevelCid(cid));
const intptr_t tlc_index = IndexFromTopLevelCid(cid);
tlc_table_.load()[tlc_index] = nullptr;
}
void SharedClassTable::Unregister(intptr_t index) {
table_.load()[index] = 0;
#if defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
unboxed_fields_map_[index].Reset();
#endif // defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
top_level_classes_.At<kClassIndex>(tlc_index) = nullptr;
}
void ClassTable::Remap(intptr_t* old_to_new_cid) {
ASSERT(Thread::Current()->IsAtSafepoint(SafepointLevel::kGCAndDeopt));
const intptr_t num_cids = NumCids();
std::unique_ptr<ClassPtr[]> cls_by_old_cid(new ClassPtr[num_cids]);
auto* table = table_.load();
memmove(cls_by_old_cid.get(), table, sizeof(ClassPtr) * num_cids);
for (intptr_t i = 0; i < num_cids; i++) {
table[old_to_new_cid[i]] = cls_by_old_cid[i];
}
}
void SharedClassTable::Remap(intptr_t* old_to_new_cid) {
ASSERT(Thread::Current()->IsAtSafepoint(SafepointLevel::kGCAndDeopt));
const intptr_t num_cids = NumCids();
std::unique_ptr<intptr_t[]> size_by_old_cid(new intptr_t[num_cids]);
auto* table = table_.load();
for (intptr_t i = 0; i < num_cids; i++) {
size_by_old_cid[i] = table[i];
}
for (intptr_t i = 0; i < num_cids; i++) {
table[old_to_new_cid[i]] = size_by_old_cid[i];
}
#if defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
std::unique_ptr<UnboxedFieldBitmap[]> unboxed_fields_by_old_cid(
new UnboxedFieldBitmap[num_cids]);
for (intptr_t i = 0; i < num_cids; i++) {
unboxed_fields_by_old_cid[i] = unboxed_fields_map_[i];
}
for (intptr_t i = 0; i < num_cids; i++) {
unboxed_fields_map_[old_to_new_cid[i]] = unboxed_fields_by_old_cid[i];
}
#endif // defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
classes_.Remap(old_to_new_cid);
}
void ClassTable::VisitObjectPointers(ObjectPointerVisitor* visitor) {
ASSERT(visitor != NULL);
visitor->set_gc_root_type("class table");
if (top_ != 0) {
auto* table = table_.load();
const auto visit = [&](ClassPtr* table, intptr_t num_cids) {
if (num_cids == 0) {
return;
}
ObjectPtr* from = reinterpret_cast<ObjectPtr*>(&table[0]);
ObjectPtr* to = reinterpret_cast<ObjectPtr*>(&table[top_ - 1]);
ObjectPtr* to = reinterpret_cast<ObjectPtr*>(&table[num_cids - 1]);
visitor->VisitPointers(from, to);
}
if (tlc_top_ != 0) {
auto* tlc_table = tlc_table_.load();
ObjectPtr* from = reinterpret_cast<ObjectPtr*>(&tlc_table[0]);
ObjectPtr* to = reinterpret_cast<ObjectPtr*>(&tlc_table[tlc_top_ - 1]);
visitor->VisitPointers(from, to);
}
};
visit(classes_.GetColumn<kClassIndex>(), classes_.num_cids());
visit(top_level_classes_.GetColumn<kClassIndex>(),
top_level_classes_.num_cids());
visitor->clear_gc_root_type();
}
void ClassTable::CopySizesFromClassObjects() {
ASSERT(kIllegalCid == 0);
for (intptr_t i = 1; i < top_; i++) {
SetAt(i, At(i));
for (intptr_t i = 1; i < classes_.num_cids(); i++) {
UpdateClassSize(i, classes_.At<kClassIndex>(i));
}
}
void ClassTable::SetAt(intptr_t cid, ClassPtr raw_cls) {
if (IsTopLevelCid(cid)) {
top_level_classes_.At<kClassIndex>(IndexFromTopLevelCid(cid)) = raw_cls;
return;
}
// This is called by snapshot reader and class finalizer.
UpdateClassSize(cid, raw_cls);
classes_.At<kClassIndex>(cid) = raw_cls;
}
void ClassTable::UpdateClassSize(intptr_t cid, ClassPtr raw_cls) {
ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
ASSERT(!IsTopLevelCid(cid)); // "top-level" classes don't get instantiated
const intptr_t size =
raw_cls == nullptr ? 0 : Class::host_instance_size(raw_cls);
classes_.At<kSizeIndex>(cid) = static_cast<int32_t>(size);
}
void ClassTable::Validate() {
Class& cls = Class::Handle();
for (intptr_t cid = kNumPredefinedCids; cid < top_; cid++) {
for (intptr_t cid = kNumPredefinedCids; cid < classes_.num_cids(); cid++) {
// Some of the class table entries may be NULL as we create some
// top level classes but do not add them to the list of anonymous
// classes in a library if there are no top level fields or functions.
@@ -501,7 +194,7 @@ void ClassTable::Print() {
Class& cls = Class::Handle();
String& name = String::Handle();
for (intptr_t i = 1; i < top_; i++) {
for (intptr_t i = 1; i < classes_.num_cids(); i++) {
if (!HasValidClassAt(i)) {
continue;
}
@@ -513,27 +206,6 @@ void ClassTable::Print() {
}
}
void ClassTable::SetAt(intptr_t cid, ClassPtr raw_cls) {
if (IsTopLevelCid(cid)) {
tlc_table_.load()[IndexFromTopLevelCid(cid)] = raw_cls;
return;
}
// This is called by snapshot reader and class finalizer.
ASSERT(cid < capacity_);
UpdateClassSize(cid, raw_cls);
table_.load()[cid] = raw_cls;
}
void ClassTable::UpdateClassSize(intptr_t cid, ClassPtr raw_cls) {
ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
ASSERT(!IsTopLevelCid(cid)); // "top-level" classes don't get instantiated
ASSERT(cid < capacity_);
const intptr_t size =
raw_cls == nullptr ? 0 : Class::host_instance_size(raw_cls);
shared_class_table_->SetSizeAt(cid, size);
}
#if defined(DART_PRECOMPILER)
void ClassTable::PrintObjectLayout(const char* filename) {
Class& cls = Class::Handle();
@@ -542,7 +214,7 @@ void ClassTable::PrintObjectLayout(const char* filename) {
JSONWriter js;
js.OpenArray();
for (intptr_t i = ClassId::kObjectCid; i < top_; i++) {
for (intptr_t i = ClassId::kObjectCid; i < classes_.num_cids(); i++) {
if (!HasValidClassAt(i)) {
continue;
}
@@ -604,7 +276,7 @@ void ClassTable::PrintToJSONObject(JSONObject* object) {
object->AddProperty("type", "ClassList");
{
JSONArray members(object, "classes");
for (intptr_t i = ClassId::kObjectCid; i < top_; i++) {
for (intptr_t i = ClassId::kObjectCid; i < classes_.num_cids(); i++) {
if (HasValidClassAt(i)) {
cls = At(i);
members.AddValue(cls);
@@ -613,11 +285,6 @@ void ClassTable::PrintToJSONObject(JSONObject* object) {
}
}
intptr_t SharedClassTable::ClassOffsetFor(intptr_t cid) {
return cid * sizeof(uint8_t); // NOLINT
}
void ClassTable::AllocationProfilePrintJSON(JSONStream* stream, bool internal) {
Isolate* isolate = Isolate::Current();
ASSERT(isolate != NULL);
@@ -659,7 +326,7 @@ void ClassTable::AllocationProfilePrintJSON(JSONStream* stream, bool internal) {
{
JSONArray arr(&obj, "members");
Class& cls = Class::Handle();
for (intptr_t i = 3; i < top_; i++) {
for (intptr_t i = 3; i < classes_.num_cids(); i++) {
if (!HasValidClassAt(i)) continue;
cls = At(i);
@@ -694,4 +361,36 @@ void ClassTable::AllocationProfilePrintJSON(JSONStream* stream, bool internal) {
}
#endif // !PRODUCT
ClassTableAllocator::ClassTableAllocator()
: pending_freed_(new MallocGrowableArray<std::pair<void*, Deleter>>()) {}
ClassTableAllocator::~ClassTableAllocator() {
FreePending();
delete pending_freed_;
}
void ClassTableAllocator::Free(ClassTable* ptr) {
if (ptr != nullptr) {
pending_freed_->Add(std::make_pair(
ptr, [](void* ptr) { delete static_cast<ClassTable*>(ptr); }));
}
}
void ClassTableAllocator::Free(void* ptr) {
if (ptr != nullptr) {
pending_freed_->Add(std::make_pair(ptr, nullptr));
}
}
void ClassTableAllocator::FreePending() {
while (!pending_freed_->is_empty()) {
auto [ptr, deleter] = pending_freed_->RemoveLast();
if (deleter == nullptr) {
free(ptr);
} else {
deleter(ptr);
}
}
}
} // namespace dart

@@ -6,6 +6,8 @@
#define RUNTIME_VM_CLASS_TABLE_H_
#include <memory>
#include <tuple>
#include <utility>
#include "platform/allocation.h"
#include "platform/assert.h"
@@ -24,8 +26,6 @@ class Class;
class ClassTable;
class Isolate;
class IsolateGroup;
class IsolateGroupReloadContext;
class ProgramReloadContext;
class JSONArray;
class JSONObject;
class JSONStream;
@@ -33,8 +33,13 @@ template <typename T>
class MallocGrowableArray;
class ObjectPointerVisitor;
// Wraps a 64-bit integer to represent the bitmap of unboxed fields
// stored in the shared class table.
// A 64-bit bitmap describing unboxed fields in a class.
//
// There is a bit for each word in an instance of the class.
//
// Words corresponding to set bits must be ignored by the GC because they
// don't contain pointers. All words beyond the first 64 words of an object
// are expected to contain pointers.
class UnboxedFieldBitmap {
public:
UnboxedFieldBitmap() : bitmap_(0) {}
@@ -62,295 +67,289 @@ class UnboxedFieldBitmap {
uint64_t bitmap_;
};
// Registry of all known classes and their sizes.
// Allocator used to manage memory for ClassTable arrays and ClassTable
// objects themselves.
//
// The GC will only need the information in this shared class table to scan
// object pointers.
class SharedClassTable {
// This allocator provides delayed free functionality: normally class tables
// can't be freed unless all mutator and helper threads are stopped because
// some of these threads might be holding a pointer to a table which we
// want to free. Instead of stopping the world whenever we need to free
// a table (e.g. freeing old table after growing) we delay freeing until an
// occasional GC which will need to stop the world anyway.
class ClassTableAllocator : public ValueObject {
public:
SharedClassTable();
~SharedClassTable();
ClassTableAllocator();
~ClassTableAllocator();
// Thread-safe.
intptr_t SizeAt(intptr_t index) const {
ASSERT(IsValidIndex(index));
return table_.load()[index];
}
bool HasValidClassAt(intptr_t index) const {
ASSERT(IsValidIndex(index));
ASSERT(table_.load()[index] >= 0);
return table_.load()[index] != 0;
}
void SetSizeAt(intptr_t index, intptr_t size) {
ASSERT(IsValidIndex(index));
// Ensure we never change size for a given cid from one non-zero size to
// another non-zero size.
intptr_t old_size = 0;
if (!table_.load()[index].compare_exchange_strong(old_size, size)) {
RELEASE_ASSERT(old_size == size);
}
}
bool IsValidIndex(intptr_t index) const { return index > 0 && index < top_; }
intptr_t NumCids() const { return top_; }
intptr_t Capacity() const { return capacity_; }
UnboxedFieldBitmap GetUnboxedFieldsMapAt(intptr_t index) const {
ASSERT(IsValidIndex(index));
return FLAG_precompiled_mode ? unboxed_fields_map_[index]
: UnboxedFieldBitmap();
}
void SetUnboxedFieldsMapAt(intptr_t index,
UnboxedFieldBitmap unboxed_fields_map) {
ASSERT(IsValidIndex(index));
ASSERT(unboxed_fields_map_[index].IsEmpty());
unboxed_fields_map_[index] = unboxed_fields_map;
}
// Used to drop recently added classes.
void SetNumCids(intptr_t num_cids) {
ASSERT(num_cids <= top_);
top_ = num_cids;
}
#if !defined(PRODUCT)
void SetTraceAllocationFor(intptr_t cid, bool trace) {
ASSERT(cid > 0);
ASSERT(cid < top_);
trace_allocation_table_.load()[cid] = trace ? 1 : 0;
}
bool TraceAllocationFor(intptr_t cid);
void SetCollectInstancesFor(intptr_t cid, bool trace) {
ASSERT(cid > 0);
ASSERT(cid < top_);
if (trace) {
trace_allocation_table_.load()[cid] |= 2;
} else {
trace_allocation_table_.load()[cid] &= ~2;
}
}
bool CollectInstancesFor(intptr_t cid) {
ASSERT(cid > 0);
ASSERT(cid < top_);
return (trace_allocation_table_.load()[cid] & 2) != 0;
}
#endif // !defined(PRODUCT)
void CopyBeforeHotReload(intptr_t** copy, intptr_t* copy_num_cids) {
// The [IsolateGroupReloadContext] will need to maintain a copy of the old
// class table until instances have been morphed.
const intptr_t num_cids = NumCids();
const intptr_t bytes = sizeof(intptr_t) * num_cids;
auto size_table = static_cast<intptr_t*>(malloc(bytes));
auto table = table_.load();
for (intptr_t i = 0; i < num_cids; i++) {
// Don't use memmove, which changes this from a relaxed atomic operation
// to a non-atomic operation.
size_table[i] = table[i];
}
*copy_num_cids = num_cids;
*copy = size_table;
}
void ResetBeforeHotReload() {
// The [ProgramReloadContext] is now source-of-truth for GC.
auto table = table_.load();
for (intptr_t i = 0; i < top_; i++) {
// Don't use memset, which changes this from a relaxed atomic operation
// to a non-atomic operation.
table[i] = 0;
}
}
void ResetAfterHotReload(intptr_t* old_table,
intptr_t num_old_cids,
bool is_rollback) {
// The [ProgramReloadContext] is no longer source-of-truth for GC after we
// return, so we restore size information for all classes.
if (is_rollback) {
SetNumCids(num_old_cids);
auto table = table_.load();
for (intptr_t i = 0; i < num_old_cids; i++) {
// Don't use memmove, which changes this from a relaxed atomic operation
// to a non-atomic operation.
table[i] = old_table[i];
}
}
// Can't free this table immediately as another thread (e.g., concurrent
// marker or sweeper) may be between loading the table pointer and loading
// the table element. The table will be freed at the next major GC or
// isolate shutdown.
AddOldTable(old_table);
}
// Deallocates table copies. Do not call during concurrent access to table.
void FreeOldTables();
// Deallocates bitmap copies. Do not call during concurrent access to table.
void FreeOldUnboxedFieldsMaps();
#if !defined(DART_PRECOMPILED_RUNTIME)
bool IsReloading() const { return reload_context_ != nullptr; }
IsolateGroupReloadContext* reload_context() { return reload_context_; }
#endif // !defined(DART_PRECOMPILED_RUNTIME)
// Returns the newly allocated cid.
// Allocate an array of T with |len| elements.
//
// [index] is kIllegalCid or a predefined cid.
intptr_t Register(intptr_t index, intptr_t size);
void AllocateIndex(intptr_t index);
void Unregister(intptr_t index);
void Remap(intptr_t* old_to_new_cids);
// Used by the generated code.
#ifndef PRODUCT
static intptr_t class_heap_stats_table_offset() {
return OFFSET_OF(SharedClassTable, trace_allocation_table_);
// Does *not* initialize the memory.
template <class T>
inline T* Alloc(intptr_t len) {
return reinterpret_cast<T*>(dart::malloc(len * sizeof(T)));
}
#endif
// Used by the generated code.
static intptr_t ClassOffsetFor(intptr_t cid);
// Allocate a zero initialized array of T with |len| elements.
template <class T>
inline T* AllocZeroInitialized(intptr_t len) {
return reinterpret_cast<T*>(dart::calloc(len, sizeof(T)));
}
static const int kInitialCapacity = 512;
static const int kCapacityIncrement = 256;
// Clone the given |array| with |size| elements.
template <class T>
inline T* Clone(T* array, intptr_t size) {
auto result = Alloc<T>(size);
memmove(result, array, size * sizeof(T));
return result;
}
// Copy |size| elements from the given |array| into a new
// array with space for |new_size| elements. Then |Free|
// the original |array|.
//
// |new_size| is expected to be larger than |size|.
template <class T>
inline T* Realloc(T* array, intptr_t size, intptr_t new_size) {
ASSERT(size < new_size);
auto result = AllocZeroInitialized<T>(new_size);
memmove(result, array, size * sizeof(T));
Free(array);
return result;
}
// Schedule deletion of the given ClassTable.
void Free(ClassTable* table);
// Schedule freeing of the given pointer.
void Free(void* ptr);
// Free all objects which were scheduled by |Free|. Expected to only be
// called on |IsolateGroup| shutdown or when the world is stopped and no
// thread can be using a stale class table pointer.
void FreePending();
private:
typedef void (*Deleter)(void*);
MallocGrowableArray<std::pair<void*, Deleter>>* pending_freed_;
};
// A table with the given |Columns| indexed by class id.
//
// Each column is a contiguous array of the given type. All columns have
// the same number of used elements (|num_cids()|) and the same capacity.
template <typename... Columns>
class CidIndexedTable {
public:
explicit CidIndexedTable(ClassTableAllocator* allocator)
: allocator_(allocator) {}
~CidIndexedTable() {
std::apply([&](auto&... column) { (allocator_->Free(column.load()), ...); },
columns_);
}
CidIndexedTable(const CidIndexedTable& other) = delete;
void SetNumCidsAndCapacity(intptr_t new_num_cids, intptr_t new_capacity) {
columns_ = std::apply(
[&](auto&... column) {
return std::make_tuple(
allocator_->Realloc(column.load(), num_cids_, new_capacity)...);
},
columns_);
capacity_ = new_capacity;
num_cids_ = new_num_cids;
}
void AllocateIndex(intptr_t index, bool* did_grow) {
*did_grow = EnsureCapacity(index);
num_cids_ = Utils::Maximum(num_cids_, index + 1);
}
intptr_t AddRow(bool* did_grow) {
*did_grow = EnsureCapacity(num_cids_);
return num_cids_++;
}
void ShrinkTo(intptr_t new_num_cids) {
ASSERT(new_num_cids <= num_cids_);
num_cids_ = new_num_cids;
}
bool IsValidIndex(intptr_t index) const {
return 0 <= index && index < num_cids_;
}
void CopyFrom(const CidIndexedTable& other) {
ASSERT(allocator_ == other.allocator_);
std::apply([&](auto&... column) { (allocator_->Free(column.load()), ...); },
columns_);
columns_ = std::apply(
[&](auto&... column) {
return std::make_tuple(
allocator_->Clone(column.load(), other.num_cids_)...);
},
other.columns_);
capacity_ = num_cids_ = other.num_cids_;
}
void Remap(intptr_t* old_to_new_cid) {
CidIndexedTable clone(allocator_);
clone.CopyFrom(*this);
RemapAllColumns(clone, old_to_new_cid,
std::index_sequence_for<Columns...>{});
}
template <
intptr_t kColumnIndex,
typename T = std::tuple_element_t<kColumnIndex, std::tuple<Columns...>>>
T* GetColumn() {
return std::get<kColumnIndex>(columns_).load();
}
template <
intptr_t kColumnIndex,
typename T = std::tuple_element_t<kColumnIndex, std::tuple<Columns...>>>
const T* GetColumn() const {
return std::get<kColumnIndex>(columns_).load();
}
template <
intptr_t kColumnIndex,
typename T = std::tuple_element_t<kColumnIndex, std::tuple<Columns...>>>
T& At(intptr_t index) {
ASSERT(IsValidIndex(index));
return GetColumn<kColumnIndex>()[index];
}
template <
intptr_t kColumnIndex,
typename T = std::tuple_element_t<kColumnIndex, std::tuple<Columns...>>>
const T& At(intptr_t index) const {
ASSERT(IsValidIndex(index));
return GetColumn<kColumnIndex>()[index];
}
intptr_t num_cids() const { return num_cids_; }
intptr_t capacity() const { return capacity_; }
private:
friend class ClassTable;
friend class GCMarker;
friend class MarkingWeakVisitor;
friend class Scavenger;
friend class ScavengerWeakVisitor;
#ifndef PRODUCT
// Copy-on-write is used for trace_allocation_table_, with old copies stored
// in old_tables_.
AcqRelAtomic<uint8_t*> trace_allocation_table_ = {nullptr};
#endif // !PRODUCT
// Wrapper around AcqRelAtomic<T*> which makes it assignable and copyable
// so that we could put it inside an std::tuple.
template <typename T>
struct Ptr {
Ptr() : ptr(nullptr) {}
Ptr(T* ptr) : ptr(ptr) {} // NOLINT
void AddOldTable(intptr_t* old_table);
Ptr(const Ptr& other) { ptr.store(other.ptr.load()); }
void Grow(intptr_t new_capacity);
Ptr& operator=(const Ptr& other) {
ptr.store(other.load());
return *this;
}
intptr_t top_;
intptr_t capacity_;
T* load() const { return ptr.load(); }
// Copy-on-write is used for table_, with old copies stored in old_tables_.
// Maps the cid to the instance size.
AcqRelAtomic<RelaxedAtomic<intptr_t>*> table_ = {nullptr};
MallocGrowableArray<void*>* old_tables_;
AcqRelAtomic<T*> ptr = {nullptr};
};
IsolateGroupReloadContext* reload_context_ = nullptr;
bool EnsureCapacity(intptr_t index) {
if (index >= capacity_) {
SetNumCidsAndCapacity(num_cids_, index + kCapacityIncrement);
return true;
}
return false;
}
// Stores a 64-bit bitmap for each class. There is one bit for each word in an
// instance of the class. A 0 bit indicates that the word contains a pointer
// the GC has to scan, a 1 indicates that the word is part of e.g. an unboxed
// double and does not need to be scanned. (see Class::Calculate...() where
// the bitmap is constructed)
UnboxedFieldBitmap* unboxed_fields_map_ = nullptr;
template <intptr_t kColumnIndex>
void RemapColumn(const CidIndexedTable& old, intptr_t* old_to_new_cid) {
auto new_column = GetColumn<kColumnIndex>();
auto old_column = old.GetColumn<kColumnIndex>();
for (intptr_t i = 0; i < num_cids_; i++) {
new_column[old_to_new_cid[i]] = old_column[i];
}
}
DISALLOW_COPY_AND_ASSIGN(SharedClassTable);
template <std::size_t... Is>
void RemapAllColumns(const CidIndexedTable& old,
intptr_t* old_to_new_cid,
std::index_sequence<Is...>) {
(RemapColumn<Is>(old, old_to_new_cid), ...);
}
static constexpr intptr_t kCapacityIncrement = 256;
ClassTableAllocator* allocator_;
intptr_t num_cids_ = 0;
intptr_t capacity_ = 0;
std::tuple<Ptr<Columns>...> columns_;
};
class ClassTable {
// Registry of all known classes.
//
// The GC will only use information about instance size and unboxed field maps
// to scan instances and will not access class objects themselves. This
// information is stored in separate columns of the |classes_| table.
//
// # Concurrency & atomicity
//
// This table is read concurrently without locking (e.g. by GC threads) so
// there are some invariants that need to be observed when working with it.
//
// * When the table is updated (e.g. when the table is grown or a new class
//   is registered in it) there must be a release barrier after the update.
//   Such a barrier ensures that stores which populate the table are not
//   reordered past the store which exposes the new grown table or exposes
//   a new class id;
// * Old versions of the table can only be freed when the world is stopped
//   (no mutator and no helper threads are running), to avoid freeing a
//   table which some other thread is still reading.
//
// Note that torn reads are not a concern (e.g. it is fine to use
// memmove to copy class table contents) as long as an appropriate
// barrier is issued before the copy of the table can be observed.
//
// # Hot reload
//
// Each IsolateGroup contains two ClassTable fields: |class_table| and
// |heap_walk_class_table|. GC visitors use the second field to get ClassTable
// instance which they will use for visiting pointers inside instances in
// the heap. Usually these two fields will be pointing to the same table,
// except when IsolateGroup is in the middle of reload.
//
// When reloading, |class_table| points to a copy of the original
// table. Kernel loading modifies this copy, while GC workers can
// continue using the original table, still available through
// |heap_walk_class_table|. If the hot reload succeeds,
// |heap_walk_class_table| is dropped and |class_table| becomes the
// source of truth. Otherwise, the original table is restored from
// |heap_walk_class_table|.
//
// See IsolateGroup methods CloneClassTableForReload, RestoreOriginalClassTable,
// DropOriginalClassTable.
class ClassTable : public MallocAllocated {
public:
explicit ClassTable(SharedClassTable* shared_class_table_);
explicit ClassTable(ClassTableAllocator* allocator);
~ClassTable();
SharedClassTable* shared_class_table() const { return shared_class_table_; }
ClassTable* Clone() const { return new ClassTable(*this); }
void CopyBeforeHotReload(ClassPtr** copy,
ClassPtr** tlc_copy,
intptr_t* copy_num_cids,
intptr_t* copy_num_tlc_cids) {
// The [ProgramReloadContext] will need to maintain a copy of the old class
// table until instances have been morphed.
const intptr_t num_cids = NumCids();
const intptr_t num_tlc_cids = NumTopLevelCids();
auto class_table =
static_cast<ClassPtr*>(malloc(sizeof(ClassPtr) * num_cids));
auto tlc_class_table =
static_cast<ClassPtr*>(malloc(sizeof(ClassPtr) * num_tlc_cids));
// Don't use memmove, which changes this from a relaxed atomic operation
// to a non-atomic operation.
auto table = table_.load();
for (intptr_t i = 0; i < num_cids; i++) {
class_table[i] = table[i];
}
auto tlc_table = tlc_table_.load();
for (intptr_t i = 0; i < num_tlc_cids; i++) {
tlc_class_table[i] = tlc_table[i];
}
*copy = class_table;
*tlc_copy = tlc_class_table;
*copy_num_cids = num_cids;
*copy_num_tlc_cids = num_tlc_cids;
}
void ResetBeforeHotReload() {
// We cannot clear out the class pointers, because a hot-reload
// contains only a diff: If e.g. a class included in the hot-reload has a
// super class not included in the diff, it will look up in this class table
// to find the super class (e.g. `cls.SuperClass` will cause us to come
// here).
}
void ResetAfterHotReload(ClassPtr* old_table,
ClassPtr* old_tlc_table,
intptr_t num_old_cids,
intptr_t num_old_tlc_cids,
bool is_rollback) {
// The [ProgramReloadContext] is no longer source-of-truth for GC after we
// return, so we restore size information for all classes.
if (is_rollback) {
SetNumCids(num_old_cids, num_old_tlc_cids);
// Don't use memmove, which changes this from a relaxed atomic operation
// to a non-atomic operation.
auto table = table_.load();
for (intptr_t i = 0; i < num_old_cids; i++) {
table[i] = old_table[i];
}
auto tlc_table = tlc_table_.load();
for (intptr_t i = 0; i < num_old_tlc_cids; i++) {
tlc_table[i] = old_tlc_table[i];
}
} else {
CopySizesFromClassObjects();
}
// Can't free these tables immediately as another thread (e.g., concurrent
// marker or sweeper) may be between loading the table pointer and loading
// the table element. The table will be freed at the next major GC or
// isolate shutdown.
AddOldTable(old_table);
AddOldTable(old_tlc_table);
}
// Thread-safe.
ClassPtr At(intptr_t cid) const {
ASSERT(IsValidIndex(cid));
if (IsTopLevelCid(cid)) {
return tlc_table_.load()[IndexFromTopLevelCid(cid)];
return top_level_classes_.At<kClassIndex>(IndexFromTopLevelCid(cid));
}
return table_.load()[cid];
return classes_.At<kClassIndex>(cid);
}
intptr_t SizeAt(intptr_t index) const {
int32_t SizeAt(intptr_t index) const {
if (IsTopLevelCid(index)) {
return 0;
}
return shared_class_table_->SizeAt(index);
return classes_.At<kSizeIndex>(index);
}
void SetAt(intptr_t index, ClassPtr raw_cls);
@@ -358,27 +357,74 @@ class ClassTable {
bool IsValidIndex(intptr_t cid) const {
if (IsTopLevelCid(cid)) {
return IndexFromTopLevelCid(cid) < tlc_top_;
return top_level_classes_.IsValidIndex(IndexFromTopLevelCid(cid));
}
return shared_class_table_->IsValidIndex(cid);
return classes_.IsValidIndex(cid);
}
bool HasValidClassAt(intptr_t cid) const {
bool HasValidClassAt(intptr_t cid) const { return At(cid) != nullptr; }
UnboxedFieldBitmap GetUnboxedFieldsMapAt(intptr_t cid) const {
ASSERT(IsValidIndex(cid));
if (IsTopLevelCid(cid)) {
return tlc_table_.load()[IndexFromTopLevelCid(cid)] != nullptr;
}
return table_.load()[cid] != nullptr;
return FLAG_precompiled_mode ? classes_.At<kUnboxedFieldBitmapIndex>(cid)
: UnboxedFieldBitmap();
}
intptr_t NumCids() const { return shared_class_table_->NumCids(); }
intptr_t NumTopLevelCids() const { return tlc_top_; }
intptr_t Capacity() const { return shared_class_table_->Capacity(); }
void SetUnboxedFieldsMapAt(intptr_t cid, UnboxedFieldBitmap map) {
ASSERT(IsValidIndex(cid));
ASSERT(classes_.At<kUnboxedFieldBitmapIndex>(cid).IsEmpty());
classes_.At<kUnboxedFieldBitmapIndex>(cid) = map;
}
#if !defined(PRODUCT)
bool ShouldTraceAllocationFor(intptr_t cid) {
return !IsTopLevelCid(cid) &&
(classes_.At<kAllocationTracingStateIndex>(cid) != kTracingDisabled);
}
void SetTraceAllocationFor(intptr_t cid, bool trace) {
classes_.At<kAllocationTracingStateIndex>(cid) =
trace ? kTraceAllocationBit : kTracingDisabled;
}
void SetCollectInstancesFor(intptr_t cid, bool trace) {
auto& slot = classes_.At<kAllocationTracingStateIndex>(cid);
if (trace) {
slot |= kCollectInstancesBit;
} else {
slot &= ~kCollectInstancesBit;
}
}
bool CollectInstancesFor(intptr_t cid) {
auto& slot = classes_.At<kAllocationTracingStateIndex>(cid);
return (slot & kCollectInstancesBit) != 0;
}
void UpdateCachedAllocationTracingStateTablePointer() {
cached_allocation_tracing_state_table_.store(
classes_.GetColumn<kAllocationTracingStateIndex>());
}
#else
void UpdateCachedAllocationTracingStateTablePointer() {}
#endif // !defined(PRODUCT)
intptr_t NumCids() const {
return classes_.num_cids();
}
intptr_t Capacity() const {
return classes_.capacity();
}
intptr_t NumTopLevelCids() const {
return top_level_classes_.num_cids();
}
void Register(const Class& cls);
void RegisterTopLevel(const Class& cls);
void AllocateIndex(intptr_t index);
void Unregister(intptr_t index);
void RegisterTopLevel(const Class& cls);
void UnregisterTopLevel(intptr_t index);
void Remap(intptr_t* old_to_new_cids);
@@ -405,9 +451,11 @@ class ClassTable {
static constexpr intptr_t kElementSize = sizeof(uint8_t);
};
#endif
#ifndef PRODUCT
static intptr_t allocation_tracing_state_table_offset() {
static_assert(sizeof(cached_allocation_tracing_state_table_) == kWordSize);
return OFFSET_OF(ClassTable, cached_allocation_tracing_state_table_);
}
void AllocationProfilePrintJSON(JSONStream* stream, bool internal);
@@ -429,6 +477,7 @@
}
private:
friend class ClassTableAllocator;
friend class GCMarker;
friend class MarkingWeakVisitor;
friend class Scavenger;
@@ -438,57 +487,63 @@ class ClassTable {
const char* name,
char** error);
friend class IsolateGroup; // for table()
static const int kInitialCapacity = SharedClassTable::kInitialCapacity;
static const int kCapacityIncrement = SharedClassTable::kCapacityIncrement;
static const int kInitialCapacity = 512;
static const intptr_t kTopLevelCidOffset = (1 << 16);
void AddOldTable(ClassPtr* old_table);
ClassTable(const ClassTable& original)
: allocator_(original.allocator_),
classes_(original.allocator_),
top_level_classes_(original.allocator_) {
classes_.CopyFrom(original.classes_);
top_level_classes_.CopyFrom(original.top_level_classes_);
UpdateCachedAllocationTracingStateTablePointer();
}
void AllocateTopLevelIndex(intptr_t index);
void Grow(intptr_t index);
void GrowTopLevel(intptr_t index);
ClassPtr* table() { return table_.load(); }
void set_table(ClassPtr* table);
ClassPtr* table() {
return classes_.GetColumn<kClassIndex>();
}
// Used to drop recently added classes.
void SetNumCids(intptr_t num_cids, intptr_t num_tlc_cids) {
shared_class_table_->SetNumCids(num_cids);
ASSERT(num_cids <= top_);
top_ = num_cids;
ASSERT(num_tlc_cids <= tlc_top_);
tlc_top_ = num_tlc_cids;
classes_.ShrinkTo(num_cids);
top_level_classes_.ShrinkTo(num_tlc_cids);
}
intptr_t top_;
intptr_t capacity_;
ClassTableAllocator* allocator_;
intptr_t tlc_top_;
intptr_t tlc_capacity_;
// Unfortunately std::tuple used by CidIndexedTable does not have a stable
// layout so we can't refer to its elements from generated code.
NOT_IN_PRODUCT(AcqRelAtomic<uint8_t*> cached_allocation_tracing_state_table_ =
{nullptr});
// Copy-on-write is used for table_, with old copies stored in
// old_class_tables_.
AcqRelAtomic<ClassPtr*> table_;
AcqRelAtomic<ClassPtr*> tlc_table_;
MallocGrowableArray<ClassPtr*>* old_class_tables_;
SharedClassTable* shared_class_table_;
DISALLOW_COPY_AND_ASSIGN(ClassTable);
};
enum {
kClassIndex = 0,
kSizeIndex,
kUnboxedFieldBitmapIndex,
#if !defined(PRODUCT)
kAllocationTracingStateIndex
#endif
};
#if !defined(PRODUCT)
DART_FORCE_INLINE bool SharedClassTable::TraceAllocationFor(intptr_t cid) {
ASSERT(cid > 0);
if (ClassTable::IsTopLevelCid(cid)) {
return false;
}
ASSERT(cid < top_);
return trace_allocation_table_.load()[cid] != 0;
}
#endif // !defined(PRODUCT)
CidIndexedTable<ClassPtr, uint32_t, UnboxedFieldBitmap, uint8_t> classes_;
#else
CidIndexedTable<ClassPtr, uint32_t, UnboxedFieldBitmap> classes_;
#endif
#ifndef PRODUCT
enum {
kTracingDisabled = 0,
kTraceAllocationBit = (1 << 0),
kCollectInstancesBit = (1 << 1),
};
#endif // !PRODUCT
CidIndexedTable<ClassPtr> top_level_classes_;
};
} // namespace dart

@@ -3592,25 +3592,22 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
Label* trace,
Register temp_reg,
JumpDistance distance) {
LoadAllocationStatsAddress(temp_reg, cid);
LoadAllocationTracingStateAddress(temp_reg, cid);
MaybeTraceAllocation(temp_reg, trace);
}
void Assembler::LoadAllocationStatsAddress(Register dest, intptr_t cid) {
void Assembler::LoadAllocationTracingStateAddress(Register dest, intptr_t cid) {
ASSERT(dest != kNoRegister);
ASSERT(dest != TMP);
ASSERT(cid > 0);
const intptr_t shared_table_offset =
target::IsolateGroup::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t class_offset = target::ClassTable::ClassOffsetFor(cid);
LoadIsolateGroup(dest);
ldr(dest, Address(dest, shared_table_offset));
ldr(dest, Address(dest, table_offset));
AddImmediate(dest, class_offset);
ldr(dest, Address(dest, target::IsolateGroup::class_table_offset()));
ldr(dest,
Address(dest,
target::ClassTable::allocation_tracing_state_table_offset()));
AddImmediate(dest,
target::ClassTable::AllocationTracingStateSlotOffsetFor(cid));
}
#endif // !PRODUCT
@@ -3631,7 +3628,7 @@ void Assembler::TryAllocateObject(intptr_t cid,
target::ObjectAlignment::kObjectAlignment));
if (FLAG_inline_alloc &&
target::Heap::IsAllocatableInNewSpace(instance_size)) {
NOT_IN_PRODUCT(LoadAllocationStatsAddress(temp_reg, cid));
NOT_IN_PRODUCT(LoadAllocationTracingStateAddress(temp_reg, cid));
ldr(instance_reg, Address(THR, target::Thread::top_offset()));
// TODO(koda): Protect against unsigned overflow here.
AddImmediate(instance_reg, instance_size);
@@ -3669,7 +3666,7 @@ void Assembler::TryAllocateArray(intptr_t cid,
Register temp2) {
if (FLAG_inline_alloc &&
target::Heap::IsAllocatableInNewSpace(instance_size)) {
NOT_IN_PRODUCT(LoadAllocationStatsAddress(temp1, cid));
NOT_IN_PRODUCT(LoadAllocationTracingStateAddress(temp1, cid));
// Potential new object start.
ldr(instance, Address(THR, target::Thread::top_offset()));
AddImmediateSetFlags(end_address, instance, instance_size);

@@ -1359,12 +1359,12 @@ class Assembler : public AssemblerBase {
void MonomorphicCheckedEntryAOT();
void BranchOnMonomorphicCheckedEntryJIT(Label* label);
// The register into which the allocation stats table is loaded with
// LoadAllocationStatsAddress should be passed to MaybeTraceAllocation and
// IncrementAllocationStats(WithSize) as stats_addr_reg to update the
// allocation stats. These are separate assembler macros so we can
// avoid a dependent load too nearby the load of the table address.
void LoadAllocationStatsAddress(Register dest, intptr_t cid);
// The register into which the allocation tracing state table is loaded with
// LoadAllocationTracingStateAddress should be passed to MaybeTraceAllocation.
//
// These are separate assembler macros so we can avoid a dependent load too
// nearby the load of the table address.
void LoadAllocationTracingStateAddress(Register dest, intptr_t cid);
Address ElementAddressForIntIndex(bool is_load,
bool is_external,

@@ -1966,17 +1966,14 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
JumpDistance distance) {
ASSERT(cid > 0);
const intptr_t shared_table_offset =
target::IsolateGroup::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t class_offset = target::ClassTable::ClassOffsetFor(cid);
LoadIsolateGroup(temp_reg);
ldr(temp_reg, Address(temp_reg, shared_table_offset));
ldr(temp_reg, Address(temp_reg, table_offset));
AddImmediate(temp_reg, class_offset);
ldr(temp_reg, Address(temp_reg, 0), kUnsignedByte);
ldr(temp_reg, Address(temp_reg, target::IsolateGroup::class_table_offset()));
ldr(temp_reg,
Address(temp_reg,
target::ClassTable::allocation_tracing_state_table_offset()));
LoadFromOffset(temp_reg, temp_reg,
target::ClassTable::AllocationTracingStateSlotOffsetFor(cid),
kUnsignedByte);
cbnz(trace, temp_reg);
}
#endif // !PRODUCT

@@ -2619,17 +2619,15 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
ASSERT(cid > 0);
Address state_address(kNoRegister, 0);
const intptr_t shared_table_offset =
target::IsolateGroup::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t class_offset = target::ClassTable::ClassOffsetFor(cid);
ASSERT(temp_reg != kNoRegister);
LoadIsolateGroup(temp_reg);
movl(temp_reg, Address(temp_reg, shared_table_offset));
movl(temp_reg, Address(temp_reg, table_offset));
cmpb(Address(temp_reg, class_offset), Immediate(0));
movl(temp_reg, Address(temp_reg, target::IsolateGroup::class_table_offset()));
movl(temp_reg,
Address(temp_reg,
target::ClassTable::allocation_tracing_state_table_offset()));
cmpb(Address(temp_reg,
target::ClassTable::AllocationTracingStateSlotOffsetFor(cid)),
Immediate(0));
// We are tracing for this class, jump to the trace label which will use
// the allocation stub.
j(NOT_ZERO, trace, distance);

@@ -4000,22 +4000,14 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
Register temp_reg,
JumpDistance distance) {
ASSERT(cid > 0);
const intptr_t shared_table_offset =
target::IsolateGroup::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t class_offset = target::ClassTable::ClassOffsetFor(cid);
LoadIsolateGroup(temp_reg);
lx(temp_reg, Address(temp_reg, shared_table_offset));
lx(temp_reg, Address(temp_reg, table_offset));
if (IsITypeImm(class_offset)) {
lbu(temp_reg, Address(temp_reg, class_offset));
} else {
AddImmediate(temp_reg, class_offset);
lbu(temp_reg, Address(temp_reg, 0));
}
lx(temp_reg, Address(temp_reg, target::IsolateGroup::class_table_offset()));
lx(temp_reg,
Address(temp_reg,
target::ClassTable::allocation_tracing_state_table_offset()));
LoadFromOffset(temp_reg, temp_reg,
target::ClassTable::AllocationTracingStateSlotOffsetFor(cid),
kUnsignedByte);
bnez(temp_reg, trace);
}
#endif // !PRODUCT

@@ -2201,19 +2201,18 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
Register temp_reg,
JumpDistance distance) {
ASSERT(cid > 0);
const intptr_t shared_table_offset =
target::IsolateGroup::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t class_offset = target::ClassTable::ClassOffsetFor(cid);
if (temp_reg == kNoRegister) {
temp_reg = TMP;
}
LoadIsolateGroup(temp_reg);
movq(temp_reg, Address(temp_reg, shared_table_offset));
movq(temp_reg, Address(temp_reg, table_offset));
cmpb(Address(temp_reg, class_offset), Immediate(0));
movq(temp_reg, Address(temp_reg, target::IsolateGroup::class_table_offset()));
movq(temp_reg,
Address(temp_reg,
target::ClassTable::allocation_tracing_state_table_offset()));
cmpb(Address(temp_reg,
target::ClassTable::AllocationTracingStateSlotOffsetFor(cid)),
Immediate(0));
// We are tracing for this class, jump to the trace label which will use
// the allocation stub.
j(NOT_ZERO, trace, distance);

View file

@ -1685,7 +1685,7 @@ void FlowGraphSerializer::WriteObjectImpl(const Object& x,
const auto& cls =
Class::Handle(Z, isolate_group()->class_table()->At(cid));
const auto unboxed_fields_bitmap =
isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(cid);
isolate_group()->class_table()->GetUnboxedFieldsMapAt(cid);
const intptr_t next_field_offset = cls.host_next_field_offset();
auto& obj = Object::Handle(Z);
for (intptr_t offset = Instance::NextFieldOffset();
@ -1939,7 +1939,7 @@ const Object& FlowGraphDeserializer::ReadObjectImpl(intptr_t cid,
if ((cid >= kNumPredefinedCids) || (cid == kInstanceCid)) {
const auto& cls = Class::Handle(Z, GetClassById(cid));
const auto unboxed_fields_bitmap =
isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(cid);
isolate_group()->class_table()->GetUnboxedFieldsMapAt(cid);
const intptr_t next_field_offset = cls.host_next_field_offset();
auto& instance = Instance::ZoneHandle(Z, Instance::New(cls));
for (intptr_t offset = Instance::NextFieldOffset();
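Both serializer hunks consult GetUnboxedFieldsMapAt(cid) to decide, per word-sized field slot, whether the slot holds raw bits or a tagged pointer. A self-contained sketch of that walk, with a toy type standing in for UnboxedFieldBitmap (an assumption; only the bit-per-slot idea comes from the hunks):

#include <stdint.h>

// Toy stand-in for UnboxedFieldBitmap: bit i set means field slot i stores a
// raw (unboxed) value, so it must not be treated as an object pointer.
struct ToyUnboxedFieldBitmap {
  uint64_t bits;
  bool Get(intptr_t slot) const { return ((bits >> slot) & 1) != 0; }
};

// Visit only the pointer slots of an instance, the way the hunks above loop
// from Instance::NextFieldOffset() to the class's next_field_offset.
template <typename Visitor>
void ForEachPointerSlot(ToyUnboxedFieldBitmap bitmap,
                        intptr_t first_slot,
                        intptr_t end_slot,
                        Visitor&& visit) {
  for (intptr_t slot = first_slot; slot < end_slot; slot++) {
    if (bitmap.Get(slot)) continue;  // raw bits: skip
    visit(slot);
  }
}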

View file

@ -1322,23 +1322,16 @@ class Isolate : public AllStatic {
class IsolateGroup : public AllStatic {
public:
static word object_store_offset();
static word shared_class_table_offset();
static word class_table_offset();
static word cached_class_table_table_offset();
};
class SharedClassTable : public AllStatic {
public:
static word class_heap_stats_table_offset();
};
class ClassTable : public AllStatic {
public:
#if !defined(PRODUCT)
static word ClassOffsetFor(intptr_t cid);
static word SharedTableOffsetFor();
static word SizeOffsetFor(intptr_t cid, bool is_new);
static word allocation_tracing_state_table_offset();
static word AllocationTracingStateSlotOffsetFor(intptr_t cid);
#endif // !defined(PRODUCT)
static const word kSizeOfClassPairLog2;
};
class InstructionsSection : public AllStatic {

View file

@ -124,7 +124,7 @@ static constexpr dart::compiler::target::word Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 104;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
ClassTable_allocation_tracing_state_table_offset = 4;
static constexpr dart::compiler::target::word Closure_context_offset = 20;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 12;
@ -189,11 +189,11 @@ static constexpr dart::compiler::target::word
Isolate_has_resumption_breakpoints_offset = 45;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 32;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
20;
16;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
8;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 8;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 16;
IsolateGroup_cached_class_table_table_offset = 12;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 44;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 20;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 12;
@ -789,7 +789,7 @@ static constexpr dart::compiler::target::word Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 180;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
ClassTable_allocation_tracing_state_table_offset = 8;
static constexpr dart::compiler::target::word Closure_context_offset = 40;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 24;
@ -854,11 +854,11 @@ static constexpr dart::compiler::target::word
Isolate_has_resumption_breakpoints_offset = 89;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 64;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
40;
32;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
16;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 32;
IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 88;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 40;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 24;
@ -1459,7 +1459,7 @@ static constexpr dart::compiler::target::word Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 104;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
ClassTable_allocation_tracing_state_table_offset = 4;
static constexpr dart::compiler::target::word Closure_context_offset = 20;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 12;
@ -1524,11 +1524,11 @@ static constexpr dart::compiler::target::word
Isolate_has_resumption_breakpoints_offset = 45;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 32;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
20;
16;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
8;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 8;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 16;
IsolateGroup_cached_class_table_table_offset = 12;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 44;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 20;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 12;
@ -2121,7 +2121,7 @@ static constexpr dart::compiler::target::word Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 180;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
ClassTable_allocation_tracing_state_table_offset = 8;
static constexpr dart::compiler::target::word Closure_context_offset = 40;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 24;
@ -2186,11 +2186,11 @@ static constexpr dart::compiler::target::word
Isolate_has_resumption_breakpoints_offset = 89;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 64;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
40;
32;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
16;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 32;
IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 88;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 40;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 24;
@ -2792,7 +2792,7 @@ static constexpr dart::compiler::target::word Class_super_type_offset = 48;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 108;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
ClassTable_allocation_tracing_state_table_offset = 8;
static constexpr dart::compiler::target::word Closure_context_offset = 24;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 16;
@ -2857,11 +2857,11 @@ static constexpr dart::compiler::target::word
Isolate_has_resumption_breakpoints_offset = 89;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 64;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
40;
32;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
16;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 32;
IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 88;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 40;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 16;
@ -3462,7 +3462,7 @@ static constexpr dart::compiler::target::word Class_super_type_offset = 48;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 108;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
ClassTable_allocation_tracing_state_table_offset = 8;
static constexpr dart::compiler::target::word Closure_context_offset = 24;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 16;
@ -3527,11 +3527,11 @@ static constexpr dart::compiler::target::word
Isolate_has_resumption_breakpoints_offset = 89;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 64;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
40;
32;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
16;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 32;
IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 88;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 40;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 16;
@ -4133,7 +4133,7 @@ static constexpr dart::compiler::target::word Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 104;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
ClassTable_allocation_tracing_state_table_offset = 4;
static constexpr dart::compiler::target::word Closure_context_offset = 20;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 12;
@ -4198,11 +4198,11 @@ static constexpr dart::compiler::target::word
Isolate_has_resumption_breakpoints_offset = 45;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 32;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
20;
16;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
8;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 8;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 16;
IsolateGroup_cached_class_table_table_offset = 12;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 44;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 20;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 12;
@ -4800,7 +4800,7 @@ static constexpr dart::compiler::target::word Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
Class_host_type_arguments_field_offset_in_words_offset = 180;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
ClassTable_allocation_tracing_state_table_offset = 8;
static constexpr dart::compiler::target::word Closure_context_offset = 40;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 24;
@ -4865,11 +4865,11 @@ static constexpr dart::compiler::target::word
Isolate_has_resumption_breakpoints_offset = 89;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 64;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
40;
32;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
16;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 32;
IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 88;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 40;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 24;
@ -5531,11 +5531,11 @@ static constexpr dart::compiler::target::word Isolate_default_tag_offset = 24;
static constexpr dart::compiler::target::word Isolate_finalizers_offset = 36;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 28;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
20;
16;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
8;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 8;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 16;
IsolateGroup_cached_class_table_table_offset = 12;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 12;
static constexpr dart::compiler::target::word
@ -6188,11 +6188,11 @@ static constexpr dart::compiler::target::word Isolate_default_tag_offset = 48;
static constexpr dart::compiler::target::word Isolate_finalizers_offset = 72;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 56;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
40;
32;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
16;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 32;
IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 24;
static constexpr dart::compiler::target::word
@ -6850,11 +6850,11 @@ static constexpr dart::compiler::target::word Isolate_default_tag_offset = 24;
static constexpr dart::compiler::target::word Isolate_finalizers_offset = 36;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 28;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
20;
16;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
8;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 8;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 16;
IsolateGroup_cached_class_table_table_offset = 12;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 12;
static constexpr dart::compiler::target::word
@ -7504,11 +7504,11 @@ static constexpr dart::compiler::target::word Isolate_default_tag_offset = 48;
static constexpr dart::compiler::target::word Isolate_finalizers_offset = 72;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 56;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
40;
32;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
16;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 32;
IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 24;
static constexpr dart::compiler::target::word
@ -8167,11 +8167,11 @@ static constexpr dart::compiler::target::word Isolate_default_tag_offset = 48;
static constexpr dart::compiler::target::word Isolate_finalizers_offset = 72;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 56;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
40;
32;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
16;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 32;
IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 16;
static constexpr dart::compiler::target::word
@ -8829,11 +8829,11 @@ static constexpr dart::compiler::target::word Isolate_default_tag_offset = 48;
static constexpr dart::compiler::target::word Isolate_finalizers_offset = 72;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 56;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
40;
32;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
16;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 32;
IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 16;
static constexpr dart::compiler::target::word
@ -9492,11 +9492,11 @@ static constexpr dart::compiler::target::word Isolate_default_tag_offset = 24;
static constexpr dart::compiler::target::word Isolate_finalizers_offset = 36;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 28;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
20;
16;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
8;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 8;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 16;
IsolateGroup_cached_class_table_table_offset = 12;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 12;
static constexpr dart::compiler::target::word
@ -10151,11 +10151,11 @@ static constexpr dart::compiler::target::word Isolate_default_tag_offset = 48;
static constexpr dart::compiler::target::word Isolate_finalizers_offset = 72;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 56;
static constexpr dart::compiler::target::word IsolateGroup_object_store_offset =
40;
32;
static constexpr dart::compiler::target::word IsolateGroup_class_table_offset =
16;
static constexpr dart::compiler::target::word
IsolateGroup_shared_class_table_offset = 16;
static constexpr dart::compiler::target::word
IsolateGroup_cached_class_table_table_offset = 32;
IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word LinkedHashBase_data_offset = 24;
static constexpr dart::compiler::target::word
@ -10765,7 +10765,7 @@ static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 84;
static constexpr dart::compiler::target::word
AOT_SharedClassTable_class_heap_stats_table_offset = 0;
AOT_ClassTable_allocation_tracing_state_table_offset = 4;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 20;
static constexpr dart::compiler::target::word
AOT_Closure_delayed_type_arguments_offset = 12;
@ -10837,11 +10837,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
32;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 20;
AOT_IsolateGroup_object_store_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 8;
AOT_IsolateGroup_class_table_offset = 8;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 16;
AOT_IsolateGroup_cached_class_table_table_offset = 12;
static constexpr dart::compiler::target::word AOT_Isolate_single_step_offset =
44;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 20;
@ -11505,7 +11505,7 @@ static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 152;
static constexpr dart::compiler::target::word
AOT_SharedClassTable_class_heap_stats_table_offset = 0;
AOT_ClassTable_allocation_tracing_state_table_offset = 8;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 40;
static constexpr dart::compiler::target::word
AOT_Closure_delayed_type_arguments_offset = 24;
@ -11577,11 +11577,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
64;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 40;
AOT_IsolateGroup_object_store_offset = 32;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 16;
AOT_IsolateGroup_class_table_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 32;
AOT_IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_single_step_offset =
88;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 40;
@ -12251,7 +12251,7 @@ static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 152;
static constexpr dart::compiler::target::word
AOT_SharedClassTable_class_heap_stats_table_offset = 0;
AOT_ClassTable_allocation_tracing_state_table_offset = 8;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 40;
static constexpr dart::compiler::target::word
AOT_Closure_delayed_type_arguments_offset = 24;
@ -12323,11 +12323,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
64;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 40;
AOT_IsolateGroup_object_store_offset = 32;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 16;
AOT_IsolateGroup_class_table_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 32;
AOT_IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_single_step_offset =
88;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 40;
@ -12994,7 +12994,7 @@ static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 48;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 88;
static constexpr dart::compiler::target::word
AOT_SharedClassTable_class_heap_stats_table_offset = 0;
AOT_ClassTable_allocation_tracing_state_table_offset = 8;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 24;
static constexpr dart::compiler::target::word
AOT_Closure_delayed_type_arguments_offset = 16;
@ -13066,11 +13066,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
64;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 40;
AOT_IsolateGroup_object_store_offset = 32;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 16;
AOT_IsolateGroup_class_table_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 32;
AOT_IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_single_step_offset =
88;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 40;
@ -13736,7 +13736,7 @@ static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 48;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 88;
static constexpr dart::compiler::target::word
AOT_SharedClassTable_class_heap_stats_table_offset = 0;
AOT_ClassTable_allocation_tracing_state_table_offset = 8;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 24;
static constexpr dart::compiler::target::word
AOT_Closure_delayed_type_arguments_offset = 16;
@ -13808,11 +13808,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
64;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 40;
AOT_IsolateGroup_object_store_offset = 32;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 16;
AOT_IsolateGroup_class_table_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 32;
AOT_IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_single_step_offset =
88;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 40;
@ -14479,7 +14479,7 @@ static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 44;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 84;
static constexpr dart::compiler::target::word
AOT_SharedClassTable_class_heap_stats_table_offset = 0;
AOT_ClassTable_allocation_tracing_state_table_offset = 4;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 20;
static constexpr dart::compiler::target::word
AOT_Closure_delayed_type_arguments_offset = 12;
@ -14551,11 +14551,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
32;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 20;
AOT_IsolateGroup_object_store_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 8;
AOT_IsolateGroup_class_table_offset = 8;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 16;
AOT_IsolateGroup_cached_class_table_table_offset = 12;
static constexpr dart::compiler::target::word AOT_Isolate_single_step_offset =
44;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 20;
@ -15221,7 +15221,7 @@ static constexpr dart::compiler::target::word AOT_Class_super_type_offset = 88;
static constexpr dart::compiler::target::word
AOT_Class_host_type_arguments_field_offset_in_words_offset = 152;
static constexpr dart::compiler::target::word
AOT_SharedClassTable_class_heap_stats_table_offset = 0;
AOT_ClassTable_allocation_tracing_state_table_offset = 8;
static constexpr dart::compiler::target::word AOT_Closure_context_offset = 40;
static constexpr dart::compiler::target::word
AOT_Closure_delayed_type_arguments_offset = 24;
@ -15293,11 +15293,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
64;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 40;
AOT_IsolateGroup_object_store_offset = 32;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 16;
AOT_IsolateGroup_class_table_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 32;
AOT_IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_single_step_offset =
88;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 40;
@ -16031,11 +16031,11 @@ static constexpr dart::compiler::target::word AOT_Isolate_finalizers_offset =
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
28;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 20;
AOT_IsolateGroup_object_store_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 8;
AOT_IsolateGroup_class_table_offset = 8;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 16;
AOT_IsolateGroup_cached_class_table_table_offset = 12;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word AOT_LinkedHashBase_data_offset =
12;
@ -16762,11 +16762,11 @@ static constexpr dart::compiler::target::word AOT_Isolate_finalizers_offset =
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
56;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 40;
AOT_IsolateGroup_object_store_offset = 32;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 16;
AOT_IsolateGroup_class_table_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 32;
AOT_IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashBase_data_offset =
24;
@ -17499,11 +17499,11 @@ static constexpr dart::compiler::target::word AOT_Isolate_finalizers_offset =
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
56;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 40;
AOT_IsolateGroup_object_store_offset = 32;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 16;
AOT_IsolateGroup_class_table_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 32;
AOT_IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashBase_data_offset =
24;
@ -18233,11 +18233,11 @@ static constexpr dart::compiler::target::word AOT_Isolate_finalizers_offset =
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
56;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 40;
AOT_IsolateGroup_object_store_offset = 32;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 16;
AOT_IsolateGroup_class_table_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 32;
AOT_IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashBase_data_offset =
16;
@ -18966,11 +18966,11 @@ static constexpr dart::compiler::target::word AOT_Isolate_finalizers_offset =
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
56;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 40;
AOT_IsolateGroup_object_store_offset = 32;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 16;
AOT_IsolateGroup_class_table_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 32;
AOT_IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashBase_data_offset =
16;
@ -19700,11 +19700,11 @@ static constexpr dart::compiler::target::word AOT_Isolate_finalizers_offset =
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
28;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 20;
AOT_IsolateGroup_object_store_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 8;
AOT_IsolateGroup_class_table_offset = 8;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 16;
AOT_IsolateGroup_cached_class_table_table_offset = 12;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word AOT_LinkedHashBase_data_offset =
12;
@ -20433,11 +20433,11 @@ static constexpr dart::compiler::target::word AOT_Isolate_finalizers_offset =
static constexpr dart::compiler::target::word AOT_Isolate_ic_miss_code_offset =
56;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_object_store_offset = 40;
AOT_IsolateGroup_object_store_offset = 32;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_shared_class_table_offset = 16;
AOT_IsolateGroup_class_table_offset = 16;
static constexpr dart::compiler::target::word
AOT_IsolateGroup_cached_class_table_table_offset = 32;
AOT_IsolateGroup_cached_class_table_table_offset = 24;
static constexpr dart::compiler::target::word AOT_Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word AOT_LinkedHashBase_data_offset =
24;

View file

@ -46,7 +46,7 @@
#define COMMON_OFFSETS_LIST(FIELD, ARRAY, SIZEOF, ARRAY_SIZEOF, \
PAYLOAD_SIZEOF, RANGE, CONSTANT) \
ARRAY(Array, element_offset) \
NOT_IN_PRODUCT(ARRAY(ClassTable, ClassOffsetFor)) \
NOT_IN_PRODUCT(ARRAY(ClassTable, AllocationTracingStateSlotOffsetFor)) \
ARRAY(Code, element_offset) \
ARRAY(Context, variable_offset) \
ARRAY(ContextScope, element_offset) \
@ -103,7 +103,7 @@
FIELD(Class, num_type_arguments_offset) \
FIELD(Class, super_type_offset) \
FIELD(Class, host_type_arguments_field_offset_in_words_offset) \
NOT_IN_PRODUCT(FIELD(SharedClassTable, class_heap_stats_table_offset)) \
NOT_IN_PRODUCT(FIELD(ClassTable, allocation_tracing_state_table_offset)) \
FIELD(Closure, context_offset) \
FIELD(Closure, delayed_type_arguments_offset) \
FIELD(Closure, function_offset) \
@ -152,7 +152,7 @@
NOT_IN_PRODUCT(FIELD(Isolate, has_resumption_breakpoints_offset)) \
FIELD(Isolate, ic_miss_code_offset) \
FIELD(IsolateGroup, object_store_offset) \
FIELD(IsolateGroup, shared_class_table_offset) \
FIELD(IsolateGroup, class_table_offset) \
FIELD(IsolateGroup, cached_class_table_table_offset) \
NOT_IN_PRODUCT(FIELD(Isolate, single_step_offset)) \
FIELD(Isolate, user_tag_offset) \

View file

@ -1640,7 +1640,7 @@ void Debugger::DeoptimizeWorld() {
// before deoptimizing the world.
SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
for (intptr_t i = 1; i < num_classes + num_tlc_classes; i++) {
const classid_t cid =
const intptr_t cid =
i < num_classes ? i : ClassTable::CidFromTopLevelIndex(i - num_classes);
if (class_table.HasValidClassAt(cid)) {
cls = class_table.At(cid);
@ -2536,7 +2536,7 @@ void Debugger::FindCompiledFunctions(
const intptr_t num_classes = class_table.NumCids();
const intptr_t num_tlc_classes = class_table.NumTopLevelCids();
for (intptr_t i = 1; i < num_classes + num_tlc_classes; i++) {
const classid_t cid =
const intptr_t cid =
i < num_classes ? i
: ClassTable::CidFromTopLevelIndex(i - num_classes);
if (class_table.HasValidClassAt(cid)) {
@ -2659,7 +2659,7 @@ bool Debugger::FindBestFit(const Script& script,
const intptr_t num_classes = class_table.NumCids();
const intptr_t num_tlc_classes = class_table.NumTopLevelCids();
for (intptr_t i = 1; i < num_classes + num_tlc_classes; i++) {
const classid_t cid =
const intptr_t cid =
i < num_classes ? i
: ClassTable::CidFromTopLevelIndex(i - num_classes);
if (!class_table.HasValidClassAt(cid)) {

View file

@ -460,7 +460,7 @@ class MarkingWeakVisitor : public HandleVisitor {
public:
explicit MarkingWeakVisitor(Thread* thread)
: HandleVisitor(thread),
class_table_(thread->isolate_group()->shared_class_table()) {}
class_table_(thread->isolate_group()->class_table()) {}
void VisitHandle(uword addr) {
FinalizablePersistentHandle* handle =
@ -472,7 +472,7 @@ class MarkingWeakVisitor : public HandleVisitor {
}
private:
SharedClassTable* class_table_;
ClassTable* class_table_;
DISALLOW_COPY_AND_ASSIGN(MarkingWeakVisitor);
};

View file

@ -1164,7 +1164,7 @@ void PageSpace::CollectGarbageHelper(Thread* thread,
const int64_t start = OS::GetCurrentMonotonicMicros();
// Perform various cleanup that relies on no tasks interfering.
isolate_group->shared_class_table()->FreeOldTables();
isolate_group->class_table_allocator()->FreePending();
isolate_group->ForEachIsolate(
[&](Isolate* isolate) { isolate->field_table()->FreeOldTables(); },
/*at_safepoint=*/true);
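The per-table FreeOldTables() call is replaced by one group-wide FreePending(). The idea, sketched with a hypothetical allocator shape (std::vector standing in for whatever the VM actually uses): freeing a retired table is deferred until a point, like this GC helper, where no concurrent sweeper can still be reading it.

#include <stdlib.h>
#include <vector>

// Sketch of deferred freeing (hypothetical shape). Tables replaced by growth
// or by a reload clone are parked here instead of being freed immediately.
class ToyClassTableAllocator {
 public:
  void Retire(void* table) { pending_.push_back(table); }
  // Called while all tasks are stopped (e.g. from CollectGarbageHelper above).
  void FreePending() {
    for (void* table : pending_) free(table);
    pending_.clear();
  }
 private:
  std::vector<void*> pending_;
};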

View file

@ -560,7 +560,7 @@ class ScavengerWeakVisitor : public HandleVisitor {
ScavengerWeakVisitor(Thread* thread, Scavenger* scavenger)
: HandleVisitor(thread),
scavenger_(scavenger),
class_table_(thread->isolate_group()->shared_class_table()) {
class_table_(thread->isolate_group()->class_table()) {
ASSERT(scavenger->heap_->isolate_group() == thread->isolate_group());
}
@ -577,7 +577,7 @@ class ScavengerWeakVisitor : public HandleVisitor {
private:
Scavenger* scavenger_;
SharedClassTable* class_table_;
ClassTable* class_table_;
DISALLOW_COPY_AND_ASSIGN(ScavengerWeakVisitor);
};

View file

@ -329,10 +329,10 @@ IsolateGroup::IsolateGroup(std::shared_ptr<IsolateGroupSource> source,
void* embedder_data,
ObjectStore* object_store,
Dart_IsolateFlags api_flags)
: shared_class_table_(new SharedClassTable()),
class_table_(new ClassTable(shared_class_table_.get())),
cached_class_table_table_(class_table_->table()),
: class_table_(nullptr),
cached_class_table_table_(nullptr),
object_store_(object_store),
class_table_allocator_(),
embedder_data_(embedder_data),
thread_pool_(),
isolates_lock_(new SafepointRwLock()),
@ -401,6 +401,9 @@ IsolateGroup::IsolateGroup(std::shared_ptr<IsolateGroupSource> source,
WriteRwLocker wl(ThreadState::Current(), isolate_groups_rwlock_);
id_ = isolate_group_random_->NextUInt64();
}
heap_walk_class_table_ = class_table_ =
new ClassTable(&class_table_allocator_);
cached_class_table_table_.store(class_table_->table());
}
IsolateGroup::IsolateGroup(std::shared_ptr<IsolateGroupSource> source,
@ -424,6 +427,11 @@ IsolateGroup::~IsolateGroup() {
delete[] obfuscation_map_;
}
class_table_allocator_.Free(class_table_);
if (heap_walk_class_table_ != class_table_) {
class_table_allocator_.Free(heap_walk_class_table_);
}
#if !defined(PRODUCT)
delete debugger_;
debugger_ = nullptr;
@ -2040,9 +2048,9 @@ bool IsolateGroup::ReloadSources(JSONStream* js,
ASSERT(!IsReloading());
auto shared_class_table = IsolateGroup::Current()->shared_class_table();
auto class_table = IsolateGroup::Current()->class_table();
std::shared_ptr<IsolateGroupReloadContext> group_reload_context(
new IsolateGroupReloadContext(this, shared_class_table, js));
new IsolateGroupReloadContext(this, class_table, js));
group_reload_context_ = group_reload_context;
SetHasAttemptedReload(true);
@ -2069,9 +2077,9 @@ bool IsolateGroup::ReloadKernel(JSONStream* js,
ASSERT(!IsReloading());
auto shared_class_table = IsolateGroup::Current()->shared_class_table();
auto class_table = IsolateGroup::Current()->class_table();
std::shared_ptr<IsolateGroupReloadContext> group_reload_context(
new IsolateGroupReloadContext(this, shared_class_table, js));
new IsolateGroupReloadContext(this, class_table, js));
group_reload_context_ = group_reload_context;
SetHasAttemptedReload(true);
@ -2088,7 +2096,6 @@ bool IsolateGroup::ReloadKernel(JSONStream* js,
}
void IsolateGroup::DeleteReloadContext() {
// Another thread may be in the middle of GetClassForHeapWalkAt.
GcSafepointOperationScope safepoint_scope(Thread::Current());
group_reload_context_.reset();
@ -2966,10 +2973,10 @@ void IsolateGroup::VisitObjectPointers(ObjectPointerVisitor* visitor,
}
void IsolateGroup::VisitSharedPointers(ObjectPointerVisitor* visitor) {
// if class table is shared, it's stored on isolate group
if (class_table() != nullptr) {
// Visit objects in the class table.
class_table()->VisitObjectPointers(visitor);
// Visit objects in the class table.
class_table()->VisitObjectPointers(visitor);
if (heap_walk_class_table() != class_table()) {
heap_walk_class_table()->VisitObjectPointers(visitor);
}
api_state()->VisitObjectPointersUnlocked(visitor);
// Visit objects in the object store.
@ -3055,34 +3062,6 @@ void IsolateGroup::RememberLiveTemporaries() {
/*at_safepoint=*/true);
}
ClassPtr IsolateGroup::GetClassForHeapWalkAt(intptr_t cid) {
ClassPtr raw_class = nullptr;
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
if (IsReloading()) {
raw_class = program_reload_context()->GetClassForHeapWalkAt(cid);
} else {
raw_class = class_table()->At(cid);
}
#else
raw_class = class_table()->At(cid);
#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
ASSERT(raw_class != nullptr);
ASSERT(remapping_cids() || raw_class->untag()->id_ == cid);
return raw_class;
}
intptr_t IsolateGroup::GetClassSizeForHeapWalkAt(intptr_t cid) {
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
if (IsReloading()) {
return group_reload_context_->GetClassSizeForHeapWalkAt(cid);
} else {
return shared_class_table()->SizeAt(cid);
}
#else
return shared_class_table()->SizeAt(cid);
#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
}
#if !defined(PRODUCT)
ObjectIdRing* Isolate::EnsureObjectIdRing() {
if (object_id_ring_ == nullptr) {
@ -3868,4 +3847,26 @@ void Isolate::UnscheduleThread(Thread* thread,
}
}
#if !defined(PRODUCT)
void IsolateGroup::CloneClassTableForReload() {
RELEASE_ASSERT(class_table_ == heap_walk_class_table_);
class_table_ = class_table_->Clone();
set_cached_class_table_table(nullptr);
}
void IsolateGroup::RestoreOriginalClassTable() {
RELEASE_ASSERT(class_table_ != heap_walk_class_table_);
class_table_allocator_.Free(class_table_);
class_table_ = heap_walk_class_table_;
set_cached_class_table_table(class_table_->table());
}
void IsolateGroup::DropOriginalClassTable() {
RELEASE_ASSERT(class_table_ != heap_walk_class_table_);
class_table_allocator_.Free(heap_walk_class_table_);
heap_walk_class_table_ = class_table_;
set_cached_class_table_table(class_table_->table());
}
#endif
} // namespace dart
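These three methods are the whole reload protocol for the table pair. A sketch of the flow from a caller's perspective, following the commit message (the driver function itself is hypothetical):

// Hypothetical driver, showing the intended sequence of the methods above.
void ReloadSketch(IsolateGroup* group, bool reload_succeeded) {
  group->CloneClassTableForReload();      // loader now mutates the clone;
                                          // GC keeps walking the original.
  // ... kernel loading updates group->class_table() ...
  if (reload_succeeded) {
    group->DropOriginalClassTable();      // clone becomes the heap-walk table
  } else {
    group->RestoreOriginalClassTable();   // discard the clone, keep original
  }
  // Either way the invariant class_table_ == heap_walk_class_table_ holds again.
}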

View file

@ -57,6 +57,7 @@ class HandleScope;
class HandleVisitor;
class Heap;
class ICData;
class IsolateGroupReloadContext;
class IsolateObjectStore;
class IsolateProfilerData;
class ProgramReloadContext;
@ -393,13 +394,13 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
dispatch_table_snapshot_size_ = size;
}
SharedClassTable* shared_class_table() const {
return shared_class_table_.get();
ClassTableAllocator* class_table_allocator() {
return &class_table_allocator_;
}
static intptr_t shared_class_table_offset() {
COMPILE_ASSERT(sizeof(IsolateGroup::shared_class_table_) == kWordSize);
return OFFSET_OF(IsolateGroup, shared_class_table_);
static intptr_t class_table_offset() {
COMPILE_ASSERT(sizeof(IsolateGroup::class_table_) == kWordSize);
return OFFSET_OF(IsolateGroup, class_table_);
}
ClassPtr* cached_class_table_table() {
@ -520,8 +521,27 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
}
#endif // defined(PRODUCT)
// Class table for the program loaded into this isolate group.
//
// This table is modified by kernel loading.
ClassTable* class_table() const {
return class_table_;
}
// Class table used for heap walks by GC visitors. Usually it
// is the same table as the one in |class_table_|, except in the
// middle of a reload.
//
// See comment for |ClassTable| class for more details.
ClassTable* heap_walk_class_table() const {
return heap_walk_class_table_;
}
void CloneClassTableForReload();
void RestoreOriginalClassTable();
void DropOriginalClassTable();
StoreBuffer* store_buffer() const { return store_buffer_.get(); }
ClassTable* class_table() const { return class_table_.get(); }
ObjectStore* object_store() const { return object_store_.get(); }
Mutex* symbols_mutex() { return &symbols_mutex_; }
Mutex* type_canonicalization_mutex() { return &type_canonicalization_mutex_; }
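The two accessors serve different consumers while a reload is in flight. A sketch of the intended split, assuming the merged ClassTable keeps At() and SizeAt() (SizeAt is inferred from the old shared_class_table()->SizeAt() call sites removed elsewhere in this commit):

// Sketch: during a reload window the two getters can return different tables.
ClassPtr LookupProgramClass(IsolateGroup* group, intptr_t cid) {
  return group->class_table()->At(cid);  // sees classes added by the loader
}

intptr_t HeapWalkSizeFor(IsolateGroup* group, intptr_t cid) {
  return group->heap_walk_class_table()->SizeAt(cid);  // sees pre-reload state
}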
@ -595,8 +615,6 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
deferred_load_handler_ = handler;
}
intptr_t GetClassSizeForHeapWalkAt(intptr_t cid);
// Prepares all threads in an isolate for Garbage Collection.
void ReleaseStoreBuffers();
void EnableIncrementalBarrier(MarkingStack* marking_stack,
@ -695,9 +713,6 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
bool CanReload() { return false; }
#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
// Prefers old classes when we are in the middle of a reload.
ClassPtr GetClassForHeapWalkAt(intptr_t cid);
bool IsReloading() const {
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
return group_reload_context_ != nullptr;
@ -823,12 +838,14 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
void set_heap(std::unique_ptr<Heap> value);
// Accessed from generated code.
std::unique_ptr<SharedClassTable> shared_class_table_;
std::unique_ptr<ClassTable> class_table_;
ClassTable* class_table_;
AcqRelAtomic<ClassPtr*> cached_class_table_table_;
std::unique_ptr<ObjectStore> object_store_;
// End accessed from generated code.
ClassTableAllocator class_table_allocator_;
ClassTable* heap_walk_class_table_;
const char** obfuscation_map_ = nullptr;
bool is_vm_isolate_heap_ = false;

View file

@ -97,7 +97,7 @@ static bool HasNoTasks(Heap* heap) {
InstanceMorpher* InstanceMorpher::CreateFromClassDescriptors(
Zone* zone,
SharedClassTable* shared_class_table,
ClassTable* class_table,
const Class& from,
const Class& to) {
auto mapping = new (zone) ZoneGrowableArray<intptr_t>();
@ -115,8 +115,8 @@ InstanceMorpher* InstanceMorpher::CreateFromClassDescriptors(
// Add copying of the instance fields if matching by name.
// Note: currently the type of the fields are ignored.
const Array& from_fields =
Array::Handle(from.OffsetToFieldMap(true /* original classes */));
const Array& from_fields = Array::Handle(
from.OffsetToFieldMap(IsolateGroup::Current()->heap_walk_class_table()));
const Array& to_fields = Array::Handle(to.OffsetToFieldMap());
Field& from_field = Field::Handle();
Field& to_field = Field::Handle();
@ -163,19 +163,19 @@ InstanceMorpher* InstanceMorpher::CreateFromClassDescriptors(
}
ASSERT(from.id() == to.id());
return new (zone) InstanceMorpher(zone, to.id(), shared_class_table, mapping,
new_fields_offsets);
return new (zone)
InstanceMorpher(zone, to.id(), class_table, mapping, new_fields_offsets);
}
InstanceMorpher::InstanceMorpher(
Zone* zone,
classid_t cid,
SharedClassTable* shared_class_table,
ClassTable* class_table,
ZoneGrowableArray<intptr_t>* mapping,
ZoneGrowableArray<intptr_t>* new_fields_offsets)
: zone_(zone),
cid_(cid),
shared_class_table_(shared_class_table),
class_table_(class_table),
mapping_(mapping),
new_fields_offsets_(new_fields_offsets),
before_(zone, 16) {}
@ -212,7 +212,7 @@ void InstanceMorpher::CreateMorphedCopies(Become* become) {
// objects to old space.
const bool is_canonical = before.IsCanonical();
const Heap::Space space = is_canonical ? Heap::kOld : Heap::kNew;
after = Instance::NewFromCidAndSize(shared_class_table_, cid_, space);
after = Instance::NewFromCidAndSize(class_table_, cid_, space);
// We preserve the canonical bit of the object, since this object is present
// in the class's constants.
@ -402,15 +402,14 @@ bool ProgramReloadContext::IsSameLibrary(const Library& a_lib,
IsolateGroupReloadContext::IsolateGroupReloadContext(
IsolateGroup* isolate_group,
SharedClassTable* shared_class_table,
ClassTable* class_table,
JSONStream* js)
: zone_(Thread::Current()->zone()),
isolate_group_(isolate_group),
shared_class_table_(shared_class_table),
class_table_(class_table),
start_time_micros_(OS::GetCurrentMonotonicMicros()),
reload_timestamp_(OS::GetCurrentTimeMillis()),
js_(js),
saved_size_table_(nullptr),
instance_morphers_(zone_, 0),
reasons_to_cancel_reload_(zone_, 0),
instance_morpher_by_cid_(zone_),
@ -425,8 +424,6 @@ ProgramReloadContext::ProgramReloadContext(
: zone_(Thread::Current()->zone()),
group_reload_context_(group_reload_context),
isolate_group_(isolate_group),
saved_class_table_(nullptr),
saved_tlc_class_table_(nullptr),
old_classes_set_storage_(Array::null()),
class_map_storage_(Array::null()),
removed_class_set_storage_(Array::null()),
@ -442,8 +439,7 @@ ProgramReloadContext::ProgramReloadContext(
ProgramReloadContext::~ProgramReloadContext() {
ASSERT(zone_ == Thread::Current()->zone());
ASSERT(saved_class_table_.load(std::memory_order_relaxed) == nullptr);
ASSERT(saved_tlc_class_table_.load(std::memory_order_relaxed) == nullptr);
ASSERT(IG->class_table() == IG->heap_walk_class_table());
}
void IsolateGroupReloadContext::ReportError(const Error& error) {
@ -671,10 +667,9 @@ bool IsolateGroupReloadContext::Reload(bool force_reload,
heap->CollectAllGarbage(GCReason::kDebugging, /*compact=*/ true);
}
// Copy the size table for isolate group & class tables for each isolate.
// Clone the class table.
{
TIMELINE_SCOPE(CheckpointClasses);
CheckpointSharedClassTable();
IG->program_reload_context()->CheckpointClasses();
}
@ -699,7 +694,6 @@ bool IsolateGroupReloadContext::Reload(bool force_reload,
const auto& error = Error::Cast(result);
AddReasonForCancelling(new Aborted(Z, error));
DiscardSavedClassTable(/*is_rollback=*/true);
IG->program_reload_context()->ReloadPhase4Rollback();
CommonFinalizeTail(num_old_libs_);
} else {
@ -769,9 +763,7 @@ bool IsolateGroupReloadContext::Reload(bool force_reload,
// We accepted the hot-reload and morphed instances. So now we can
// commit to the changed class table and delete the saved one.
DiscardSavedClassTable(/*is_rollback=*/false);
IG->program_reload_context()->DiscardSavedClassTable(
/*is_rollback=*/false);
IG->DropOriginalClassTable();
}
MorphInstancesPhase2Become(IG->become());
@ -784,17 +776,31 @@ bool IsolateGroupReloadContext::Reload(bool force_reload,
heap->CollectAllGarbage(GCReason::kDebugging, /*compact=*/ true);
}
}
if (FLAG_identity_reload) {
if (!discard_class_tables) {
TIR_Print("Identity reload failed! Some instances were morphed\n");
}
if (IG->heap_walk_class_table()->NumCids() !=
IG->class_table()->NumCids()) {
TIR_Print("Identity reload failed! B#C=%" Pd " A#C=%" Pd "\n",
IG->heap_walk_class_table()->NumCids(),
IG->class_table()->NumCids());
}
if (IG->heap_walk_class_table()->NumTopLevelCids() !=
IG->class_table()->NumTopLevelCids()) {
TIR_Print("Identity reload failed! B#TLC=%" Pd " A#TLC=%" Pd "\n",
IG->heap_walk_class_table()->NumTopLevelCids(),
IG->class_table()->NumTopLevelCids());
}
}
if (discard_class_tables) {
DiscardSavedClassTable(/*is_rollback=*/false);
IG->program_reload_context()->DiscardSavedClassTable(
/*is_rollback=*/false);
IG->DropOriginalClassTable();
}
isolate_group_->program_reload_context()->ReloadPhase4CommitFinish();
TIR_Print("---- DONE COMMIT\n");
isolate_group_->set_last_reload_timestamp(reload_timestamp_);
} else {
TIR_Print("---- ROLLING BACK");
DiscardSavedClassTable(/*is_rollback=*/true);
isolate_group_->program_reload_context()->ReloadPhase4Rollback();
}
@ -1066,7 +1072,7 @@ void ProgramReloadContext::ReloadPhase4CommitFinish() {
}
void ProgramReloadContext::ReloadPhase4Rollback() {
RollbackClasses();
IG->RestoreOriginalClassTable();
RollbackLibraries();
}
@ -1209,64 +1215,33 @@ void ProgramReloadContext::DeoptimizeDependentCode() {
// TODO(rmacnak): Also call LibraryPrefix::InvalidateDependentCode.
}
void IsolateGroupReloadContext::CheckpointSharedClassTable() {
// Copy the size table for isolate group.
intptr_t* saved_size_table = nullptr;
shared_class_table_->CopyBeforeHotReload(&saved_size_table, &saved_num_cids_);
Thread* thread = Thread::Current();
{
NoSafepointScope no_safepoint_scope(thread);
// The saved_size_table_ will now become source of truth for GC.
saved_size_table_.store(saved_size_table, std::memory_order_release);
}
// But the concurrent sweeper may still be reading from the old table.
thread->heap()->WaitForSweeperTasks(thread);
// Now we can clear the old table. This satisfies asserts during class
// registration and encourages fast failure if we use the wrong table
// for GC during reload, but isn't strictly needed for correctness.
shared_class_table_->ResetBeforeHotReload();
}
void ProgramReloadContext::CheckpointClasses() {
TIR_Print("---- CHECKPOINTING CLASSES\n");
// Checkpoint classes before a reload. We need to copy the following:
// 1) The size of the class table.
// 2) The class table itself.
// Checkpoint classes before a reload.
// Before this operation the class table used for heap scanning and
// the class table used for program loading are the same. After this step
// they will become different until the reload is committed (or rolled back).
//
// Note that because GC is always reading from heap_walk_class_table and
// we are not changing that, there is no reason to wait for sweeping
// threads or marking to complete.
RELEASE_ASSERT(IG->class_table() == IG->heap_walk_class_table());
IG->CloneClassTableForReload();
// IG->class_table() is now the clone of heap_walk_class_table.
RELEASE_ASSERT(IG->class_table() != IG->heap_walk_class_table());
ClassTable* class_table = IG->class_table();
// For efficiency, we build a set of classes before the reload. This set
// is used to pair new classes with old classes.
// Copy the class table for isolate.
ClassTable* class_table = IG->class_table();
ClassPtr* saved_class_table = nullptr;
ClassPtr* saved_tlc_class_table = nullptr;
class_table->CopyBeforeHotReload(&saved_class_table, &saved_tlc_class_table,
&saved_num_cids_, &saved_num_tlc_cids_);
// Copy classes into saved_class_table_ first. Make sure there are no
// safepoints until saved_class_table_ is filled up and saved so class raw
// pointers in saved_class_table_ are properly visited by GC.
{
NoSafepointScope no_safepoint_scope(Thread::Current());
// The saved_class_table_ is now source of truth for GC.
saved_class_table_.store(saved_class_table, std::memory_order_release);
saved_tlc_class_table_.store(saved_tlc_class_table,
std::memory_order_release);
// We can therefore wipe out all of the old entries (if that table is used
// for GC during the hot-reload we have a bug).
class_table->ResetBeforeHotReload();
}
// Add classes to the set. The set is stored in an Array, so adding an
// element may allocate a Dart object on the heap and trigger GC.
Class& cls = Class::Handle();
UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
for (intptr_t i = 0; i < saved_num_cids_; i++) {
for (intptr_t i = 0; i < class_table->NumCids(); i++) {
if (class_table->IsValidIndex(i) && class_table->HasValidClassAt(i)) {
if (i != kFreeListElement && i != kForwardingCorpse) {
cls = class_table->At(i);
@ -1275,7 +1250,7 @@ void ProgramReloadContext::CheckpointClasses() {
}
}
}
for (intptr_t i = 0; i < saved_num_tlc_cids_; i++) {
for (intptr_t i = 0; i < class_table->NumTopLevelCids(); i++) {
const intptr_t cid = ClassTable::CidFromTopLevelIndex(i);
if (class_table->IsValidIndex(cid) && class_table->HasValidClassAt(cid)) {
cls = class_table->At(cid);
@ -1284,7 +1259,8 @@ void ProgramReloadContext::CheckpointClasses() {
}
}
old_classes_set_storage_ = old_classes_set.Release().ptr();
TIR_Print("---- System had %" Pd " classes\n", saved_num_cids_);
TIR_Print("---- System had %" Pd " classes\n",
class_table->NumCids() + class_table->NumTopLevelCids());
}
Dart_FileModifiedCallback IsolateGroupReloadContext::file_modified_callback_ =
@ -1429,14 +1405,6 @@ void ProgramReloadContext::CheckpointLibraries() {
object_store()->set_root_library(Library::Handle());
}
void ProgramReloadContext::RollbackClasses() {
TIR_Print("---- ROLLING BACK CLASS TABLE\n");
ASSERT((saved_num_cids_ + saved_num_tlc_cids_) > 0);
ASSERT(saved_class_table_.load(std::memory_order_relaxed) != nullptr);
ASSERT(saved_tlc_class_table_.load(std::memory_order_relaxed) != nullptr);
DiscardSavedClassTable(/*is_rollback=*/true);
}
void ProgramReloadContext::RollbackLibraries() {
TIR_Print("---- ROLLING BACK LIBRARY CHANGES\n");
@ -1618,14 +1586,6 @@ void ProgramReloadContext::CommitAfterInstanceMorphing() {
#endif
if (FLAG_identity_reload) {
if (saved_num_cids_ != IG->class_table()->NumCids()) {
TIR_Print("Identity reload failed! B#C=%" Pd " A#C=%" Pd "\n",
saved_num_cids_, IG->class_table()->NumCids());
}
if (saved_num_tlc_cids_ != IG->class_table()->NumTopLevelCids()) {
TIR_Print("Identity reload failed! B#TLC=%" Pd " A#TLC=%" Pd "\n",
saved_num_tlc_cids_, IG->class_table()->NumTopLevelCids());
}
const auto& saved_libs = GrowableObjectArray::Handle(saved_libraries_);
const GrowableObjectArray& libs =
GrowableObjectArray::Handle(IG->object_store()->libraries());
@ -1755,60 +1715,6 @@ void ProgramReloadContext::ValidateReload() {
}
}
ClassPtr ProgramReloadContext::GetClassForHeapWalkAt(intptr_t cid) {
ClassPtr* class_table = nullptr;
intptr_t index = -1;
if (ClassTable::IsTopLevelCid(cid)) {
class_table = saved_tlc_class_table_.load(std::memory_order_acquire);
index = ClassTable::IndexFromTopLevelCid(cid);
ASSERT(index < saved_num_tlc_cids_);
} else {
class_table = saved_class_table_.load(std::memory_order_acquire);
index = cid;
ASSERT(cid > 0 && cid < saved_num_cids_);
}
if (class_table != nullptr) {
return class_table[index];
}
return IG->class_table()->At(cid);
}
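With the saved-table indirection above removed, GC-side class lookups reduce to a direct read through the heap-walk field. An illustrative snippet follows; ClassForHeapWalk is a hypothetical helper name, but the heap_walk_class_table()/At() calls mirror the call sites updated elsewhere in this diff.

// During a reload heap_walk_class_table() still points at the original
// table, so the GC keeps seeing the old classes; outside of a reload both
// fields reference the same table.
ClassPtr ClassForHeapWalk(IsolateGroup* isolate_group, intptr_t cid) {
  return isolate_group->heap_walk_class_table()->At(cid);
}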
intptr_t IsolateGroupReloadContext::GetClassSizeForHeapWalkAt(classid_t cid) {
if (ClassTable::IsTopLevelCid(cid)) {
return 0;
}
intptr_t* size_table = saved_size_table_.load(std::memory_order_acquire);
if (size_table != nullptr) {
ASSERT(cid < saved_num_cids_);
return size_table[cid];
} else {
return shared_class_table_->SizeAt(cid);
}
}
void ProgramReloadContext::DiscardSavedClassTable(bool is_rollback) {
ClassPtr* local_saved_class_table =
saved_class_table_.load(std::memory_order_relaxed);
ClassPtr* local_saved_tlc_class_table =
saved_tlc_class_table_.load(std::memory_order_relaxed);
{
auto thread = Thread::Current();
SafepointWriteRwLocker sl(thread, thread->isolate_group()->program_lock());
IG->class_table()->ResetAfterHotReload(
local_saved_class_table, local_saved_tlc_class_table, saved_num_cids_,
saved_num_tlc_cids_, is_rollback);
}
saved_class_table_.store(nullptr, std::memory_order_release);
saved_tlc_class_table_.store(nullptr, std::memory_order_release);
}
void IsolateGroupReloadContext::DiscardSavedClassTable(bool is_rollback) {
intptr_t* local_saved_size_table = saved_size_table_;
shared_class_table_->ResetAfterHotReload(local_saved_size_table,
saved_num_cids_, is_rollback);
saved_size_table_.store(nullptr, std::memory_order_release);
}
void IsolateGroupReloadContext::VisitObjectPointers(
ObjectPointerVisitor* visitor) {
visitor->VisitPointers(from(), to());
@ -1816,20 +1722,6 @@ void IsolateGroupReloadContext::VisitObjectPointers(
void ProgramReloadContext::VisitObjectPointers(ObjectPointerVisitor* visitor) {
visitor->VisitPointers(from(), to());
ClassPtr* saved_class_table =
saved_class_table_.load(std::memory_order_relaxed);
if (saved_class_table != NULL) {
auto class_table = reinterpret_cast<ObjectPtr*>(&(saved_class_table[0]));
visitor->VisitPointers(class_table, saved_num_cids_);
}
ClassPtr* saved_tlc_class_table =
saved_tlc_class_table_.load(std::memory_order_relaxed);
if (saved_tlc_class_table != NULL) {
auto class_table =
reinterpret_cast<ObjectPtr*>(&(saved_tlc_class_table[0]));
visitor->VisitPointers(class_table, saved_num_tlc_cids_);
}
}
ObjectStore* ProgramReloadContext::object_store() {

View file

@ -56,15 +56,14 @@ class InstanceMorpher : public ZoneAllocated {
public:
// Creates a new [InstanceMorpher] based on the [from]/[to] class
// descriptions.
static InstanceMorpher* CreateFromClassDescriptors(
Zone* zone,
SharedClassTable* shared_class_table,
const Class& from,
const Class& to);
static InstanceMorpher* CreateFromClassDescriptors(Zone* zone,
ClassTable* class_table,
const Class& from,
const Class& to);
InstanceMorpher(Zone* zone,
classid_t cid,
SharedClassTable* shared_class_table,
ClassTable* class_table,
ZoneGrowableArray<intptr_t>* mapping,
ZoneGrowableArray<intptr_t>* new_fields_offsets);
virtual ~InstanceMorpher() {}
@ -87,7 +86,7 @@ class InstanceMorpher : public ZoneAllocated {
private:
Zone* zone_;
classid_t cid_;
SharedClassTable* shared_class_table_;
ClassTable* class_table_;
ZoneGrowableArray<intptr_t>* mapping_;
ZoneGrowableArray<intptr_t>* new_fields_offsets_;
@ -130,7 +129,7 @@ class ClassReasonForCancelling : public ReasonForCancelling {
class IsolateGroupReloadContext {
public:
IsolateGroupReloadContext(IsolateGroup* isolate,
SharedClassTable* shared_class_table,
ClassTable* class_table,
JSONStream* js);
~IsolateGroupReloadContext();
@ -144,10 +143,6 @@ class IsolateGroupReloadContext {
// All zone allocated objects must be allocated from this zone.
Zone* zone() const { return zone_; }
bool UseSavedSizeTableForGC() const {
return saved_size_table_.load(std::memory_order_relaxed) != nullptr;
}
IsolateGroup* isolate_group() const { return isolate_group_; }
bool reload_aborted() const { return HasReasonsForCancelling(); }
bool reload_skipped() const { return reload_skipped_; }
@ -163,9 +158,6 @@ class IsolateGroupReloadContext {
}
private:
intptr_t GetClassSizeForHeapWalkAt(classid_t cid);
void DiscardSavedClassTable(bool is_rollback);
// Tells whether there are reasons for cancelling the reload.
bool HasReasonsForCancelling() const {
return !reasons_to_cancel_reload_.is_empty();
@ -209,8 +201,6 @@ class IsolateGroupReloadContext {
const char* packages_url);
bool ScriptModifiedSince(const Script& script, int64_t since);
void CheckpointSharedClassTable();
void MorphInstancesPhase1Allocate(ObjectLocator* locator, Become* become);
void MorphInstancesPhase2Become(Become* become);
@ -220,7 +210,7 @@ class IsolateGroupReloadContext {
Zone* zone_;
IsolateGroup* isolate_group_;
SharedClassTable* shared_class_table_;
ClassTable* class_table_;
int64_t start_time_micros_ = -1;
int64_t reload_timestamp_ = -1;
@ -229,8 +219,6 @@ class IsolateGroupReloadContext {
JSONStream* js_;
intptr_t num_old_libs_ = -1;
intptr_t saved_num_cids_ = -1;
std::atomic<intptr_t*> saved_size_table_;
intptr_t num_received_libs_ = -1;
intptr_t bytes_received_libs_ = -1;
intptr_t num_received_classes_ = -1;
@ -283,8 +271,7 @@ class IsolateGroupReloadContext {
friend class ObjectLocator;
friend class ReasonForCancelling;
friend class ProgramReloadContext;
friend class IsolateGroup; // GetClassSizeForHeapWalkAt
friend class UntaggedObject; // GetClassSizeForHeapWalkAt
friend class IsolateGroup;
static Dart_FileModifiedCallback file_modified_callback_;
};
@ -309,10 +296,6 @@ class ProgramReloadContext {
private:
bool IsDirty(const Library& lib);
// Prefers old classes when we are in the middle of a reload.
ClassPtr GetClassForHeapWalkAt(intptr_t cid);
void DiscardSavedClassTable(bool is_rollback);
void RegisterClass(const Class& new_cls);
// Finds the library private key for |replacement_or_new| or return null
@ -338,7 +321,6 @@ class ProgramReloadContext {
void CheckpointLibraries();
void RollbackClasses();
void RollbackLibraries();
#ifdef DEBUG
@ -373,10 +355,6 @@ class ProgramReloadContext {
Zone* zone_;
std::shared_ptr<IsolateGroupReloadContext> group_reload_context_;
IsolateGroup* isolate_group_;
intptr_t saved_num_cids_ = -1;
intptr_t saved_num_tlc_cids_ = -1;
std::atomic<ClassPtr*> saved_class_table_;
std::atomic<ClassPtr*> saved_tlc_class_table_;
MallocGrowableArray<LibraryInfo> library_infos_;
ClassPtr OldClassOrNull(const Class& replacement_or_new);

View file

@ -824,7 +824,7 @@ class InstanceMessageSerializationCluster : public MessageSerializationCluster {
const intptr_t next_field_offset = next_field_offset_;
#if defined(DART_PRECOMPILED_RUNTIME)
const auto unboxed_fields_bitmap =
s->isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(cid_);
s->isolate_group()->class_table()->GetUnboxedFieldsMapAt(cid_);
#endif
for (intptr_t offset = Instance::NextFieldOffset();
offset < next_field_offset; offset += kCompressedWordSize) {
@ -858,7 +858,7 @@ class InstanceMessageSerializationCluster : public MessageSerializationCluster {
const intptr_t next_field_offset = next_field_offset_;
#if defined(DART_PRECOMPILED_RUNTIME)
const auto unboxed_fields_bitmap =
s->isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(cid_);
s->isolate_group()->class_table()->GetUnboxedFieldsMapAt(cid_);
#endif
for (intptr_t offset = Instance::NextFieldOffset();
offset < next_field_offset; offset += kCompressedWordSize) {
@ -907,8 +907,7 @@ class InstanceMessageDeserializationCluster
const intptr_t next_field_offset = cls_.host_next_field_offset();
#if defined(DART_PRECOMPILED_RUNTIME)
const auto unboxed_fields_bitmap =
d->isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(
cls_.id());
d->isolate_group()->class_table()->GetUnboxedFieldsMapAt(cls_.id());
#else
const intptr_t type_argument_field_offset =
cls_.host_type_arguments_field_offset();

View file

@ -2766,8 +2766,8 @@ ObjectPtr Object::Allocate(intptr_t cls_id,
heap->old_space()->AllocateBlack(size);
}
#ifndef PRODUCT
auto class_table = thread->isolate_group()->shared_class_table();
if (class_table->TraceAllocationFor(cls_id)) {
auto class_table = thread->isolate_group()->class_table();
if (class_table->ShouldTraceAllocationFor(cls_id)) {
uint32_t hash =
HeapSnapshotWriter::GetHeapSnapshotIdentityHash(thread, raw_obj);
Profiler::SampleAllocation(thread, cls_id, hash);
@ -3085,7 +3085,8 @@ void Class::InitEmptyFields() {
set_invocation_dispatcher_cache(Object::empty_array());
}
ArrayPtr Class::OffsetToFieldMap(bool original_classes) const {
ArrayPtr Class::OffsetToFieldMap(
ClassTable* class_table /* = nullptr */) const {
ASSERT(is_finalized());
if (untag()->offset_in_words_to_field<std::memory_order_acquire>() ==
Array::null()) {
@ -3104,7 +3105,7 @@ ArrayPtr Class::OffsetToFieldMap(bool original_classes) const {
array.SetAt(f.HostOffset() >> kCompressedWordSizeLog2, f);
}
}
cls = cls.SuperClass(original_classes);
cls = cls.SuperClass(class_table);
}
untag()->set_offset_in_words_to_field<std::memory_order_release>(
array.ptr());
@ -3492,24 +3493,23 @@ TypeArgumentsPtr Class::InstantiateToBounds(Thread* thread) const {
return type_params.defaults();
}
ClassPtr Class::SuperClass(bool original_classes) const {
ClassPtr Class::SuperClass(ClassTable* class_table /* = nullptr */) const {
Thread* thread = Thread::Current();
Zone* zone = thread->zone();
auto isolate_group = thread->isolate_group();
if (class_table == nullptr) {
class_table = thread->isolate_group()->class_table();
}
if (super_type() == AbstractType::null()) {
if (id() == kTypeArgumentsCid) {
// Pretend TypeArguments objects are Dart instances.
return isolate_group->class_table()->At(kInstanceCid);
return class_table->At(kInstanceCid);
}
return Class::null();
}
const AbstractType& sup_type = AbstractType::Handle(zone, super_type());
const intptr_t type_class_id = sup_type.type_class_id();
if (original_classes) {
return isolate_group->GetClassForHeapWalkAt(type_class_id);
} else {
return isolate_group->class_table()->At(type_class_id);
}
return class_table->At(type_class_id);
}
void Class::set_super_type(const AbstractType& value) const {
@ -3561,7 +3561,7 @@ UnboxedFieldBitmap Class::CalculateFieldOffsets() const {
if (FLAG_precompiled_mode) {
host_bitmap =
IsolateGroup::Current()->shared_class_table()->GetUnboxedFieldsMapAt(
IsolateGroup::Current()->class_table()->GetUnboxedFieldsMapAt(
super.id());
}
}
@ -4129,8 +4129,7 @@ void Class::Finalize() const {
isolate_group->class_table()->UpdateClassSize(id(), ptr());
}
if (FLAG_precompiled_mode && !ClassTable::IsTopLevelCid(id())) {
isolate_group->shared_class_table()->SetUnboxedFieldsMapAt(id(),
host_bitmap);
isolate_group->class_table()->SetUnboxedFieldsMapAt(id(), host_bitmap);
}
}
}
@ -4243,8 +4242,8 @@ void Class::set_dependent_code(const Array& array) const {
bool Class::TraceAllocation(IsolateGroup* isolate_group) const {
#ifndef PRODUCT
auto class_table = isolate_group->shared_class_table();
return class_table->TraceAllocationFor(id());
auto class_table = isolate_group->class_table();
return class_table->ShouldTraceAllocationFor(id());
#else
return false;
#endif
@ -4255,7 +4254,7 @@ void Class::SetTraceAllocation(bool trace_allocation) const {
auto isolate_group = IsolateGroup::Current();
const bool changed = trace_allocation != this->TraceAllocation(isolate_group);
if (changed) {
auto class_table = isolate_group->shared_class_table();
auto class_table = isolate_group->class_table();
class_table->SetTraceAllocationFor(id(), trace_allocation);
DisableAllocationStub();
}
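Note the design choice here: SetTraceAllocationFor is paired with DisableAllocationStub, so subsequent allocations of the class take the runtime path where ShouldTraceAllocationFor is consulted (see Object::Allocate earlier in this diff). Below is a compilable toy of that gating; the types and the printf stand-in for profiler sampling are assumptions for illustration.

#include <cstdint>
#include <cstdio>
#include <vector>

// Toy model: one "trace allocations" bit per class id.
struct ToyClassTable {
  std::vector<bool> trace;
  bool ShouldTraceAllocationFor(intptr_t cid) const { return trace[cid]; }
  void SetTraceAllocationFor(intptr_t cid, bool value) { trace[cid] = value; }
};

// Stand-in for the slow allocation path; the real VM samples the profiler
// here instead of printing.
void AllocateSlowPath(const ToyClassTable& table, intptr_t cid) {
  if (table.ShouldTraceAllocationFor(cid)) {
    std::printf("sampled allocation of cid %d\n", static_cast<int>(cid));
  }
}

int main() {
  ToyClassTable table{std::vector<bool>(16, false)};
  table.SetTraceAllocationFor(7, true);
  AllocateSlowPath(table, 7);  // sampled
  AllocateSlowPath(table, 3);  // not sampled
  return 0;
}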
@ -19382,7 +19381,7 @@ uint32_t Instance::CanonicalizeHash() const {
Instance& instance = Instance::Handle(zone);
const auto unboxed_fields_bitmap =
thread->isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(
thread->isolate_group()->class_table()->GetUnboxedFieldsMapAt(
GetClassId());
for (intptr_t offset = Instance::NextFieldOffset();
@ -19453,8 +19452,7 @@ void Instance::CanonicalizeFieldsLocked(Thread* thread) const {
const intptr_t instance_size = SizeFromClass();
ASSERT(instance_size != 0);
const auto unboxed_fields_bitmap =
thread->isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(
class_id);
thread->isolate_group()->class_table()->GetUnboxedFieldsMapAt(class_id);
for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
offset += kCompressedWordSize) {
if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
@ -19988,10 +19986,10 @@ InstancePtr Instance::NewAlreadyFinalized(const Class& cls, Heap::Space space) {
return static_cast<InstancePtr>(raw);
}
InstancePtr Instance::NewFromCidAndSize(SharedClassTable* shared_class_table,
InstancePtr Instance::NewFromCidAndSize(ClassTable* class_table,
classid_t cid,
Heap::Space heap) {
const intptr_t instance_size = shared_class_table->SizeAt(cid);
const intptr_t instance_size = class_table->SizeAt(cid);
ASSERT(instance_size > 0);
ObjectPtr raw = Object::Allocate(cid, instance_size, heap,
Instance::ContainsCompressedPointers());

View file

@ -1302,10 +1302,10 @@ class Class : public Object {
}
// Asserts that the class of the super type has been resolved.
// |original_classes| only has an effect when reloading. If true and we
// are reloading, it will prefer the original classes to the replacement
// classes.
ClassPtr SuperClass(bool original_classes = false) const;
// If |class_table| is provided, it will be used to resolve the class id of
// the super type to the actual class object, instead of using the current
// class table on the isolate group.
ClassPtr SuperClass(ClassTable* class_table = nullptr) const;
// Interfaces is an array of Types.
ArrayPtr interfaces() const {
@ -1491,10 +1491,9 @@ class Class : public Object {
// Returns an array of all instance fields of this class and its superclasses
// indexed by offset in words.
// |original_classes| only has an effect when reloading. If true and we
// are reloading, it will prefer the original classes to the replacement
// classes.
ArrayPtr OffsetToFieldMap(bool original_classes = false) const;
// If |class_table| is provided, it will be used to resolve superclasses by
// class id, instead of the current class table stored on the isolate group
// (see the usage sketch below).
ArrayPtr OffsetToFieldMap(ClassTable* class_table = nullptr) const;
// Returns true if non-static fields are defined.
bool HasInstanceFields() const;
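A usage sketch for the table-parameterized accessors above, mirroring the Class::CheckReload call site later in this diff; cls and replacement stand for the pre-reload class and its replacement.

// Old layout, resolved against the original (pre-reload) classes:
const Array& old_fields = Array::Handle(
    cls.OffsetToFieldMap(IsolateGroup::Current()->heap_walk_class_table()));
// New layout, resolved against the current program class table:
const Array& new_fields = Array::Handle(replacement.OffsetToFieldMap());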
@ -7765,7 +7764,7 @@ class Instance : public Object {
StoreCompressedPointer(RawFieldAddrAtOffset(offset), value.ptr());
}
static InstancePtr NewFromCidAndSize(SharedClassTable* shared_class_table,
static InstancePtr NewFromCidAndSize(ClassTable* class_table,
classid_t cid,
Heap::Space heap = Heap::kNew);

View file

@ -1857,8 +1857,7 @@ class FastObjectCopy : public ObjectCopy<FastObjectCopyBase> {
return;
}
#if defined(DART_PRECOMPILED_RUNTIME)
const auto bitmap =
class_table_->shared_class_table()->GetUnboxedFieldsMapAt(cid);
const auto bitmap = class_table_->GetUnboxedFieldsMapAt(cid);
CopyUserdefinedInstanceAOT(Instance::RawCast(from), Instance::RawCast(to),
bitmap);
#else
@ -1992,8 +1991,7 @@ class SlowObjectCopy : public ObjectCopy<SlowObjectCopyBase> {
return;
}
#if defined(DART_PRECOMPILED_RUNTIME)
const auto bitmap =
class_table_->shared_class_table()->GetUnboxedFieldsMapAt(cid);
const auto bitmap = class_table_->GetUnboxedFieldsMapAt(cid);
CopyUserdefinedInstanceAOT(from, to, bitmap);
#else
CopyUserdefinedInstance(from, to);

View file

@ -688,8 +688,8 @@ void Class::CheckReload(const Class& replacement,
// Consts can't lose fields.
bool field_removed = false;
const Array& old_fields =
Array::Handle(OffsetToFieldMap(true /* original classes */));
const Array& old_fields = Array::Handle(
OffsetToFieldMap(IsolateGroup::Current()->heap_walk_class_table()));
const Array& new_fields = Array::Handle(replacement.OffsetToFieldMap());
if (new_fields.Length() < old_fields.Length()) {
field_removed = true;
@ -748,8 +748,12 @@ void Class::CheckReload(const Class& replacement,
bool Class::RequiresInstanceMorphing(const Class& replacement) const {
// Get the field maps for both classes. These field maps walk the class
// hierarchy.
auto isolate_group = IsolateGroup::Current();
// heap_walk_class_table is the original class table from before the
// sources were reloaded.
const Array& fields =
Array::Handle(OffsetToFieldMap(true /* original classes */));
Array::Handle(OffsetToFieldMap(isolate_group->heap_walk_class_table()));
const Array& replacement_fields =
Array::Handle(replacement.OffsetToFieldMap());
@ -788,8 +792,7 @@ bool Class::CanReloadFinalized(const Class& replacement,
// Make sure the declaration type argument count matches for the two classes,
// e.g. class A<int,B> {} cannot be replaced with class A<B> {}.
auto group_context = context->group_reload_context();
auto shared_class_table =
group_context->isolate_group()->shared_class_table();
auto class_table = group_context->isolate_group()->class_table();
if (NumTypeArguments() != replacement.NumTypeArguments()) {
group_context->AddReasonForCancelling(
new (context->zone())
@ -802,7 +805,7 @@ bool Class::CanReloadFinalized(const Class& replacement,
// We unconditionally create an instance morpher. As a side effect of
// building the morpher, we will mark all new fields as late.
auto instance_morpher = InstanceMorpher::CreateFromClassDescriptors(
context->zone(), shared_class_table, *this, replacement);
context->zone(), class_table, *this, replacement);
group_context->EnsureHasInstanceMorpherFor(cid, instance_morpher);
}
return true;

View file

@ -69,11 +69,11 @@ void UntaggedObject::Validate(IsolateGroup* isolate_group) const {
}
}
const intptr_t class_id = ClassIdTag::decode(tags);
if (!isolate_group->shared_class_table()->IsValidIndex(class_id)) {
if (!isolate_group->class_table()->IsValidIndex(class_id)) {
FATAL1("Invalid class id encountered %" Pd "\n", class_id);
}
if (class_id == kNullCid &&
isolate_group->shared_class_table()->HasValidClassAt(class_id)) {
isolate_group->class_table()->HasValidClassAt(class_id)) {
// Null class not yet initialized; skip.
return;
}
@ -241,24 +241,15 @@ intptr_t UntaggedObject::HeapSizeFromClass(uword tags) const {
// TODO(koda): Add Size(ClassTable*) interface to allow caching in loops.
auto isolate_group = IsolateGroup::Current();
#if defined(DEBUG)
#if !defined(DART_PRECOMPILED_RUNTIME)
auto reload_context = isolate_group->reload_context();
const bool use_saved_class_table =
reload_context != nullptr ? reload_context->UseSavedSizeTableForGC()
: false;
#else
const bool use_saved_class_table = false;
#endif
auto class_table = isolate_group->shared_class_table();
ASSERT(use_saved_class_table || class_table->SizeAt(class_id) > 0);
auto class_table = isolate_group->heap_walk_class_table();
ASSERT(class_table->SizeAt(class_id) > 0);
if (!class_table->IsValidIndex(class_id) ||
(!class_table->HasValidClassAt(class_id) && !use_saved_class_table)) {
!class_table->HasValidClassAt(class_id)) {
FATAL3("Invalid cid: %" Pd ", obj: %p, tags: %x. Corrupt heap?",
class_id, this, static_cast<uint32_t>(tags));
}
#endif // DEBUG
instance_size = isolate_group->GetClassSizeForHeapWalkAt(class_id);
instance_size = isolate_group->heap_walk_class_table()->SizeAt(class_id);
}
}
ASSERT(instance_size != 0);
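The fallback above has a simple shape: small objects encode their size in the header tags, and everything else asks the class table; using the heap-walk table keeps GC sizing consistent with the original classes during a reload. A compilable toy with a stand-in size table:

#include <cstdint>
#include <vector>

struct ToySizeTable {
  std::vector<intptr_t> sizes;
  intptr_t SizeAt(intptr_t cid) const { return sizes[cid]; }
};

intptr_t HeapSize(intptr_t size_from_tags, intptr_t cid,
                  const ToySizeTable& heap_walk_table) {
  if (size_from_tags != 0) {
    return size_from_tags;  // size fit in the object's header tags
  }
  return heap_walk_table.SizeAt(cid);  // variable-size: ask the class table
}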
@ -386,7 +377,8 @@ void UntaggedObject::VisitPointersPrecise(IsolateGroup* isolate_group,
}
// N.B.: Not using the heap size!
uword next_field_offset = isolate_group->GetClassForHeapWalkAt(class_id)
uword next_field_offset = visitor->class_table()
->At(class_id)
->untag()
->host_next_field_offset_in_words_
<< kCompressedWordSizeLog2;
@ -399,7 +391,7 @@ void UntaggedObject::VisitPointersPrecise(IsolateGroup* isolate_group,
#if defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
const auto unboxed_fields_bitmap =
visitor->shared_class_table()->GetUnboxedFieldsMapAt(class_id);
visitor->class_table()->GetUnboxedFieldsMapAt(class_id);
if (!unboxed_fields_bitmap.IsEmpty()) {
intptr_t bit = sizeof(UntaggedObject) / kCompressedWordSize;
@ -732,8 +724,7 @@ intptr_t UntaggedInstance::VisitInstancePointers(
uword tags = raw_obj->untag()->tags_;
intptr_t instance_size = SizeTag::decode(tags);
if (instance_size == 0) {
instance_size = visitor->isolate_group()->GetClassSizeForHeapWalkAt(
raw_obj->GetClassId());
instance_size = visitor->class_table()->SizeAt(raw_obj->GetClassId());
}
// Calculate the first and last raw object pointer fields.

View file

@ -418,7 +418,7 @@ class UntaggedObject {
#if defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
const auto unboxed_fields_bitmap =
visitor->shared_class_table()->GetUnboxedFieldsMapAt(class_id);
visitor->class_table()->GetUnboxedFieldsMapAt(class_id);
if (!unboxed_fields_bitmap.IsEmpty()) {
intptr_t bit = sizeof(UntaggedObject) / kCompressedWordSize;
@ -456,7 +456,7 @@ class UntaggedObject {
#if defined(SUPPORT_UNBOXED_INSTANCE_FIELDS)
const auto unboxed_fields_bitmap =
visitor->shared_class_table()->GetUnboxedFieldsMapAt(class_id);
visitor->class_table()->GetUnboxedFieldsMapAt(class_id);
if (!unboxed_fields_bitmap.IsEmpty()) {
intptr_t bit = sizeof(UntaggedObject) / kCompressedWordSize;

View file

@ -3397,7 +3397,7 @@ static void MarkClasses(const Class& root,
bool include_implementors) {
Thread* thread = Thread::Current();
HANDLESCOPE(thread);
SharedClassTable* table = thread->isolate()->group()->shared_class_table();
ClassTable* table = thread->isolate()->group()->class_table();
GrowableArray<const Class*> worklist;
table->SetCollectInstancesFor(root.id(), true);
worklist.Add(&root);
@ -3437,7 +3437,7 @@ static void MarkClasses(const Class& root,
}
static void UnmarkClasses() {
SharedClassTable* table = IsolateGroup::Current()->shared_class_table();
ClassTable* table = IsolateGroup::Current()->class_table();
for (intptr_t i = 1; i < table->NumCids(); i++) {
table->SetCollectInstancesFor(i, false);
}
@ -3447,7 +3447,7 @@ class GetInstancesVisitor : public ObjectGraph::Visitor {
public:
GetInstancesVisitor(ZoneGrowableHandlePtrArray<Object>* storage,
intptr_t limit)
: table_(IsolateGroup::Current()->shared_class_table()),
: table_(IsolateGroup::Current()->class_table()),
storage_(storage),
limit_(limit),
count_(0) {}
@ -3469,7 +3469,7 @@ class GetInstancesVisitor : public ObjectGraph::Visitor {
intptr_t count() const { return count_; }
private:
SharedClassTable* const table_;
ClassTable* const table_;
ZoneGrowableHandlePtrArray<Object>* storage_;
const intptr_t limit_;
intptr_t count_;

View file

@ -11,6 +11,6 @@ namespace dart {
ObjectPointerVisitor::ObjectPointerVisitor(IsolateGroup* isolate_group)
: isolate_group_(isolate_group),
gc_root_type_("unknown"),
shared_class_table_(isolate_group->shared_class_table()) {}
class_table_(isolate_group->heap_walk_class_table()) {}
} // namespace dart
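Why the constructor snapshots heap_walk_class_table instead of re-reading it per object: every visit in a GC cycle should consult one consistent table, even if a reload swaps the program table mid-cycle. A toy model with stand-in types:

struct ToyTable {};

struct ToyIsolateGroup {
  ToyTable heap_walk_table;
  ToyTable* heap_walk_class_table() { return &heap_walk_table; }
};

class ToyVisitor {
 public:
  explicit ToyVisitor(ToyIsolateGroup* group)
      : class_table_(group->heap_walk_class_table()) {}  // pinned once
  const ToyTable* class_table() const { return class_table_; }

 private:
  ToyTable* class_table_;  // the same table for the whole visitation
};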

View file

@ -60,9 +60,7 @@ class ObjectPointerVisitor {
// Otherwise trace field values through isolate's field_table.
virtual bool trace_values_through_fields() const { return false; }
const SharedClassTable* shared_class_table() const {
return shared_class_table_;
}
const ClassTable* class_table() const { return class_table_; }
// Returns true if pointers of the given SuspendState object can be visited.
// Compactor overrides this method in order to postpone visiting SuspendState
@ -76,7 +74,7 @@ class ObjectPointerVisitor {
private:
IsolateGroup* isolate_group_;
const char* gc_root_type_;
SharedClassTable* shared_class_table_;
ClassTable* class_table_;
DISALLOW_IMPLICIT_CONSTRUCTORS(ObjectPointerVisitor);
};