[vm/concurrency] Move all information except for the class pointers out of [ClassTable] into [SharedClassTable]

This CL moves heap related information (namely instance sizes and
allocation stats) out of the [ClassTable] into a [SharedClassTable].
Both classes are always in sync (i.e. they have the same number of entries).

This CL also changes GC related code to start using the size information
from the new [SharedClassTable].

In a further step we will move the heap as well as this shared class
table out of the [Isolate] and into [IsolateGroup].

Issue https://github.com/dart-lang/sdk/issues/36097

Change-Id: Id54a89c9251ad3bbc13e60d32dc4f7bcc7f1d805
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/116064
Commit-Queue: Martin Kustermann <kustermann@google.com>
Reviewed-by: Ryan Macnak <rmacnak@google.com>
This commit is contained in:
Martin Kustermann 2019-09-10 12:48:16 +00:00 committed by commit-bot@chromium.org
parent 9a2e2801d9
commit ef9d699f07
23 changed files with 627 additions and 360 deletions

View file

@ -16,58 +16,99 @@ namespace dart {
DEFINE_FLAG(bool, print_class_table, false, "Print initial class table.");
ClassTable::ClassTable()
SharedClassTable::SharedClassTable()
: top_(kNumPredefinedCids),
capacity_(0),
table_(NULL),
old_tables_(new MallocGrowableArray<ClassAndSize*>()) {
NOT_IN_PRODUCT(class_heap_stats_table_ = NULL);
old_tables_(new MallocGrowableArray<intptr_t*>()) {
if (Dart::vm_isolate() == NULL) {
capacity_ = initial_capacity_;
table_ = reinterpret_cast<ClassAndSize*>(
calloc(capacity_, sizeof(ClassAndSize))); // NOLINT
ASSERT(kInitialCapacity >= kNumPredefinedCids);
capacity_ = kInitialCapacity;
// Note that [calloc] will zero-initialize the memory.
table_ = static_cast<intptr_t*>(calloc(capacity_, sizeof(intptr_t)));
} else {
// Duplicate the class table from the VM isolate.
ClassTable* vm_class_table = Dart::vm_isolate()->class_table();
capacity_ = vm_class_table->capacity_;
table_ = reinterpret_cast<ClassAndSize*>(
calloc(capacity_, sizeof(ClassAndSize))); // NOLINT
auto vm_shared_class_table = Dart::vm_isolate()->shared_class_table();
capacity_ = vm_shared_class_table->capacity_;
// Note that [calloc] will zero-initialize the memory.
table_ = static_cast<intptr_t*>(calloc(capacity_, sizeof(RawClass*)));
// The following cids don't have a corresponding class object in Dart code.
// We therefore need to initialize them eagerly.
for (intptr_t i = kObjectCid; i < kInstanceCid; i++) {
table_[i] = vm_class_table->PairAt(i);
table_[i] = vm_shared_class_table->SizeAt(i);
}
table_[kTypeArgumentsCid] = vm_class_table->PairAt(kTypeArgumentsCid);
table_[kFreeListElement] = vm_class_table->PairAt(kFreeListElement);
table_[kForwardingCorpse] = vm_class_table->PairAt(kForwardingCorpse);
table_[kDynamicCid] = vm_class_table->PairAt(kDynamicCid);
table_[kVoidCid] = vm_class_table->PairAt(kVoidCid);
table_[kTypeArgumentsCid] =
vm_shared_class_table->SizeAt(kTypeArgumentsCid);
table_[kFreeListElement] = vm_shared_class_table->SizeAt(kFreeListElement);
table_[kForwardingCorpse] =
vm_shared_class_table->SizeAt(kForwardingCorpse);
table_[kDynamicCid] = vm_shared_class_table->SizeAt(kDynamicCid);
table_[kVoidCid] = vm_shared_class_table->SizeAt(kVoidCid);
}
#ifndef PRODUCT
class_heap_stats_table_ = reinterpret_cast<ClassHeapStats*>(
calloc(capacity_, sizeof(ClassHeapStats))); // NOLINT
class_heap_stats_table_ = static_cast<ClassHeapStats*>(
malloc(capacity_ * sizeof(ClassHeapStats))); // NOLINT
for (intptr_t i = 0; i < capacity_; i++) {
class_heap_stats_table_[i].Initialize();
}
#endif // !PRODUCT
}
ClassTable::ClassTable(ClassTable* original)
: top_(original->top_),
capacity_(original->top_),
table_(original->table_),
old_tables_(NULL) {
NOT_IN_PRODUCT(class_heap_stats_table_ = NULL);
}
ClassTable::~ClassTable() {
SharedClassTable::~SharedClassTable() {
if (old_tables_ != NULL) {
FreeOldTables();
delete old_tables_;
free(table_);
NOT_IN_PRODUCT(free(class_heap_stats_table_));
} else {
// This instance was a shallow copy. It doesn't own any memory.
NOT_IN_PRODUCT(ASSERT(class_heap_stats_table_ == NULL));
}
NOT_IN_PRODUCT(free(class_heap_stats_table_));
}
ClassTable::ClassTable(SharedClassTable* shared_class_table)
: top_(kNumPredefinedCids),
capacity_(0),
table_(NULL),
old_tables_(new MallocGrowableArray<ClassAndSize*>()),
old_class_tables_(new MallocGrowableArray<RawClass**>()),
shared_class_table_(shared_class_table) {
if (Dart::vm_isolate() == NULL) {
ASSERT(kInitialCapacity >= kNumPredefinedCids);
capacity_ = kInitialCapacity;
// Note that [calloc] will zero-initialize the memory.
table_ = static_cast<RawClass**>(calloc(capacity_, sizeof(RawClass*)));
} else {
// Duplicate the class table from the VM isolate.
ClassTable* vm_class_table = Dart::vm_isolate()->class_table();
capacity_ = vm_class_table->capacity_;
// Note that [calloc] will zero-initialize the memory.
table_ = static_cast<RawClass**>(calloc(capacity_, sizeof(RawClass*)));
// The following cids don't have a corresponding class object in Dart code.
// We therefore need to initialize them eagerly.
for (intptr_t i = kObjectCid; i < kInstanceCid; i++) {
table_[i] = vm_class_table->At(i);
}
table_[kTypeArgumentsCid] = vm_class_table->At(kTypeArgumentsCid);
table_[kFreeListElement] = vm_class_table->At(kFreeListElement);
table_[kForwardingCorpse] = vm_class_table->At(kForwardingCorpse);
table_[kDynamicCid] = vm_class_table->At(kDynamicCid);
table_[kVoidCid] = vm_class_table->At(kVoidCid);
}
}
ClassTable::ClassTable(ClassTable* original,
SharedClassTable* shared_class_table)
: top_(original->top_),
capacity_(original->top_),
table_(original->table_),
old_tables_(nullptr),
old_class_tables_(nullptr),
shared_class_table_(shared_class_table) {}
ClassTable::~ClassTable() {
if (old_tables_ != nullptr || old_class_tables_ != nullptr) {
FreeOldTables();
delete old_tables_;
delete old_class_tables_;
}
free(table_);
}
void ClassTable::AddOldTable(ClassAndSize* old_table) {
@ -79,17 +120,32 @@ void ClassTable::FreeOldTables() {
while (old_tables_->length() > 0) {
free(old_tables_->RemoveLast());
}
while (old_class_tables_->length() > 0) {
free(old_class_tables_->RemoveLast());
}
}
void SharedClassTable::FreeOldTables() {
while (old_tables_->length() > 0) {
free(old_tables_->RemoveLast());
}
}
void ClassTable::Register(const Class& cls) {
ASSERT(Thread::Current()->IsMutatorThread());
intptr_t index = cls.id();
const intptr_t index = cls.id();
// During the transition period we would like [SharedClassTable] to operate in
// parallel to [ClassTable].
const intptr_t expected_cid =
shared_class_table_->Register(index, Class::instance_size(cls.raw()));
if (index != kIllegalCid) {
ASSERT(index > 0);
ASSERT(index < kNumPredefinedCids);
ASSERT(table_[index].class_ == NULL);
ASSERT(index < capacity_);
table_[index] = ClassAndSize(cls.raw(), Class::instance_size(cls.raw()));
ASSERT(index > 0 && index < kNumPredefinedCids && index < top_);
ASSERT(table_[index] == nullptr);
table_[index] = cls.raw();
// Add the vtable for this predefined class into the static vtable registry
// if it has not been setup yet.
cpp_vtable cls_vtable = cls.handle_vtable();
@ -100,95 +156,141 @@ void ClassTable::Register(const Class& cls) {
}
} else {
if (top_ == capacity_) {
// Grow the capacity of the class table.
// TODO(koda): Add ClassTable::Grow to share code.
#ifndef PRODUCT
// Wait for any marking tasks to complete. Allocation stats in the
// marker rely on the class table size not changing.
Thread* thread = Thread::Current();
thread->heap()->WaitForMarkerTasks(thread);
#endif
intptr_t new_capacity = capacity_ + capacity_increment_;
ClassAndSize* new_table = reinterpret_cast<ClassAndSize*>(
malloc(new_capacity * sizeof(ClassAndSize))); // NOLINT
memmove(new_table, table_, capacity_ * sizeof(ClassAndSize));
#ifndef PRODUCT
ClassHeapStats* new_stats_table = reinterpret_cast<ClassHeapStats*>(
realloc(class_heap_stats_table_,
new_capacity * sizeof(ClassHeapStats))); // NOLINT
#endif
for (intptr_t i = capacity_; i < new_capacity; i++) {
new_table[i] = ClassAndSize(NULL, 0);
NOT_IN_PRODUCT(new_stats_table[i].Initialize());
}
capacity_ = new_capacity;
old_tables_->Add(table_);
table_ = new_table; // TODO(koda): This should use atomics.
NOT_IN_PRODUCT(class_heap_stats_table_ = new_stats_table);
const intptr_t new_capacity = capacity_ + kCapacityIncrement;
Grow(new_capacity);
}
ASSERT(top_ < capacity_);
if (!Class::is_valid_id(top_)) {
FATAL1("Fatal error in ClassTable::Register: invalid index %" Pd "\n",
top_);
}
cls.set_id(top_);
table_[top_] = ClassAndSize(cls.raw());
table_[top_] = cls.raw();
top_++; // Increment next index.
}
ASSERT(expected_cid == cls.id());
}
intptr_t SharedClassTable::Register(intptr_t index, intptr_t size) {
if (!Class::is_valid_id(top_)) {
FATAL1("Fatal error in SharedClassTable::Register: invalid index %" Pd "\n",
top_);
}
ASSERT(Thread::Current()->IsMutatorThread());
if (index != kIllegalCid) {
// We are registring the size of a predefined class.
ASSERT(index > 0 && index < kNumPredefinedCids);
SetSizeAt(index, size);
return index;
} else {
if (top_ == capacity_) {
const intptr_t new_capacity = capacity_ + kCapacityIncrement;
Grow(new_capacity);
}
ASSERT(top_ < capacity_);
table_[top_] = size;
return top_++; // Increment next index.
}
}
void ClassTable::AllocateIndex(intptr_t index) {
// This is called by a snapshot reader.
shared_class_table_->AllocateIndex(index);
ASSERT(Class::is_valid_id(index));
if (index >= capacity_) {
// Grow the capacity of the class table.
// TODO(koda): Add ClassTable::Grow to share code.
#ifndef PRODUCT
// Wait for any marking tasks to complete. Allocation stats in the
// marker rely on the class table size not changing.
Thread* thread = Thread::Current();
thread->heap()->WaitForMarkerTasks(thread);
#endif
intptr_t new_capacity = index + capacity_increment_;
if (!Class::is_valid_id(index) || new_capacity < capacity_) {
FATAL1("Fatal error in ClassTable::Register: invalid index %" Pd "\n",
index);
}
ClassAndSize* new_table = reinterpret_cast<ClassAndSize*>(
malloc(new_capacity * sizeof(ClassAndSize))); // NOLINT
memmove(new_table, table_, capacity_ * sizeof(ClassAndSize));
#ifndef PRODUCT
ClassHeapStats* new_stats_table = reinterpret_cast<ClassHeapStats*>(
realloc(class_heap_stats_table_,
new_capacity * sizeof(ClassHeapStats))); // NOLINT
#endif
for (intptr_t i = capacity_; i < new_capacity; i++) {
new_table[i] = ClassAndSize(NULL);
NOT_IN_PRODUCT(new_stats_table[i].Initialize());
}
capacity_ = new_capacity;
old_tables_->Add(table_);
table_ = new_table; // TODO(koda): This should use atomics.
NOT_IN_PRODUCT(class_heap_stats_table_ = new_stats_table);
ASSERT(capacity_increment_ >= 1);
const intptr_t new_capacity = index + kCapacityIncrement;
Grow(new_capacity);
}
ASSERT(table_[index].class_ == NULL);
ASSERT(table_[index] == nullptr);
if (index >= top_) {
top_ = index + 1;
}
ASSERT(top_ == shared_class_table_->top_);
ASSERT(capacity_ == shared_class_table_->capacity_);
}
void ClassTable::Grow(intptr_t new_capacity) {
ASSERT(new_capacity > capacity_);
auto new_table = static_cast<RawClass**>(
malloc(new_capacity * sizeof(RawClass*))); // NOLINT
memmove(new_table, table_, top_ * sizeof(RawClass*));
memset(new_table + top_, 0, (new_capacity - top_) * sizeof(RawClass*));
capacity_ = new_capacity;
old_class_tables_->Add(table_);
table_ = new_table; // TODO(koda): This should use atomics.
}
void SharedClassTable::AllocateIndex(intptr_t index) {
// This is called by a snapshot reader.
ASSERT(Class::is_valid_id(index));
if (index >= capacity_) {
const intptr_t new_capacity = index + kCapacityIncrement;
Grow(new_capacity);
}
ASSERT(table_[index] == 0);
if (index >= top_) {
top_ = index + 1;
}
}
void SharedClassTable::Grow(intptr_t new_capacity) {
ASSERT(new_capacity >= capacity_);
#ifndef PRODUCT
// Wait for any marking tasks to complete. Allocation stats in the
// marker rely on the class table size not changing.
Thread* thread = Thread::Current();
thread->heap()->WaitForMarkerTasks(thread);
#endif
intptr_t* new_table = static_cast<intptr_t*>(
malloc(new_capacity * sizeof(intptr_t))); // NOLINT
memmove(new_table, table_, top_ * sizeof(intptr_t));
memset(new_table + top_, 0, (new_capacity - top_) * sizeof(intptr_t));
#ifndef PRODUCT
auto new_stats_table = static_cast<ClassHeapStats*>(
realloc(class_heap_stats_table_,
new_capacity * sizeof(ClassHeapStats))); // NOLINT
#endif
for (intptr_t i = capacity_; i < new_capacity; i++) {
new_table[i] = 0;
NOT_IN_PRODUCT(new_stats_table[i].Initialize());
}
capacity_ = new_capacity;
old_tables_->Add(table_);
table_ = new_table; // TODO(koda): This should use atomics.
NOT_IN_PRODUCT(class_heap_stats_table_ = new_stats_table);
}
void ClassTable::Unregister(intptr_t index) {
table_[index] = ClassAndSize(NULL);
shared_class_table_->Unregister(index);
table_[index] = nullptr;
}
void SharedClassTable::Unregister(intptr_t index) {
table_[index] = 0;
}
void ClassTable::Remap(intptr_t* old_to_new_cid) {
ASSERT(Thread::Current()->IsAtSafepoint());
intptr_t num_cids = NumCids();
ClassAndSize* cls_by_old_cid = new ClassAndSize[num_cids];
shared_class_table_->Remap(old_to_new_cid);
const intptr_t num_cids = NumCids();
auto cls_by_old_cid = new RawClass*[num_cids];
memmove(cls_by_old_cid, table_, sizeof(RawClass*) * num_cids);
for (intptr_t i = 0; i < num_cids; i++) {
table_[old_to_new_cid[i]] = cls_by_old_cid[i];
}
delete[] cls_by_old_cid;
}
void SharedClassTable::Remap(intptr_t* old_to_new_cid) {
ASSERT(Thread::Current()->IsAtSafepoint());
const intptr_t num_cids = NumCids();
intptr_t* cls_by_old_cid = new intptr_t[num_cids];
for (intptr_t i = 0; i < num_cids; i++) {
cls_by_old_cid[i] = table_[i];
}
@ -202,7 +304,7 @@ void ClassTable::VisitObjectPointers(ObjectPointerVisitor* visitor) {
ASSERT(visitor != NULL);
visitor->set_gc_root_type("class table");
for (intptr_t i = 0; i < top_; i++) {
visitor->VisitPointer(reinterpret_cast<RawObject**>(&(table_[i].class_)));
visitor->VisitPointer(reinterpret_cast<RawObject**>(&(table_[i])));
}
visitor->clear_gc_root_type();
}
@ -249,17 +351,11 @@ void ClassTable::Print() {
}
void ClassTable::SetAt(intptr_t index, RawClass* raw_cls) {
// This is called by snapshot reader and class finalizer.
ASSERT(index < capacity_);
if (raw_cls == NULL) {
table_[index] = ClassAndSize(raw_cls, 0);
} else {
// Ensure we never change size for a given cid from one non-zero size to
// another non-zero size.
const intptr_t old_size = table_[index].size_;
const intptr_t new_size = Class::instance_size(raw_cls);
ASSERT(old_size == 0 || old_size == new_size);
table_[index] = ClassAndSize(raw_cls, new_size);
}
const intptr_t size = raw_cls == nullptr ? 0 : Class::instance_size(raw_cls);
shared_class_table_->SetSizeAt(index, size);
table_[index] = raw_cls;
}
ClassAndSize::ClassAndSize(RawClass* clazz) : class_(clazz) {
@ -437,32 +533,32 @@ void ClassHeapStats::PrintToJSONObject(const Class& cls,
obj->AddProperty64("bytesCurrent", bytes_current);
}
void ClassTable::UpdateAllocatedOldGC(intptr_t cid, intptr_t size) {
void SharedClassTable::UpdateAllocatedOldGC(intptr_t cid, intptr_t size) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
ASSERT(stats != NULL);
ASSERT(size != 0);
stats->recent.AddOldGC(size);
}
void ClassTable::UpdateAllocatedExternalNew(intptr_t cid, intptr_t size) {
void SharedClassTable::UpdateAllocatedExternalNew(intptr_t cid, intptr_t size) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
ASSERT(stats != NULL);
stats->recent.AddNewExternal(size);
}
void ClassTable::UpdateAllocatedExternalOld(intptr_t cid, intptr_t size) {
void SharedClassTable::UpdateAllocatedExternalOld(intptr_t cid, intptr_t size) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
ASSERT(stats != NULL);
stats->recent.AddOldExternal(size);
}
bool ClassTable::ShouldUpdateSizeForClassId(intptr_t cid) {
bool SharedClassTable::ShouldUpdateSizeForClassId(intptr_t cid) {
return !RawObject::IsVariableSizeClassId(cid);
}
ClassHeapStats* ClassTable::StatsWithUpdatedSize(intptr_t cid) {
if (!HasValidClassAt(cid) || (cid == kFreeListElement) ||
(cid == kForwardingCorpse) || (cid == kSmiCid)) {
if (!HasValidClassAt(cid) || cid == kFreeListElement ||
cid == kForwardingCorpse || cid == kSmiCid) {
return NULL;
}
Class& cls = Class::Handle(At(cid));
@ -470,48 +566,53 @@ ClassHeapStats* ClassTable::StatsWithUpdatedSize(intptr_t cid) {
// Not finalized.
return NULL;
}
return shared_class_table_->StatsWithUpdatedSize(cid, cls.instance_size());
}
ClassHeapStats* SharedClassTable::StatsWithUpdatedSize(intptr_t cid,
intptr_t size) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
if (ShouldUpdateSizeForClassId(cid)) {
stats->UpdateSize(cls.instance_size());
stats->UpdateSize(size);
}
stats->Verify();
return stats;
}
void ClassTable::ResetCountersOld() {
void SharedClassTable::ResetCountersOld() {
for (intptr_t i = 0; i < top_; i++) {
class_heap_stats_table_[i].ResetAtOldGC();
}
}
void ClassTable::ResetCountersNew() {
void SharedClassTable::ResetCountersNew() {
for (intptr_t i = 0; i < top_; i++) {
class_heap_stats_table_[i].ResetAtNewGC();
}
}
void ClassTable::UpdatePromoted() {
void SharedClassTable::UpdatePromoted() {
for (intptr_t i = 0; i < top_; i++) {
class_heap_stats_table_[i].UpdatePromotedAfterNewGC();
}
}
intptr_t ClassTable::ClassOffsetFor(intptr_t cid) {
intptr_t SharedClassTable::ClassOffsetFor(intptr_t cid) {
return cid * sizeof(ClassHeapStats); // NOLINT
}
intptr_t ClassTable::NewSpaceCounterOffsetFor(intptr_t cid) {
intptr_t SharedClassTable::NewSpaceCounterOffsetFor(intptr_t cid) {
const intptr_t class_offset = ClassOffsetFor(cid);
const intptr_t count_field_offset =
ClassHeapStats::allocated_since_gc_new_space_offset();
return class_offset + count_field_offset;
}
intptr_t ClassTable::StateOffsetFor(intptr_t cid) {
intptr_t SharedClassTable::StateOffsetFor(intptr_t cid) {
return ClassOffsetFor(cid) + ClassHeapStats::state_offset();
}
intptr_t ClassTable::NewSpaceSizeOffsetFor(intptr_t cid) {
intptr_t SharedClassTable::NewSpaceSizeOffsetFor(intptr_t cid) {
const uword class_offset = ClassOffsetFor(cid);
const uword size_field_offset =
ClassHeapStats::allocated_size_since_gc_new_space_offset();
@ -563,16 +664,21 @@ void ClassTable::AllocationProfilePrintJSON(JSONStream* stream, bool internal) {
}
}
void ClassTable::ResetAllocationAccumulators() {
void SharedClassTable::ResetAllocationAccumulators() {
for (intptr_t i = 1; i < top_; i++) {
ClassHeapStats* stats = StatsWithUpdatedSize(i);
if (stats != NULL) {
stats->ResetAccumulator();
if (HasValidClassAt(i)) {
const intptr_t size = table_[i];
ClassHeapStats* stats = StatsWithUpdatedSize(i, size);
if (stats != NULL) {
stats->ResetAccumulator();
}
}
}
}
void ClassTable::UpdateLiveOld(intptr_t cid, intptr_t size, intptr_t count) {
void SharedClassTable::UpdateLiveOld(intptr_t cid,
intptr_t size,
intptr_t count) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
ASSERT(stats != NULL);
ASSERT(size >= 0);
@ -580,28 +686,28 @@ void ClassTable::UpdateLiveOld(intptr_t cid, intptr_t size, intptr_t count) {
stats->post_gc.AddOld(size, count);
}
void ClassTable::UpdateLiveNew(intptr_t cid, intptr_t size) {
void SharedClassTable::UpdateLiveNew(intptr_t cid, intptr_t size) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
ASSERT(stats != NULL);
ASSERT(size >= 0);
stats->post_gc.AddNew(size);
}
void ClassTable::UpdateLiveNewGC(intptr_t cid, intptr_t size) {
void SharedClassTable::UpdateLiveNewGC(intptr_t cid, intptr_t size) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
ASSERT(stats != NULL);
ASSERT(size >= 0);
stats->post_gc.AddNewGC(size);
}
void ClassTable::UpdateLiveOldExternal(intptr_t cid, intptr_t size) {
void SharedClassTable::UpdateLiveOldExternal(intptr_t cid, intptr_t size) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
ASSERT(stats != NULL);
ASSERT(size >= 0);
stats->post_gc.AddOldExternal(size);
}
void ClassTable::UpdateLiveNewExternal(intptr_t cid, intptr_t size) {
void SharedClassTable::UpdateLiveNewExternal(intptr_t cid, intptr_t size) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
ASSERT(stats != NULL);
ASSERT(size >= 0);

View file

@ -7,7 +7,9 @@
#include "platform/assert.h"
#include "platform/atomic.h"
#include "vm/bitfield.h"
#include "vm/class_id.h"
#include "vm/globals.h"
namespace dart {
@ -15,6 +17,8 @@ namespace dart {
class Class;
class ClassStats;
class ClassTable;
class Isolate;
class IsolateGroup;
class JSONArray;
class JSONObject;
class JSONStream;
@ -185,21 +189,180 @@ class ClassHeapStats {
};
#endif // !PRODUCT
// Registry of all known classes and their sizes.
//
// The GC will only need the information in this shared class table to scan
// object pointers.
class SharedClassTable {
public:
SharedClassTable();
~SharedClassTable();
// Thread-safe.
intptr_t SizeAt(intptr_t index) const {
ASSERT(IsValidIndex(index));
return table_[index];
}
bool HasValidClassAt(intptr_t index) const {
ASSERT(IsValidIndex(index));
ASSERT(table_[index] >= 0);
return table_[index] != 0;
}
void SetSizeAt(intptr_t index, intptr_t size) {
ASSERT(IsValidIndex(index));
// Ensure we never change size for a given cid from one non-zero size to
// another non-zero size.
RELEASE_ASSERT(table_[index] == 0 || table_[index] == size);
table_[index] = size;
}
bool IsValidIndex(intptr_t index) const { return index > 0 && index < top_; }
intptr_t NumCids() const { return top_; }
intptr_t Capacity() const { return capacity_; }
// Used to drop recently added classes.
void SetNumCids(intptr_t num_cids) {
ASSERT(num_cids <= top_);
top_ = num_cids;
}
// Called whenever a old GC occurs.
void ResetCountersOld();
// Called whenever a new GC occurs.
void ResetCountersNew();
// Called immediately after a new GC.
void UpdatePromoted();
#if !defined(PRODUCT)
// Called whenever a class is allocated in the runtime.
void UpdateAllocatedNew(intptr_t cid, intptr_t size) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
ASSERT(stats != NULL);
ASSERT(size != 0);
stats->recent.AddNew(size);
}
void UpdateAllocatedOld(intptr_t cid, intptr_t size) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
ASSERT(stats != NULL);
ASSERT(size != 0);
stats->recent.AddOld(size);
}
void UpdateAllocatedOldGC(intptr_t cid, intptr_t size);
void UpdateAllocatedExternalNew(intptr_t cid, intptr_t size);
void UpdateAllocatedExternalOld(intptr_t cid, intptr_t size);
void ResetAllocationAccumulators();
void SetTraceAllocationFor(intptr_t cid, bool trace) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
stats->set_trace_allocation(trace);
}
bool TraceAllocationFor(intptr_t cid) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
return stats->trace_allocation();
}
ClassHeapStats* StatsWithUpdatedSize(intptr_t cid, intptr_t size);
#endif // !defined(PRODUCT)
// Returns the newly allocated cid.
//
// [index] is kIllegalCid or a predefined cid.
intptr_t Register(intptr_t index, intptr_t size);
void AllocateIndex(intptr_t index);
void Unregister(intptr_t index);
void Remap(intptr_t* old_to_new_cids);
void FreeOldTables();
// Used by the generated code.
#ifndef PRODUCT
static intptr_t class_heap_stats_table_offset() {
return OFFSET_OF(SharedClassTable, class_heap_stats_table_);
}
#endif
// Used by the generated code.
static intptr_t ClassOffsetFor(intptr_t cid);
// Used by the generated code.
static intptr_t NewSpaceCounterOffsetFor(intptr_t cid);
// Used by the generated code.
static intptr_t StateOffsetFor(intptr_t cid);
// Used by the generated code.
static intptr_t NewSpaceSizeOffsetFor(intptr_t cid);
static const int kInitialCapacity = 512;
static const int kCapacityIncrement = 256;
private:
friend class ClassTable;
friend class GCMarker;
friend class MarkingWeakVisitor;
friend class Scavenger;
friend class ScavengerWeakVisitor;
friend class ClassHeapStatsTestHelper;
friend class HeapTestsHelper;
static bool ShouldUpdateSizeForClassId(intptr_t cid);
#ifndef PRODUCT
// May not have updated size for variable size classes.
ClassHeapStats* PreliminaryStatsAt(intptr_t cid) {
ASSERT(cid > 0);
ASSERT(cid < top_);
return &class_heap_stats_table_[cid];
}
void UpdateLiveOld(intptr_t cid, intptr_t size, intptr_t count = 1);
void UpdateLiveNew(intptr_t cid, intptr_t size);
void UpdateLiveNewGC(intptr_t cid, intptr_t size);
void UpdateLiveOldExternal(intptr_t cid, intptr_t size);
void UpdateLiveNewExternal(intptr_t cid, intptr_t size);
ClassHeapStats* class_heap_stats_table_ = nullptr;
#endif // !PRODUCT
void Grow(intptr_t new_capacity);
intptr_t top_;
intptr_t capacity_;
// Copy-on-write is used for table_, with old copies stored in old_tables_.
intptr_t* table_; // Maps the cid to the instance size.
MallocGrowableArray<intptr_t*>* old_tables_;
DISALLOW_COPY_AND_ASSIGN(SharedClassTable);
};
class ClassTable {
public:
ClassTable();
explicit ClassTable(SharedClassTable* shared_class_table_);
// Creates a shallow copy of the original class table for some read-only
// access, without support for stats data.
explicit ClassTable(ClassTable* original);
ClassTable(ClassTable* original, SharedClassTable* shared_class_table);
~ClassTable();
SharedClassTable* shared_class_table() const { return shared_class_table_; }
void CopyBeforeHotReload(ClassAndSize** copy, intptr_t* copy_num_cids) {
// The [IsolateReloadContext] will need to maintain a copy of the old class
// table until instances have been morphed.
const intptr_t bytes = sizeof(ClassAndSize) * NumCids();
*copy_num_cids = NumCids();
*copy = static_cast<ClassAndSize*>(malloc(bytes));
memmove(*copy, table_, bytes);
const intptr_t num_cids = NumCids();
const intptr_t bytes = sizeof(ClassAndSize) * num_cids;
auto class_and_size = static_cast<ClassAndSize*>(malloc(bytes));
for (intptr_t i = 0; i < num_cids; ++i) {
class_and_size[i] =
ClassAndSize(table_[i], shared_class_table_->table_[i]);
}
*copy_num_cids = num_cids;
*copy = class_and_size;
}
void ResetBeforeHotReload() {
@ -211,7 +374,7 @@ class ClassTable {
// to find the super class (e.g. `cls.SuperClass` will cause us to come
// here).
for (intptr_t i = 0; i < top_; ++i) {
table_[i].size_ = 0;
shared_class_table_->table_[i] = 0;
}
}
@ -222,7 +385,10 @@ class ClassTable {
// return, so we restore size information for all classes.
if (is_rollback) {
SetNumCids(num_old_cids);
memmove(table_, old_table, num_old_cids * sizeof(ClassAndSize));
for (intptr_t i = 0; i < num_old_cids; ++i) {
shared_class_table_->table_[i] = old_table[i].size_;
table_[i] = old_table[i].class_;
}
} else {
CopySizesFromClassObjects();
}
@ -237,43 +403,37 @@ class ClassTable {
// Thread-safe.
RawClass* At(intptr_t index) const {
ASSERT(IsValidIndex(index));
return table_[index].class_;
return table_[index];
}
intptr_t SizeAt(intptr_t index) const {
ASSERT(IsValidIndex(index));
return table_[index].size_;
}
ClassAndSize PairAt(intptr_t index) const {
ASSERT(IsValidIndex(index));
return table_[index];
return shared_class_table_->SizeAt(index);
}
void SetAt(intptr_t index, RawClass* raw_cls);
bool IsValidIndex(intptr_t index) const {
return (index > 0) && (index < top_);
return shared_class_table_->IsValidIndex(index);
}
bool HasValidClassAt(intptr_t index) const {
ASSERT(IsValidIndex(index));
return table_[index].class_ != NULL;
return table_[index] != nullptr;
}
intptr_t NumCids() const { return top_; }
intptr_t Capacity() const { return capacity_; }
intptr_t NumCids() const { return shared_class_table_->NumCids(); }
intptr_t Capacity() const { return shared_class_table_->Capacity(); }
// Used to drop recently added classes.
void SetNumCids(intptr_t num_cids) {
shared_class_table_->SetNumCids(num_cids);
ASSERT(num_cids <= top_);
top_ = num_cids;
}
void Register(const Class& cls);
void AllocateIndex(intptr_t index);
void Unregister(intptr_t index);
void Remap(intptr_t* old_to_new_cids);
@ -293,7 +453,9 @@ class ClassTable {
static intptr_t table_offset() { return OFFSET_OF(ClassTable, table_); }
// Used by the generated code.
static intptr_t ClassOffsetFor(intptr_t cid);
static intptr_t shared_class_table_offset() {
return OFFSET_OF(ClassTable, shared_class_table_);
}
#ifndef PRODUCT
// Describes layout of heap stats for code generation. See offset_extractor.cc
@ -304,76 +466,19 @@ class ClassTable {
};
#endif
#if defined(ARCH_IS_32_BIT)
static constexpr int kSizeOfClassPairLog2 = 3;
#else
static constexpr int kSizeOfClassPairLog2 = 4;
#endif
static_assert(
(1 << kSizeOfClassPairLog2) == sizeof(ClassAndSize),
"Mismatch between sizeof(ClassAndSize) and kSizeOfClassPairLog2");
#ifndef PRODUCT
// Called whenever a class is allocated in the runtime.
void UpdateAllocatedNew(intptr_t cid, intptr_t size) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
ASSERT(stats != NULL);
ASSERT(size != 0);
stats->recent.AddNew(size);
}
void UpdateAllocatedOld(intptr_t cid, intptr_t size) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
ASSERT(stats != NULL);
ASSERT(size != 0);
stats->recent.AddOld(size);
}
void UpdateAllocatedOldGC(intptr_t cid, intptr_t size);
void UpdateAllocatedExternalNew(intptr_t cid, intptr_t size);
void UpdateAllocatedExternalOld(intptr_t cid, intptr_t size);
// Called whenever a old GC occurs.
void ResetCountersOld();
// Called whenever a new GC occurs.
void ResetCountersNew();
// Called immediately after a new GC.
void UpdatePromoted();
// Used by the generated code.
static intptr_t class_heap_stats_table_offset() {
return OFFSET_OF(ClassTable, class_heap_stats_table_);
}
// Used by the generated code.
static intptr_t NewSpaceCounterOffsetFor(intptr_t cid);
// Used by the generated code.
static intptr_t StateOffsetFor(intptr_t cid);
// Used by the generated code.
static intptr_t NewSpaceSizeOffsetFor(intptr_t cid);
ClassHeapStats* StatsWithUpdatedSize(intptr_t cid);
void AllocationProfilePrintJSON(JSONStream* stream, bool internal);
void ResetAllocationAccumulators();
void PrintToJSONObject(JSONObject* object);
void SetTraceAllocationFor(intptr_t cid, bool trace) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
stats->set_trace_allocation(trace);
}
bool TraceAllocationFor(intptr_t cid) {
ClassHeapStats* stats = PreliminaryStatsAt(cid);
return stats->trace_allocation();
}
#endif // !PRODUCT
void AddOldTable(ClassAndSize* old_table);
// Deallocates table copies. Do not call during concurrent access to table.
void FreeOldTables();
private:
friend class GCMarker;
friend class MarkingWeakVisitor;
@ -381,33 +486,19 @@ class ClassTable {
friend class ScavengerWeakVisitor;
friend class ClassHeapStatsTestHelper;
friend class HeapTestsHelper;
static const int initial_capacity_ = 512;
static const int capacity_increment_ = 256;
static const int kInitialCapacity = SharedClassTable::kInitialCapacity;
static const int kCapacityIncrement = SharedClassTable::kCapacityIncrement;
static bool ShouldUpdateSizeForClassId(intptr_t cid);
void Grow(intptr_t index);
intptr_t top_;
intptr_t capacity_;
// Copy-on-write is used for table_, with old copies stored in old_tables_.
ClassAndSize* table_;
RawClass** table_;
MallocGrowableArray<ClassAndSize*>* old_tables_;
#ifndef PRODUCT
ClassHeapStats* class_heap_stats_table_;
// May not have updated size for variable size classes.
ClassHeapStats* PreliminaryStatsAt(intptr_t cid) {
ASSERT(cid > 0);
ASSERT(cid < top_);
return &class_heap_stats_table_[cid];
}
void UpdateLiveOld(intptr_t cid, intptr_t size, intptr_t count = 1);
void UpdateLiveNew(intptr_t cid, intptr_t size);
void UpdateLiveNewGC(intptr_t cid, intptr_t size);
void UpdateLiveOldExternal(intptr_t cid, intptr_t size);
void UpdateLiveNewExternal(intptr_t cid, intptr_t size);
#endif // !PRODUCT
MallocGrowableArray<RawClass**>* old_class_tables_;
SharedClassTable* shared_class_table_;
DISALLOW_COPY_AND_ASSIGN(ClassTable);
};

View file

@ -635,7 +635,6 @@ class FullSnapshotWriter {
ReAlloc alloc_;
intptr_t vm_isolate_snapshot_size_;
intptr_t isolate_snapshot_size_;
ForwardList* forward_list_;
ImageWriter* vm_image_writer_;
ImageWriter* isolate_image_writer_;

View file

@ -1984,12 +1984,13 @@ void Assembler::LoadClassId(Register result, Register object, Condition cond) {
void Assembler::LoadClassById(Register result, Register class_id) {
ASSERT(result != class_id);
const intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::table_offset();
LoadIsolate(result);
const intptr_t offset = target::Isolate::class_table_offset() +
target::ClassTable::table_offset();
LoadFromOffset(kWord, result, result, offset);
ldr(result,
Address(result, class_id, LSL, target::ClassTable::kSizeOfClassPairLog2));
LoadFromOffset(kWord, result, result, table_offset);
ldr(result, Address(result, class_id, LSL, kWordSizeLog2);
}
void Assembler::CompareClassId(Register object,
@ -3511,10 +3512,16 @@ void Assembler::LoadAllocationStatsAddress(Register dest, intptr_t cid) {
ASSERT(dest != kNoRegister);
ASSERT(dest != TMP);
ASSERT(cid > 0);
const intptr_t shared_table_offset =
target::Isolate::class_table_offset() +
target::ClassTable::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t class_offset = target::ClassTable::ClassOffsetFor(cid);
LoadIsolate(dest);
intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::class_heap_stats_table_offset();
ldr(dest, Address(dest, shared_table_offset));
ldr(dest, Address(dest, table_offset));
AddImmediate(dest, class_offset);
}

View file

@ -1116,12 +1116,12 @@ void Assembler::LoadClassId(Register result, Register object) {
void Assembler::LoadClassById(Register result, Register class_id) {
ASSERT(result != class_id);
const intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::table_offset();
LoadIsolate(result);
const intptr_t offset = target::Isolate::class_table_offset() +
target::ClassTable::table_offset();
LoadFromOffset(result, result, offset);
ASSERT(target::ClassTable::kSizeOfClassPairLog2 == 4);
add(class_id, class_id, Operand(class_id));
LoadFromOffset(result, result, table_offset);
ldr(result, Address(result, class_id, UXTX, Address::Scaled));
}
@ -1553,10 +1553,16 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
Register temp_reg,
Label* trace) {
ASSERT(cid > 0);
intptr_t state_offset = target::ClassTable::StateOffsetFor(cid);
const intptr_t shared_table_offset =
target::Isolate::class_table_offset() +
target::ClassTable::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t state_offset = target::ClassTable::StateOffsetFor(cid);
LoadIsolate(temp_reg);
intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::class_heap_stats_table_offset();
ldr(temp_reg, Address(temp_reg, shared_table_offset));
ldr(temp_reg, Address(temp_reg, table_offset));
AddImmediate(temp_reg, state_offset);
ldr(temp_reg, Address(temp_reg, 0));
@ -1566,10 +1572,17 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
void Assembler::UpdateAllocationStats(intptr_t cid) {
ASSERT(cid > 0);
intptr_t counter_offset = target::ClassTable::NewSpaceCounterOffsetFor(cid);
const intptr_t shared_table_offset =
target::Isolate::class_table_offset() +
target::ClassTable::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t counter_offset =
target::ClassTable::NewSpaceCounterOffsetFor(cid);
LoadIsolate(TMP2);
intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::class_heap_stats_table_offset();
ldr(TMP2, Address(TMP2, shared_table_offset));
ldr(TMP, Address(TMP2, table_offset));
AddImmediate(TMP2, TMP, counter_offset);
ldr(TMP, Address(TMP2, 0));
@ -1579,14 +1592,21 @@ void Assembler::UpdateAllocationStats(intptr_t cid) {
void Assembler::UpdateAllocationStatsWithSize(intptr_t cid, Register size_reg) {
ASSERT(cid > 0);
const intptr_t shared_table_offset =
target::Isolate::class_table_offset() +
target::ClassTable::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const uword class_offset = target::ClassTable::ClassOffsetFor(cid);
const uword count_field_offset =
target::ClassHeapStats::allocated_since_gc_new_space_offset();
const uword size_field_offset =
target::ClassHeapStats::allocated_size_since_gc_new_space_offset();
LoadIsolate(TMP2);
intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::class_heap_stats_table_offset();
ldr(TMP2, Address(TMP2, shared_table_offset));
ldr(TMP, Address(TMP2, table_offset));
AddImmediate(TMP2, TMP, class_offset);
ldr(TMP, Address(TMP2, count_field_offset));

View file

@ -2362,11 +2362,17 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
bool near_jump) {
ASSERT(cid > 0);
Address state_address(kNoRegister, 0);
intptr_t state_offset = target::ClassTable::StateOffsetFor(cid);
const intptr_t shared_table_offset =
target::Isolate::class_table_offset() +
target::ClassTable::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t state_offset = target::ClassTable::StateOffsetFor(cid);
ASSERT(temp_reg != kNoRegister);
LoadIsolate(temp_reg);
intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::class_heap_stats_table_offset();
movl(temp_reg, Address(temp_reg, shared_table_offset));
movl(temp_reg, Address(temp_reg, table_offset));
state_address = Address(temp_reg, state_offset);
testb(state_address,
@ -2378,11 +2384,17 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
void Assembler::UpdateAllocationStats(intptr_t cid, Register temp_reg) {
ASSERT(cid > 0);
intptr_t counter_offset = target::ClassTable::NewSpaceCounterOffsetFor(cid);
const intptr_t shared_table_offset =
target::Isolate::class_table_offset() +
target::ClassTable::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t counter_offset =
target::ClassTable::NewSpaceCounterOffsetFor(cid);
ASSERT(temp_reg != kNoRegister);
LoadIsolate(temp_reg);
intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::class_heap_stats_table_offset();
movl(temp_reg, Address(temp_reg, shared_table_offset));
movl(temp_reg, Address(temp_reg, table_offset));
incl(Address(temp_reg, counter_offset));
}
@ -2617,12 +2629,12 @@ void Assembler::LoadClassId(Register result, Register object) {
void Assembler::LoadClassById(Register result, Register class_id) {
ASSERT(result != class_id);
const intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::table_offset();
LoadIsolate(result);
const intptr_t offset = target::Isolate::class_table_offset() +
target::ClassTable::table_offset();
movl(result, Address(result, offset));
ASSERT(target::ClassTable::kSizeOfClassPairLog2 == 3);
movl(result, Address(result, class_id, TIMES_8, 0));
movl(result, Address(result, table_offset));
movl(result, Address(result, class_id, TIMES_4, 0));
}
void Assembler::CompareClassId(Register object,

View file

@ -1851,11 +1851,16 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
Label* trace,
bool near_jump) {
ASSERT(cid > 0);
intptr_t state_offset = target::ClassTable::StateOffsetFor(cid);
const intptr_t shared_table_offset =
target::Isolate::class_table_offset() +
target::ClassTable::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t state_offset = target::ClassTable::StateOffsetFor(cid);
Register temp_reg = TMP;
LoadIsolate(temp_reg);
intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::class_heap_stats_table_offset();
movq(temp_reg, Address(temp_reg, shared_table_offset));
movq(temp_reg, Address(temp_reg, table_offset));
testb(Address(temp_reg, state_offset),
Immediate(target::ClassHeapStats::TraceAllocationMask()));
@ -1866,11 +1871,17 @@ void Assembler::MaybeTraceAllocation(intptr_t cid,
void Assembler::UpdateAllocationStats(intptr_t cid) {
ASSERT(cid > 0);
intptr_t counter_offset = target::ClassTable::NewSpaceCounterOffsetFor(cid);
const intptr_t shared_table_offset =
target::Isolate::class_table_offset() +
target::ClassTable::shared_class_table_offset();
const intptr_t table_offset =
target::SharedClassTable::class_heap_stats_table_offset();
const intptr_t counter_offset =
target::ClassTable::NewSpaceCounterOffsetFor(cid);
Register temp_reg = TMP;
LoadIsolate(temp_reg);
intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::class_heap_stats_table_offset();
movq(temp_reg, Address(temp_reg, shared_table_offset));
movq(temp_reg, Address(temp_reg, table_offset));
incq(Address(temp_reg, counter_offset));
}
@ -2125,14 +2136,11 @@ void Assembler::LoadClassId(Register result, Register object) {
void Assembler::LoadClassById(Register result, Register class_id) {
ASSERT(result != class_id);
const intptr_t table_offset = target::Isolate::class_table_offset() +
target::ClassTable::table_offset();
LoadIsolate(result);
const intptr_t offset = target::Isolate::class_table_offset() +
target::ClassTable::table_offset();
movq(result, Address(result, offset));
ASSERT(target::ClassTable::kSizeOfClassPairLog2 == 4);
// TIMES_16 is not a real scale factor on x64, so we double the class id
// and use TIMES_8.
addq(class_id, class_id);
movq(result, Address(result, table_offset));
movq(result, Address(result, class_id, TIMES_8, 0));
}

View file

@ -804,7 +804,6 @@ class Assembler : public AssemblerBase {
// Loading and comparing classes of objects.
void LoadClassId(Register result, Register object);
// Overwrites class_id register (it will be tagged afterwards).
void LoadClassById(Register result, Register class_id);
void CompareClassId(Register object,

View file

@ -755,15 +755,22 @@ class Isolate : public AllStatic {
#endif // !defined(PRODUCT)
};
class SharedClassTable : public AllStatic {
public:
static word class_heap_stats_table_offset();
};
class ClassTable : public AllStatic {
public:
static word table_offset();
static word shared_class_table_offset();
#if !defined(PRODUCT)
static word ClassOffsetFor(intptr_t cid);
static word StateOffsetFor(intptr_t cid);
static word class_heap_stats_table_offset();
static word NewSpaceCounterOffsetFor(intptr_t cid);
static word NewSpaceSizeOffsetFor(intptr_t cid);
static word SharedTableOffsetFor();
static word SizeOffsetFor(intptr_t cid, bool is_new);
#endif // !defined(PRODUCT)
static const word kSizeOfClassPairLog2;
};

View file

@ -22,8 +22,6 @@ static constexpr dart::compiler::target::word ObjectPool_element_size = 4;
static constexpr dart::compiler::target::word Array_kMaxElements = 268435455;
static constexpr dart::compiler::target::word Array_kMaxNewSpaceElements =
65533;
static constexpr dart::compiler::target::word ClassTable_kSizeOfClassPairLog2 =
3;
static constexpr dart::compiler::target::word
Instructions_kMonomorphicEntryOffsetJIT = 0;
static constexpr dart::compiler::target::word
@ -85,7 +83,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word
ClassHeapStats_allocated_size_since_gc_new_space_offset = 52;
static constexpr dart::compiler::target::word ClassHeapStats_state_offset = 160;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 20;
static constexpr dart::compiler::target::word ClassTable_table_offset = 8;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word Closure_context_offset = 20;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 12;
@ -138,12 +140,12 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 20;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 28;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 16;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 36;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 40;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 20;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 24;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 28;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 32;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 56;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 36;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 64;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 16;
static constexpr dart::compiler::target::word
@ -339,8 +341,6 @@ static constexpr dart::compiler::target::word TypeArguments_element_size = 4;
static constexpr dart::compiler::target::word ClassTable_elements_start_offset =
0;
static constexpr dart::compiler::target::word ClassTable_element_size = 168;
static constexpr dart::compiler::target::word
ClassTable_class_heap_stats_table_offset = 16;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
4, 12, 8, 16};
static constexpr dart::compiler::target::word
@ -378,8 +378,6 @@ static constexpr dart::compiler::target::word Array_kMaxElements =
576460752303423487;
static constexpr dart::compiler::target::word Array_kMaxNewSpaceElements =
32765;
static constexpr dart::compiler::target::word ClassTable_kSizeOfClassPairLog2 =
4;
static constexpr dart::compiler::target::word
Instructions_kMonomorphicEntryOffsetJIT = 8;
static constexpr dart::compiler::target::word
@ -442,7 +440,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word
ClassHeapStats_allocated_size_since_gc_new_space_offset = 104;
static constexpr dart::compiler::target::word ClassHeapStats_state_offset = 272;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 40;
static constexpr dart::compiler::target::word ClassTable_table_offset = 16;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word Closure_context_offset = 40;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 24;
@ -495,12 +497,12 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 40;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 52;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 32;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 72;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 80;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 40;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 48;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 56;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 64;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 112;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 72;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 128;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 32;
static constexpr dart::compiler::target::word
@ -698,8 +700,6 @@ static constexpr dart::compiler::target::word TypeArguments_element_size = 8;
static constexpr dart::compiler::target::word ClassTable_elements_start_offset =
0;
static constexpr dart::compiler::target::word ClassTable_element_size = 288;
static constexpr dart::compiler::target::word
ClassTable_class_heap_stats_table_offset = 32;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
8, 24, 16, 32};
static constexpr dart::compiler::target::word
@ -736,8 +736,6 @@ static constexpr dart::compiler::target::word ObjectPool_element_size = 4;
static constexpr dart::compiler::target::word Array_kMaxElements = 268435455;
static constexpr dart::compiler::target::word Array_kMaxNewSpaceElements =
65533;
static constexpr dart::compiler::target::word ClassTable_kSizeOfClassPairLog2 =
3;
static constexpr dart::compiler::target::word
Instructions_kMonomorphicEntryOffsetJIT = 6;
static constexpr dart::compiler::target::word
@ -799,7 +797,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word
ClassHeapStats_allocated_size_since_gc_new_space_offset = 52;
static constexpr dart::compiler::target::word ClassHeapStats_state_offset = 160;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 20;
static constexpr dart::compiler::target::word ClassTable_table_offset = 8;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word Closure_context_offset = 20;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 12;
@ -852,12 +854,12 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 20;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 28;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 16;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 36;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 40;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 20;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 24;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 28;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 32;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 56;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 36;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 64;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 16;
static constexpr dart::compiler::target::word
@ -1053,8 +1055,6 @@ static constexpr dart::compiler::target::word TypeArguments_element_size = 4;
static constexpr dart::compiler::target::word ClassTable_elements_start_offset =
0;
static constexpr dart::compiler::target::word ClassTable_element_size = 168;
static constexpr dart::compiler::target::word
ClassTable_class_heap_stats_table_offset = 16;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
4, 12, 8, 16};
static constexpr dart::compiler::target::word
@ -1088,8 +1088,6 @@ static constexpr dart::compiler::target::word Array_kMaxElements =
576460752303423487;
static constexpr dart::compiler::target::word Array_kMaxNewSpaceElements =
32765;
static constexpr dart::compiler::target::word ClassTable_kSizeOfClassPairLog2 =
4;
static constexpr dart::compiler::target::word
Instructions_kMonomorphicEntryOffsetJIT = 8;
static constexpr dart::compiler::target::word
@ -1152,7 +1150,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word
ClassHeapStats_allocated_size_since_gc_new_space_offset = 104;
static constexpr dart::compiler::target::word ClassHeapStats_state_offset = 272;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 40;
static constexpr dart::compiler::target::word ClassTable_table_offset = 16;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word Closure_context_offset = 40;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 24;
@ -1205,12 +1207,12 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 40;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 52;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 32;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 72;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 80;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 40;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 48;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 56;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 64;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 112;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 72;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 128;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 32;
static constexpr dart::compiler::target::word
@ -1408,8 +1410,6 @@ static constexpr dart::compiler::target::word TypeArguments_element_size = 8;
static constexpr dart::compiler::target::word ClassTable_elements_start_offset =
0;
static constexpr dart::compiler::target::word ClassTable_element_size = 288;
static constexpr dart::compiler::target::word
ClassTable_class_heap_stats_table_offset = 32;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
8, 24, 16, 32};
static constexpr dart::compiler::target::word
@ -1448,8 +1448,6 @@ static constexpr dart::compiler::target::word Array_kMaxElements =
576460752303423487;
static constexpr dart::compiler::target::word Array_kMaxNewSpaceElements =
32765;
static constexpr dart::compiler::target::word ClassTable_kSizeOfClassPairLog2 =
4;
static constexpr dart::compiler::target::word
Instructions_kMonomorphicEntryOffsetJIT = 0;
static constexpr dart::compiler::target::word
@ -1512,7 +1510,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word
ClassHeapStats_allocated_size_since_gc_new_space_offset = 104;
static constexpr dart::compiler::target::word ClassHeapStats_state_offset = 272;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 40;
static constexpr dart::compiler::target::word ClassTable_table_offset = 16;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word Closure_context_offset = 40;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 24;
@ -1565,12 +1567,12 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 40;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 52;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 32;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 72;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 80;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 40;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 48;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 56;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 64;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 112;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 72;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 128;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 32;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 32;
static constexpr dart::compiler::target::word
@ -1700,8 +1702,6 @@ static constexpr dart::compiler::target::word TypeArguments_element_size = 8;
static constexpr dart::compiler::target::word ClassTable_elements_start_offset =
0;
static constexpr dart::compiler::target::word ClassTable_element_size = 288;
static constexpr dart::compiler::target::word
ClassTable_class_heap_stats_table_offset = 32;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
8, 24, 16, 32};
static constexpr dart::compiler::target::word
@ -1734,8 +1734,6 @@ static constexpr dart::compiler::target::word ObjectPool_element_size = 4;
static constexpr dart::compiler::target::word Array_kMaxElements = 268435455;
static constexpr dart::compiler::target::word Array_kMaxNewSpaceElements =
65533;
static constexpr dart::compiler::target::word ClassTable_kSizeOfClassPairLog2 =
3;
static constexpr dart::compiler::target::word
Instructions_kMonomorphicEntryOffsetJIT = 0;
static constexpr dart::compiler::target::word
@ -1797,7 +1795,11 @@ static constexpr dart::compiler::target::word
static constexpr dart::compiler::target::word
ClassHeapStats_allocated_size_since_gc_new_space_offset = 52;
static constexpr dart::compiler::target::word ClassHeapStats_state_offset = 160;
static constexpr dart::compiler::target::word
ClassTable_shared_class_table_offset = 20;
static constexpr dart::compiler::target::word ClassTable_table_offset = 8;
static constexpr dart::compiler::target::word
SharedClassTable_class_heap_stats_table_offset = 0;
static constexpr dart::compiler::target::word Closure_context_offset = 20;
static constexpr dart::compiler::target::word
Closure_delayed_type_arguments_offset = 12;
@ -1850,12 +1852,12 @@ static constexpr dart::compiler::target::word ICData_owner_offset = 20;
static constexpr dart::compiler::target::word ICData_state_bits_offset = 28;
static constexpr dart::compiler::target::word
ICData_receivers_static_type_offset = 16;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 36;
static constexpr dart::compiler::target::word Isolate_class_table_offset = 40;
static constexpr dart::compiler::target::word Isolate_current_tag_offset = 20;
static constexpr dart::compiler::target::word Isolate_default_tag_offset = 24;
static constexpr dart::compiler::target::word Isolate_ic_miss_code_offset = 28;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 32;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 56;
static constexpr dart::compiler::target::word Isolate_object_store_offset = 36;
static constexpr dart::compiler::target::word Isolate_single_step_offset = 64;
static constexpr dart::compiler::target::word Isolate_user_tag_offset = 16;
static constexpr dart::compiler::target::word LinkedHashMap_data_offset = 16;
static constexpr dart::compiler::target::word
@ -1984,8 +1986,6 @@ static constexpr dart::compiler::target::word TypeArguments_element_size = 4;
static constexpr dart::compiler::target::word ClassTable_elements_start_offset =
0;
static constexpr dart::compiler::target::word ClassTable_element_size = 168;
static constexpr dart::compiler::target::word
ClassTable_class_heap_stats_table_offset = 16;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
4, 12, 8, 16};
static constexpr dart::compiler::target::word

View file

@ -34,7 +34,6 @@
ARRAY(ObjectPool, element_offset) \
CONSTANT(Array, kMaxElements) \
CONSTANT(Array, kMaxNewSpaceElements) \
CONSTANT(ClassTable, kSizeOfClassPairLog2) \
CONSTANT(Instructions, kMonomorphicEntryOffsetJIT) \
CONSTANT(Instructions, kPolymorphicEntryOffsetJIT) \
CONSTANT(Instructions, kMonomorphicEntryOffsetAOT) \
@ -71,7 +70,9 @@
NOT_IN_PRODUCT( \
FIELD(ClassHeapStats, allocated_size_since_gc_new_space_offset)) \
NOT_IN_PRODUCT(FIELD(ClassHeapStats, state_offset)) \
FIELD(ClassTable, shared_class_table_offset) \
FIELD(ClassTable, table_offset) \
NOT_IN_PRODUCT(FIELD(SharedClassTable, class_heap_stats_table_offset)) \
FIELD(Closure, context_offset) \
FIELD(Closure, delayed_type_arguments_offset) \
FIELD(Closure, function_offset) \
@ -244,7 +245,6 @@
NOT_IN_PRODUCT(ARRAY_STRUCTFIELD( \
ClassTable, NewSpaceSizeOffsetFor, ClassOffsetFor, \
ClassHeapStats::allocated_size_since_gc_new_space_offset())) \
NOT_IN_PRODUCT(FIELD(ClassTable, class_heap_stats_table_offset)) \
RANGE(Code, entry_point_offset, CodeEntryKind, CodeEntryKind::kNormal, \
CodeEntryKind::kMonomorphicUnchecked, \
[](CodeEntryKind value) { return true; }) \

View file

@ -2782,12 +2782,8 @@ static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) {
// Non-Closure handling.
{
__ Bind(&not_closure);
if (n == 1) {
__ SmiTag(kInstanceCidOrFunction);
} else {
ASSERT(n >= 2);
if (n >= 2) {
Label has_no_type_arguments;
// [LoadClassById] also tags [kInstanceCidOrFunction] as a side-effect.
__ LoadClassById(RDI, kInstanceCidOrFunction);
__ movq(kInstanceInstantiatorTypeArgumentsReg, kNullReg);
__ movl(
@ -2806,6 +2802,7 @@ static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) {
__ movq(kInstanceDelayedFunctionTypeArgumentsReg, kNullReg);
}
}
__ SmiTag(kInstanceCidOrFunction);
}
Label found, not_found, next_iteration;

View file

@ -82,7 +82,7 @@ class ClassHeapStatsTestHelper {
public:
static ClassHeapStats* GetHeapStatsForCid(ClassTable* class_table,
intptr_t cid) {
return class_table->PreliminaryStatsAt(cid);
return class_table->shared_class_table()->PreliminaryStatsAt(cid);
}
static void DumpClassHeapStats(ClassHeapStats* stats) {

View file

@ -90,7 +90,7 @@ class MarkingVisitorBase : public ObjectPointerVisitor {
: ObjectPointerVisitor(isolate),
thread_(Thread::Current()),
#ifndef PRODUCT
num_classes_(isolate->class_table()->Capacity()),
num_classes_(isolate->shared_class_table()->Capacity()),
class_stats_count_(new intptr_t[num_classes_]),
class_stats_size_(new intptr_t[num_classes_]),
#endif // !PRODUCT
@ -372,7 +372,8 @@ static bool IsUnreachable(const RawObject* raw_obj) {
class MarkingWeakVisitor : public HandleVisitor {
public:
explicit MarkingWeakVisitor(Thread* thread)
: HandleVisitor(thread), class_table_(thread->isolate()->class_table()) {}
: HandleVisitor(thread),
class_table_(thread->isolate()->shared_class_table()) {}
void VisitHandle(uword addr) {
FinalizablePersistentHandle* handle =
@ -394,7 +395,7 @@ class MarkingWeakVisitor : public HandleVisitor {
}
private:
ClassTable* class_table_;
SharedClassTable* class_table_;
DISALLOW_COPY_AND_ASSIGN(MarkingWeakVisitor);
};
@ -724,7 +725,7 @@ void GCMarker::FinalizeResultsFrom(MarkingVisitorType* visitor) {
#ifndef PRODUCT
// Class heap stats are not themselves thread-safe yet, so we update the
// stats while holding stats_mutex_.
ClassTable* table = heap_->isolate()->class_table();
auto table = heap_->isolate()->shared_class_table();
for (intptr_t i = 0; i < table->NumCids(); ++i) {
const intptr_t count = visitor->live_count(i);
if (count > 0) {

View file

@ -541,7 +541,8 @@ void PageSpace::AllocateExternal(intptr_t cid, intptr_t size) {
intptr_t size_in_words = size >> kWordSizeLog2;
AtomicOperations::IncrementBy(&(usage_.external_in_words), size_in_words);
NOT_IN_PRODUCT(
heap_->isolate()->class_table()->UpdateAllocatedExternalOld(cid, size));
heap_->isolate()->shared_class_table()->UpdateAllocatedExternalOld(cid,
size));
}
void PageSpace::PromoteExternal(intptr_t cid, intptr_t size) {
@ -1097,7 +1098,7 @@ void PageSpace::CollectGarbageAtSafepoint(bool compact,
return;
}
NOT_IN_PRODUCT(isolate->class_table()->ResetCountersOld());
NOT_IN_PRODUCT(isolate->shared_class_table()->ResetCountersOld());
marker_->MarkObjects(this);
usage_.used_in_words = marker_->marked_words() + allocated_black_in_words_;
allocated_black_in_words_ = 0;

View file

@ -281,7 +281,7 @@ class ScavengerWeakVisitor : public HandleVisitor {
ScavengerWeakVisitor(Thread* thread, Scavenger* scavenger)
: HandleVisitor(thread),
scavenger_(scavenger),
class_table_(thread->isolate()->class_table()) {
class_table_(thread->isolate()->shared_class_table()) {
ASSERT(scavenger->heap_->isolate() == thread->isolate());
}
@ -307,7 +307,7 @@ class ScavengerWeakVisitor : public HandleVisitor {
private:
Scavenger* scavenger_;
ClassTable* class_table_;
SharedClassTable* class_table_;
DISALLOW_COPY_AND_ASSIGN(ScavengerWeakVisitor);
};
@ -486,7 +486,7 @@ intptr_t Scavenger::NewSizeInWords(intptr_t old_size_in_words) const {
}
SemiSpace* Scavenger::Prologue(Isolate* isolate) {
NOT_IN_PRODUCT(isolate->class_table()->ResetCountersNew());
NOT_IN_PRODUCT(isolate->shared_class_table()->ResetCountersNew());
isolate->ReleaseStoreBuffers();
AbandonTLABs(isolate);
@ -593,7 +593,7 @@ void Scavenger::Epilogue(Isolate* isolate, SemiSpace* from) {
heap_->UpdateGlobalMaxUsed();
}
NOT_IN_PRODUCT(isolate->class_table()->UpdatePromoted());
NOT_IN_PRODUCT(isolate->shared_class_table()->UpdatePromoted());
}
bool Scavenger::ShouldPerformIdleScavenge(int64_t deadline) {
@ -708,7 +708,7 @@ void Scavenger::IterateWeakRoots(Isolate* isolate, HandleVisitor* visitor) {
void Scavenger::ProcessToSpace(ScavengerVisitor* visitor) {
Thread* thread = Thread::Current();
NOT_IN_PRODUCT(ClassTable* class_table = thread->isolate()->class_table());
NOT_IN_PRODUCT(auto class_table = visitor->isolate()->shared_class_table());
// Iterate until all work has been drained.
while ((resolved_top_ < top_) || PromotedStackHasMore()) {
@ -1161,7 +1161,8 @@ void Scavenger::AllocateExternal(intptr_t cid, intptr_t size) {
ASSERT(size >= 0);
external_size_ += size;
NOT_IN_PRODUCT(
heap_->isolate()->class_table()->UpdateAllocatedExternalNew(cid, size));
heap_->isolate()->shared_class_table()->UpdateAllocatedExternalNew(cid,
size));
}
void Scavenger::FreeExternal(intptr_t size) {

View file

@ -264,7 +264,7 @@ DART_FORCE_INLINE static bool TryAllocate(Thread* thread,
RawObject** result) {
const uword start = thread->top();
#ifndef PRODUCT
ClassTable* table = thread->isolate()->class_table();
auto table = thread->isolate()->shared_class_table();
if (UNLIKELY(table->TraceAllocationFor(class_id))) {
return false;
}

View file

@ -1050,7 +1050,8 @@ Isolate::Isolate(IsolateGroup* isolate_group,
current_tag_(UserTag::null()),
default_tag_(UserTag::null()),
ic_miss_code_(Code::null()),
class_table_(),
shared_class_table_(new SharedClassTable()),
class_table_(shared_class_table_.get()),
store_buffer_(new StoreBuffer()),
#if !defined(TARGET_ARCH_DBC) && !defined(DART_PRECOMPILED_RUNTIME)
native_callback_trampolines_(),

View file

@ -340,6 +340,9 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
SafepointHandler* safepoint_handler() const {
return group()->safepoint_handler();
}
SharedClassTable* shared_class_table() { return shared_class_table_.get(); }
ClassTable* class_table() { return &class_table_; }
static intptr_t class_table_offset() {
return OFFSET_OF(Isolate, class_table_);
@ -1023,6 +1026,7 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
RawUserTag* current_tag_;
RawUserTag* default_tag_;
RawCode* ic_miss_code_;
std::unique_ptr<SharedClassTable> shared_class_table_;
ObjectStore* object_store_ = nullptr;
ClassTable class_table_;
bool single_step_ = false;

View file

@ -145,6 +145,8 @@ class IsolateReloadContext {
// All zone allocated objects must be allocated from this zone.
Zone* zone() const { return zone_; }
bool UseSavedClassTableForGC() const { return saved_class_table_ != nullptr; }
bool reload_skipped() const { return reload_skipped_; }
bool reload_aborted() const { return reload_aborted_; }
RawError* error() const;

View file

@ -2202,7 +2202,7 @@ RawObject* Object::Allocate(intptr_t cls_id, intptr_t size, Heap::Space space) {
}
}
#ifndef PRODUCT
ClassTable* class_table = thread->isolate()->class_table();
auto class_table = thread->isolate()->shared_class_table();
if (space == Heap::kNew) {
class_table->UpdateAllocatedNew(cls_id, size);
} else {
@ -3298,7 +3298,7 @@ void Class::DisableAllCHAOptimizedCode() {
bool Class::TraceAllocation(Isolate* isolate) const {
#ifndef PRODUCT
ClassTable* class_table = isolate->class_table();
auto class_table = isolate->shared_class_table();
return class_table->TraceAllocationFor(id());
#else
return false;
@ -3310,7 +3310,7 @@ void Class::SetTraceAllocation(bool trace_allocation) const {
Isolate* isolate = Isolate::Current();
const bool changed = trace_allocation != this->TraceAllocation(isolate);
if (changed) {
ClassTable* class_table = isolate->class_table();
auto class_table = isolate->shared_class_table();
class_table->SetTraceAllocationFor(id(), trace_allocation);
DisableAllocationStub();
}

View file

@ -9,6 +9,7 @@
#include "vm/heap/become.h"
#include "vm/heap/freelist.h"
#include "vm/isolate.h"
#include "vm/isolate_reload.h"
#include "vm/object.h"
#include "vm/runtime_entry.h"
#include "vm/visitor.h"
@ -57,11 +58,11 @@ void RawObject::Validate(Isolate* isolate) const {
}
}
intptr_t class_id = ClassIdTag::decode(tags);
if (!isolate->class_table()->IsValidIndex(class_id)) {
if (!isolate->shared_class_table()->IsValidIndex(class_id)) {
FATAL1("Invalid class id encountered %" Pd "\n", class_id);
}
if ((class_id == kNullCid) &&
(isolate->class_table()->At(class_id) == NULL)) {
if (class_id == kNullCid &&
isolate->shared_class_table()->HasValidClassAt(class_id)) {
// Null class not yet initialized; skip.
return;
}
@ -212,9 +213,19 @@ intptr_t RawObject::HeapSizeFromClass() const {
// TODO(koda): Add Size(ClassTable*) interface to allow caching in loops.
Isolate* isolate = Isolate::Current();
#if defined(DEBUG)
ClassTable* class_table = isolate->class_table();
auto class_table = isolate->shared_class_table();
#if !defined(DART_PRECOMPILED_RUNTIME)
auto reload_context = isolate->reload_context();
const bool use_saved_class_table =
reload_context != nullptr ? reload_context->UseSavedClassTableForGC()
: false;
#else
const bool use_saved_class_table = false;
#endif
ASSERT(use_saved_class_table || class_table->SizeAt(class_id) > 0);
if (!class_table->IsValidIndex(class_id) ||
!class_table->HasValidClassAt(class_id)) {
(!class_table->HasValidClassAt(class_id) && !use_saved_class_table)) {
FATAL2("Invalid class id: %" Pd " from tags %x\n", class_id,
ptr()->tags_);
}

View file

@ -3943,7 +3943,7 @@ static bool GetAllocationProfileImpl(Thread* thread,
Isolate* isolate = thread->isolate();
if (should_reset_accumulator) {
isolate->UpdateLastAllocationProfileAccumulatorResetTimestamp();
isolate->class_table()->ResetAllocationAccumulators();
isolate->shared_class_table()->ResetAllocationAccumulators();
}
if (should_collect) {
isolate->UpdateLastAllocationProfileGCTimestamp();