// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#ifndef RUNTIME_VM_CLASS_TABLE_H_
#define RUNTIME_VM_CLASS_TABLE_H_

#include "platform/assert.h"
#include "platform/atomic.h"

#include "vm/bitfield.h"
#include "vm/class_id.h"
#include "vm/globals.h"

namespace dart {

class Class;
class ClassStats;
class ClassTable;
class Isolate;
class IsolateGroup;
class JSONArray;
class JSONObject;
class JSONStream;
template <typename T>
class MallocGrowableArray;
class ObjectPointerVisitor;
class RawClass;

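// A (class pointer, instance size) pair. Used, for example, when the class
// table is snapshotted for hot reload (see ClassTable::CopyBeforeHotReload).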
class ClassAndSize {
 public:
  ClassAndSize() : class_(NULL), size_(0) {}
  explicit ClassAndSize(RawClass* clazz);
  ClassAndSize(RawClass* clazz, intptr_t size) : class_(clazz), size_(size) {}
  RawClass* get_raw_class() const { return class_; }
  intptr_t size() const { return size_; }

 private:
  RawClass* class_;
  intptr_t size_;

  friend class ClassTable;
  friend class IsolateReloadContext;  // For VisitObjectPointers.
};

#ifndef PRODUCT

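// A set of allocation counters (object count, total size, external size),
// kept separately for new space and old space.
//
// The Add* / Add*External variants use atomic increments because mutator
// threads may allocate, and hence update these counters, concurrently. The
// Add*GC variants update the fields directly and are meant for callers that
// already exclude racing updates, e.g. during a GC.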
template <typename T>
class AllocStats {
 public:
  T new_count;
  T new_size;
  T new_external_size;
  T old_count;
  T old_size;
  T old_external_size;

  void ResetNew() {
    new_count = 0;
    new_size = 0;
    new_external_size = 0;
    old_external_size = 0;
  }

  void AddNew(T size) {
    AtomicOperations::IncrementBy(&new_count, 1);
    AtomicOperations::IncrementBy(&new_size, size);
  }

  void AddNewGC(T size) {
    new_count += 1;
    new_size += size;
  }

  void AddNewExternal(T size) {
    AtomicOperations::IncrementBy(&new_external_size, size);
  }

  void ResetOld() {
    old_count = 0;
    old_size = 0;
    old_external_size = 0;
    new_external_size = 0;
  }

  void AddOld(T size, T count = 1) {
    AtomicOperations::IncrementBy(&old_count, count);
    AtomicOperations::IncrementBy(&old_size, size);
  }

  void AddOldGC(T size, T count = 1) {
    old_count += count;
    old_size += size;
  }

  void AddOldExternal(T size) {
    AtomicOperations::IncrementBy(&old_external_size, size);
  }

  void Reset() {
    ResetNew();
    ResetOld();
  }

  // For classes with a fixed instance size we do not emit code to update
  // the size statistics. Update them by calling this method.
  void UpdateSize(intptr_t instance_size) {
    ASSERT(instance_size > 0);
    old_size = old_count * instance_size;
    new_size = new_count * instance_size;
  }

  void Verify() {
    ASSERT(new_count >= 0);
    ASSERT(new_size >= 0);
    ASSERT(new_external_size >= 0);
    ASSERT(old_count >= 0);
    ASSERT(old_size >= 0);
    ASSERT(old_external_size >= 0);
  }
};

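// Heap allocation statistics for a single class, maintained only in
// non-PRODUCT builds (note the surrounding #ifndef PRODUCT).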
class ClassHeapStats {
 public:
  // Snapshot before GC.
  AllocStats<intptr_t> pre_gc;
  // Live after GC.
  AllocStats<intptr_t> post_gc;
  // Allocations since the last GC.
  AllocStats<intptr_t> recent;
  // Accumulated (across GCs) allocations.
  AllocStats<int64_t> accumulated;
  // Snapshot of recent at the time of the last reset.
  AllocStats<intptr_t> last_reset;
  // Promoted from new to old by the last new-space GC.
  intptr_t promoted_count;
  intptr_t promoted_size;

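  // Offsets of the 'recent' counters, for code that needs to update them via
  // raw field offsets (see also the 'Used by the generated code' helpers on
  // SharedClassTable and ClassTable).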
  static intptr_t allocated_since_gc_new_space_offset() {
    return OFFSET_OF(ClassHeapStats, recent) +
           OFFSET_OF(AllocStats<intptr_t>, new_count);
  }
  static intptr_t allocated_since_gc_old_space_offset() {
    return OFFSET_OF(ClassHeapStats, recent) +
           OFFSET_OF(AllocStats<intptr_t>, old_count);
  }
  static intptr_t allocated_size_since_gc_new_space_offset() {
    return OFFSET_OF(ClassHeapStats, recent) +
           OFFSET_OF(AllocStats<intptr_t>, new_size);
  }
  static intptr_t allocated_size_since_gc_old_space_offset() {
    return OFFSET_OF(ClassHeapStats, recent) +
           OFFSET_OF(AllocStats<intptr_t>, old_size);
  }
  static intptr_t state_offset() { return OFFSET_OF(ClassHeapStats, state_); }
  static intptr_t TraceAllocationMask() { return (1 << kTraceAllocationBit); }

  void Initialize();
  void ResetAtNewGC();
  void ResetAtOldGC();
  void ResetAccumulator();
  void UpdatePromotedAfterNewGC();
  void UpdateSize(intptr_t instance_size);
#ifndef PRODUCT
  void PrintToJSONObject(const Class& cls,
                         JSONObject* obj,
                         bool internal) const;
#endif
  void Verify();

  bool trace_allocation() const { return TraceAllocationBit::decode(state_); }

  void set_trace_allocation(bool trace_allocation) {
    state_ = TraceAllocationBit::update(trace_allocation, state_);
  }

 private:
  enum StateBits {
    kTraceAllocationBit = 0,
  };

  class TraceAllocationBit
      : public BitField<intptr_t, bool, kTraceAllocationBit, 1> {};

  // Recent old at start of last new GC (used to compute promoted_*).
  intptr_t old_pre_new_gc_count_;
  intptr_t old_pre_new_gc_size_;
  intptr_t state_;
  intptr_t align_;  // Make SIMARM and ARM agree on the size of ClassHeapStats.
};

#endif  // !PRODUCT

// Registry of all known classes and their sizes.
//
// The GC will only need the information in this shared class table to scan
// object pointers.
class SharedClassTable {
 public:
  SharedClassTable();
  ~SharedClassTable();

  // Thread-safe.
  intptr_t SizeAt(intptr_t index) const {
    ASSERT(IsValidIndex(index));
    return table_[index];
  }

  bool HasValidClassAt(intptr_t index) const {
    ASSERT(IsValidIndex(index));
    ASSERT(table_[index] >= 0);
    return table_[index] != 0;
  }

  void SetSizeAt(intptr_t index, intptr_t size) {
    ASSERT(IsValidIndex(index));
    // Ensure we never change size for a given cid from one non-zero size to
    // another non-zero size.
    RELEASE_ASSERT(table_[index] == 0 || table_[index] == size);
    table_[index] = size;
  }

  bool IsValidIndex(intptr_t index) const { return index > 0 && index < top_; }

  intptr_t NumCids() const { return top_; }
  intptr_t Capacity() const { return capacity_; }

  // Used to drop recently added classes.
  void SetNumCids(intptr_t num_cids) {
    ASSERT(num_cids <= top_);
    top_ = num_cids;
  }

  // Called whenever an old-space GC occurs.
  void ResetCountersOld();
  // Called whenever a new-space GC occurs.
  void ResetCountersNew();
  // Called immediately after a new-space GC.
  void UpdatePromoted();

#if !defined(PRODUCT)
  // Called whenever a class is allocated in the runtime.
  void UpdateAllocatedNew(intptr_t cid, intptr_t size) {
    ClassHeapStats* stats = PreliminaryStatsAt(cid);
    ASSERT(stats != NULL);
    ASSERT(size != 0);
    stats->recent.AddNew(size);
  }
  void UpdateAllocatedOld(intptr_t cid, intptr_t size) {
    ClassHeapStats* stats = PreliminaryStatsAt(cid);
    ASSERT(stats != NULL);
    ASSERT(size != 0);
    stats->recent.AddOld(size);
  }
  void UpdateAllocatedOldGC(intptr_t cid, intptr_t size);
  void UpdateAllocatedExternalNew(intptr_t cid, intptr_t size);
  void UpdateAllocatedExternalOld(intptr_t cid, intptr_t size);

  void ResetAllocationAccumulators();

  void SetTraceAllocationFor(intptr_t cid, bool trace) {
    ClassHeapStats* stats = PreliminaryStatsAt(cid);
    stats->set_trace_allocation(trace);
  }
  bool TraceAllocationFor(intptr_t cid) {
    ClassHeapStats* stats = PreliminaryStatsAt(cid);
    return stats->trace_allocation();
  }

  ClassHeapStats* StatsWithUpdatedSize(intptr_t cid, intptr_t size);
#endif  // !defined(PRODUCT)

  // Returns the newly allocated cid.
  //
  // [index] is kIllegalCid or a predefined cid.
  intptr_t Register(intptr_t index, intptr_t size);
  void AllocateIndex(intptr_t index);
  void Unregister(intptr_t index);

  void Remap(intptr_t* old_to_new_cids);

  void FreeOldTables();

  // Used by the generated code.
#ifndef PRODUCT
  static intptr_t class_heap_stats_table_offset() {
    return OFFSET_OF(SharedClassTable, class_heap_stats_table_);
  }
#endif

  // Used by the generated code.
  static intptr_t ClassOffsetFor(intptr_t cid);

  // Used by the generated code.
  static intptr_t NewSpaceCounterOffsetFor(intptr_t cid);

  // Used by the generated code.
  static intptr_t StateOffsetFor(intptr_t cid);

  // Used by the generated code.
  static intptr_t NewSpaceSizeOffsetFor(intptr_t cid);

  static const int kInitialCapacity = 512;
  static const int kCapacityIncrement = 256;

 private:
  friend class ClassTable;
  friend class GCMarker;
  friend class MarkingWeakVisitor;
  friend class Scavenger;
  friend class ScavengerWeakVisitor;
  friend class ClassHeapStatsTestHelper;
  friend class HeapTestsHelper;

  static bool ShouldUpdateSizeForClassId(intptr_t cid);

#ifndef PRODUCT
  // May not have updated size for variable size classes.
  ClassHeapStats* PreliminaryStatsAt(intptr_t cid) {
    ASSERT(cid > 0);
    ASSERT(cid < top_);
    return &class_heap_stats_table_[cid];
  }
  void UpdateLiveOld(intptr_t cid, intptr_t size, intptr_t count = 1);
  void UpdateLiveNew(intptr_t cid, intptr_t size);
  void UpdateLiveNewGC(intptr_t cid, intptr_t size);
  void UpdateLiveOldExternal(intptr_t cid, intptr_t size);
  void UpdateLiveNewExternal(intptr_t cid, intptr_t size);

  ClassHeapStats* class_heap_stats_table_ = nullptr;
#endif  // !PRODUCT

  void Grow(intptr_t new_capacity);

  intptr_t top_;
  intptr_t capacity_;

  // Copy-on-write is used for table_, with old copies stored in old_tables_.
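  // (The old copies cannot be freed eagerly: a concurrent reader, e.g. the
  // marker or sweeper, may still hold a pointer into a previous table. See
  // FreeOldTables().)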
  intptr_t* table_;  // Maps the cid to the instance size.
  MallocGrowableArray<intptr_t*>* old_tables_;

  DISALLOW_COPY_AND_ASSIGN(SharedClassTable);
};

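// Maps class ids to the corresponding RawClass*. Instance sizes (and, in
// non-PRODUCT builds, the per-class allocation statistics) live in the
// SharedClassTable passed to the constructor; the size accessors below simply
// delegate to it.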
class ClassTable {
 public:
  explicit ClassTable(SharedClassTable* shared_class_table_);

  // Creates a shallow copy of the original class table for some read-only
  // access, without support for stats data.
  ClassTable(ClassTable* original, SharedClassTable* shared_class_table);
  ~ClassTable();

  SharedClassTable* shared_class_table() const { return shared_class_table_; }

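  // Hot-reload support. A rough sketch of the expected call sequence (names
  // here are illustrative; the actual driver lives in the reload machinery,
  // e.g. the IsolateReloadContext mentioned below):
  //
  //   ClassAndSize* old_table = nullptr;
  //   intptr_t num_old_cids = 0;
  //   class_table->CopyBeforeHotReload(&old_table, &num_old_cids);
  //   class_table->ResetBeforeHotReload();
  //   ... load the new program, registering new/updated classes ...
  //   class_table->ResetAfterHotReload(old_table, num_old_cids, is_rollback);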
  void CopyBeforeHotReload(ClassAndSize** copy, intptr_t* copy_num_cids) {
    // The [IsolateReloadContext] will need to maintain a copy of the old class
    // table until instances have been morphed.
    const intptr_t num_cids = NumCids();
    const intptr_t bytes = sizeof(ClassAndSize) * num_cids;
    auto class_and_size = static_cast<ClassAndSize*>(malloc(bytes));
    for (intptr_t i = 0; i < num_cids; ++i) {
      class_and_size[i] =
          ClassAndSize(table_[i], shared_class_table_->table_[i]);
    }
    *copy_num_cids = num_cids;
    *copy = class_and_size;
  }

  void ResetBeforeHotReload() {
    // The [IsolateReloadContext] is now the source of truth for the GC.
    //
    // We cannot clear out the class pointers, though, because a hot-reload
    // contains only a diff: if e.g. a class included in the hot-reload has a
    // superclass not included in the diff, it will look up that superclass in
    // this class table (e.g. `cls.SuperClass` will cause us to come here).
    for (intptr_t i = 0; i < top_; ++i) {
      shared_class_table_->table_[i] = 0;
    }
  }

  void ResetAfterHotReload(ClassAndSize* old_table,
                           intptr_t num_old_cids,
                           bool is_rollback) {
    // The [IsolateReloadContext] is no longer the source of truth for the GC
    // after we return, so we restore size information for all classes.
    if (is_rollback) {
      SetNumCids(num_old_cids);
      for (intptr_t i = 0; i < num_old_cids; ++i) {
        shared_class_table_->table_[i] = old_table[i].size_;
        table_[i] = old_table[i].class_;
      }
    } else {
      CopySizesFromClassObjects();
    }

    // Can't free this table immediately as another thread (e.g., the
    // concurrent marker or sweeper) may be between loading the table pointer
    // and loading the table element. The table will be freed at the next
    // major GC or isolate shutdown.
    AddOldTable(old_table);
  }

  // Thread-safe.
  RawClass* At(intptr_t index) const {
    ASSERT(IsValidIndex(index));
    return table_[index];
  }

  intptr_t SizeAt(intptr_t index) const {
    return shared_class_table_->SizeAt(index);
  }

  void SetAt(intptr_t index, RawClass* raw_cls);

  bool IsValidIndex(intptr_t index) const {
    return shared_class_table_->IsValidIndex(index);
  }

  bool HasValidClassAt(intptr_t index) const {
    ASSERT(IsValidIndex(index));
    return table_[index] != nullptr;
  }

  intptr_t NumCids() const { return shared_class_table_->NumCids(); }
  intptr_t Capacity() const { return shared_class_table_->Capacity(); }

  // Used to drop recently added classes.
  void SetNumCids(intptr_t num_cids) {
    shared_class_table_->SetNumCids(num_cids);

    ASSERT(num_cids <= top_);
    top_ = num_cids;
  }

  void Register(const Class& cls);
  void AllocateIndex(intptr_t index);
  void Unregister(intptr_t index);

  void Remap(intptr_t* old_to_new_cids);

  void VisitObjectPointers(ObjectPointerVisitor* visitor);

  // If a snapshot reader has populated the class table then the
  // sizes in the class table are not correct. Iterates through the
  // table, updating the sizes.
  void CopySizesFromClassObjects();

  void Validate();

  void Print();

  // Used by the generated code.
  static intptr_t table_offset() { return OFFSET_OF(ClassTable, table_); }

  // Used by the generated code.
  static intptr_t shared_class_table_offset() {
    return OFFSET_OF(ClassTable, shared_class_table_);
  }

#ifndef PRODUCT
  // Describes layout of heap stats for code generation. See offset_extractor.cc
  struct ArrayLayout {
    static intptr_t elements_start_offset() { return 0; }

    static constexpr intptr_t kElementSize = sizeof(ClassHeapStats);
  };
#endif

#ifndef PRODUCT
  ClassHeapStats* StatsWithUpdatedSize(intptr_t cid);

  void AllocationProfilePrintJSON(JSONStream* stream, bool internal);

  void PrintToJSONObject(JSONObject* object);
#endif  // !PRODUCT

  void AddOldTable(ClassAndSize* old_table);
  // Deallocates table copies. Do not call during concurrent access to table.
  void FreeOldTables();

 private:
  friend class GCMarker;
  friend class MarkingWeakVisitor;
  friend class Scavenger;
  friend class ScavengerWeakVisitor;
  friend class ClassHeapStatsTestHelper;
  friend class HeapTestsHelper;

  static const int kInitialCapacity = SharedClassTable::kInitialCapacity;
  static const int kCapacityIncrement = SharedClassTable::kCapacityIncrement;

  void Grow(intptr_t index);

  intptr_t top_;
  intptr_t capacity_;

  // Copy-on-write is used for table_, with old copies stored in old_tables_.
  RawClass** table_;
  MallocGrowableArray<ClassAndSize*>* old_tables_;
  MallocGrowableArray<RawClass**>* old_class_tables_;
  SharedClassTable* shared_class_table_;

  DISALLOW_COPY_AND_ASSIGN(ClassTable);
};

}  // namespace dart

#endif  // RUNTIME_VM_CLASS_TABLE_H_