[vm] Reland two dispatch table related changes as a single change.

These changes were originally submitted separately on different days,
and a major performance regression was seen after the first change
when creating snapshots that led to both being reverted. However,
that performance regression should be addressed by the followup.

First change:
"[vm] Treat the dispatch table as a root in the snapshot.

Additional changes:
* Only serialize a dispatch table in precompiled snapshots.
* Add information in v8 snapshot profiles for the dispatch table.
* Fix a typo in a field name.
* Print the number of Instructions objects (or payloads, for
  precompiled bare instructions mode) in the fake cluster for
  the data section.
* Fix v8 snapshots profiles so objects in memory mapped segments
  and only those are prefixed with "(RO) ".
* Add names for Instructions objects in v8 snapshot profiles
  when we can use the assembly namer.
* Add command line flag for old #define'd false flag."

Second change:
"[vm/aot] Keep GC-visible references to dispatch table Code entries.

This change splits dispatch table handling into four distinct
parts:

* The dispatch table generator does not make a dispatch table
  directly, but rather creates an Array that contains the Code
  objects for dispatch table entries.
* The precompiler takes this Array and puts it in the object
  store, which makes it a new GC root.
* The serializer takes this information and serializes the
  dispatch table information in the same form as before.
* The deserializer creates a DispatchTable object and populates
  it using the serialized information.

The change in the precompiler ensures that the Code objects
used in the dispatch table have GC-visible references. Thus,
even if all other references to them from the other GC roots
were removed, they would be accessible in the serializer in
the case of a GC pass between the precompiler and serializer.

This change also means that the serializer can retrieve and
trace the Code objects directly rather than first looking up
the Code objects by their entry point."

Bug: https://github.com/dart-lang/sdk/issues/41022
Change-Id: I52c83b0536fc588da0bef9aed1f0c72e8ee4663f
Cq-Include-Trybots: luci.dart.try:vm-kernel-precomp-linux-release-x64-try,vm-kernel-precomp-linux-release-simarm-try,vm-kernel-precomp-linux-release-simarm64-try,vm-kernel-precomp-linux-release-simarm_x64-try,vm-kernel-precomp-android-release-arm64-try,vm-kernel-precomp-android-release-arm_x64-try,vm-kernel-precomp-mac-release-simarm64-try,vm-kernel-precomp-win-release-x64-try
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/139285
Commit-Queue: Teagan Strickland <sstrickl@google.com>
Reviewed-by: Alexander Aprelev <aam@google.com>
Reviewed-by: Martin Kustermann <kustermann@google.com>
This commit is contained in:
Teagan Strickland 2020-03-13 17:19:52 +00:00 committed by commit-bot@chromium.org
parent 8ed772d37f
commit 2195c3282a
14 changed files with 619 additions and 457 deletions

View file

@ -27,10 +27,15 @@
#include "vm/timeline.h"
#include "vm/version.h"
#define LOG_SECTION_BOUNDARIES false
namespace dart {
#if !defined(DART_PRECOMPILED_RUNTIME)
DEFINE_FLAG(bool,
print_cluster_information,
false,
"Print information about clusters written to snapshot");
#endif
#if defined(DART_PRECOMPILER)
DEFINE_FLAG(charp,
write_v8_snapshot_profile_to,
@ -101,27 +106,34 @@ void Deserializer::InitializeHeader(RawObject* raw,
#if !defined(DART_PRECOMPILED_RUNTIME)
void SerializationCluster::WriteAndMeasureAlloc(Serializer* serializer) {
if (LOG_SECTION_BOUNDARIES) {
OS::PrintErr("Data + %" Px ": Alloc %s\n", serializer->bytes_written(),
name_);
}
intptr_t start_size = serializer->bytes_written() + serializer->GetDataSize();
intptr_t start_size = serializer->bytes_written();
intptr_t start_data = serializer->GetDataSize();
intptr_t start_objects = serializer->next_ref_index();
WriteAlloc(serializer);
intptr_t stop_size = serializer->bytes_written() + serializer->GetDataSize();
intptr_t stop_size = serializer->bytes_written();
intptr_t stop_data = serializer->GetDataSize();
intptr_t stop_objects = serializer->next_ref_index();
size_ += (stop_size - start_size);
if (FLAG_print_cluster_information) {
const int hex_size = kWordSize * 2;
OS::PrintErr("Snapshot 0x%0*.*" Px " (%" Pd "), ", hex_size, hex_size,
start_size, stop_size - start_size);
OS::PrintErr("Data 0x%0*.*" Px " (%" Pd "): ", hex_size, hex_size,
start_data, stop_data - start_data);
OS::PrintErr("Alloc %s (%" Pd ")\n", name(), stop_objects - start_objects);
}
size_ += (stop_size - start_size) + (stop_data - start_data);
num_objects_ += (stop_objects - start_objects);
}
void SerializationCluster::WriteAndMeasureFill(Serializer* serializer) {
if (LOG_SECTION_BOUNDARIES) {
OS::PrintErr("Data + %" Px ": Fill %s\n", serializer->bytes_written(),
name_);
}
intptr_t start = serializer->bytes_written();
WriteFill(serializer);
intptr_t stop = serializer->bytes_written();
if (FLAG_print_cluster_information) {
const int hex_size = kWordSize * 2;
OS::PrintErr("Snapshot 0x%0*.*" Px " (%" Pd "): Fill %s\n", hex_size,
hex_size, start, stop - start, name());
}
size_ += (stop - start);
}
@ -2008,8 +2020,11 @@ class PcDescriptorsDeserializationCluster : public DeserializationCluster {
// PcDescriptor, CompressedStackMaps, OneByteString, TwoByteString
class RODataSerializationCluster : public SerializationCluster {
public:
RODataSerializationCluster(const char* name, intptr_t cid)
: SerializationCluster(name), cid_(cid) {}
RODataSerializationCluster(Zone* zone, const char* type, intptr_t cid)
: SerializationCluster(ImageWriter::TagObjectTypeAsReadOnly(zone, type)),
cid_(cid),
objects_(),
type_(type) {}
~RODataSerializationCluster() {}
void Trace(Serializer* s, RawObject* object) {
@ -2037,9 +2052,9 @@ class RODataSerializationCluster : public SerializationCluster {
RawObject* object = objects_[i];
s->AssignRef(object);
if (cid_ == kOneByteStringCid || cid_ == kTwoByteStringCid) {
s->TraceStartWritingObject(name(), object, String::RawCast(object));
s->TraceStartWritingObject(type_, object, String::RawCast(object));
} else {
s->TraceStartWritingObject(name(), object, nullptr);
s->TraceStartWritingObject(type_, object, nullptr);
}
uint32_t offset = s->GetDataOffset(object);
s->TraceDataOffset(offset);
@ -2060,6 +2075,7 @@ class RODataSerializationCluster : public SerializationCluster {
private:
const intptr_t cid_;
GrowableArray<RawObject*> objects_;
const char* const type_;
};
#endif // !DART_PRECOMPILED_RUNTIME
@ -4565,6 +4581,23 @@ void Serializer::TraceEndWritingObject() {
}
}
// Returns the type name used in v8 snapshot profiles for objects with the
// given class id when they are serialized as read-only data, or nullptr if
// objects of this class id are not serialized as read-only data.
const char* Serializer::ReadOnlyObjectType(intptr_t cid) {
  if (cid == kPcDescriptorsCid) return "PcDescriptors";
  if (cid == kCodeSourceMapCid) return "CodeSourceMap";
  if (cid == kCompressedStackMapsCid) return "CompressedStackMaps";
  if (cid == kOneByteStringCid) return "OneByteString";
  if (cid == kTwoByteStringCid) return "TwoByteString";
  return nullptr;
}
SerializationCluster* Serializer::NewClusterForClass(intptr_t cid) {
#if defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
@ -4586,18 +4619,8 @@ SerializationCluster* Serializer::NewClusterForClass(intptr_t cid) {
}
if (Snapshot::IncludesCode(kind_)) {
switch (cid) {
case kPcDescriptorsCid:
return new (Z) RODataSerializationCluster("(RO)PcDescriptors", cid);
case kCodeSourceMapCid:
return new (Z) RODataSerializationCluster("(RO)CodeSourceMap", cid);
case kCompressedStackMapsCid:
return new (Z)
RODataSerializationCluster("(RO)CompressedStackMaps", cid);
case kOneByteStringCid:
return new (Z) RODataSerializationCluster("(RO)OneByteString", cid);
case kTwoByteStringCid:
return new (Z) RODataSerializationCluster("(RO)TwoByteString", cid);
if (auto const type = ReadOnlyObjectType(cid)) {
return new (Z) RODataSerializationCluster(Z, type, cid);
}
}
@ -4792,13 +4815,6 @@ intptr_t Serializer::GetDataSize() const {
return image_writer_->data_size();
}
intptr_t Serializer::GetTextSize() const {
if (image_writer_ == NULL) {
return 0;
}
return image_writer_->text_size();
}
void Serializer::Push(RawObject* object) {
if (!object->IsHeapObject()) {
RawSmi* smi = Smi::RawCast(object);
@ -5018,6 +5034,127 @@ void Serializer::Serialize() {
}
}
}
#endif // !defined(DART_PRECOMPILED_RUNTIME)
#if defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME)
// The serialized format of the dispatch table is a sequence of variable-length
// integers (the built-in variable-length integer encoding/decoding of
// the stream). Each encoded integer e is interpreted thus:
// -kRecentCount .. -1 Pick value from the recent values buffer at index -1-e.
// 0 Empty (unused) entry.
// 1 .. kMaxRepeat Repeat previous entry e times.
// kIndexBase or higher Pick entry point from the object at index e-kIndexBase
// in the snapshot code cluster. Also put it in the recent
// values buffer at the next round-robin index.
// Constants for serialization format. Chosen such that repeats and recent
// values are encoded as single bytes in SLEB128 encoding.
static constexpr intptr_t kDispatchTableSpecialEncodingBits = 6;
static constexpr intptr_t kDispatchTableRecentCount =
1 << kDispatchTableSpecialEncodingBits;
static constexpr intptr_t kDispatchTableRecentMask =
(1 << kDispatchTableSpecialEncodingBits) - 1;
static constexpr intptr_t kDispatchTableMaxRepeat =
(1 << kDispatchTableSpecialEncodingBits) - 1;
static constexpr intptr_t kDispatchTableIndexBase = kDispatchTableMaxRepeat + 1;
#endif // defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME)
// Serializes the dispatch table Code entries using the format described in
// the comment above kDispatchTableSpecialEncodingBits. Only precompiled
// (full AOT) snapshots contain a dispatch table; for any other snapshot kind
// this writes nothing. Records the number of bytes written into
// dispatch_table_size_ for snapshot size reporting.
void Serializer::WriteDispatchTable(const Array& entries) {
#if defined(DART_PRECOMPILER)
  if (kind() != Snapshot::kFullAOT) return;

  const intptr_t bytes_before = bytes_written();

  // A null entries array serializes the same as an empty table: length 0.
  const intptr_t table_length = entries.IsNull() ? 0 : entries.Length();

  ASSERT(table_length <= compiler::target::kWordMax);
  WriteUnsigned(table_length);
  if (table_length == 0) {
    dispatch_table_size_ = bytes_written() - bytes_before;
    return;
  }

  auto const code_cluster =
      reinterpret_cast<CodeSerializationCluster*>(clusters_by_cid_[kCodeCid]);
  ASSERT(code_cluster != nullptr);
  // Reference IDs in a cluster are allocated sequentially, so we can use the
  // first code object's reference ID to calculate the cluster index.
  const intptr_t first_code_id =
      WriteRefId(code_cluster->discovered_objects()->At(0));
  // The first object in the code cluster must have its reference ID allocated.
  ASSERT(first_code_id != 0 && first_code_id != WriteRefId(Code::null()));

  // If instructions can be deduped, the code order table in the deserializer
  // may not contain all Code objects in the snapshot. Thus, we write the ID
  // for the first code object here so we can retrieve it during deserialization
  // and calculate the snapshot ID for Code objects from the cluster index.
  //
  // We could just use the snapshot reference ID of the Code object itself
  // instead of the cluster index and avoid this. However, since entries are
  // SLEB128 encoded, the size delta for serializing the first ID once is less
  // than the size delta of serializing the ID plus kIndexBase for each entry,
  // even when Code objects are allocated before all other non-base objects.
  //
  // We could also map Code objects to the first Code object in the cluster with
  // the same entry point and serialize that ID instead, but that loses
  // information about which Code object was originally referenced.
  ASSERT(first_code_id <= compiler::target::kWordMax);
  WriteUnsigned(first_code_id);

  RawCode* previous_code = nullptr;
  RawCode* recent[kDispatchTableRecentCount] = {nullptr};
  intptr_t recent_index = 0;
  intptr_t repeat_count = 0;
  for (intptr_t i = 0; i < table_length; i++) {
    auto const code = Code::RawCast(entries.At(i));
    // First, see if we're repeating the previous entry (invalid, recent, or
    // encoded).
    if (code == previous_code) {
      // Repeat runs are capped at kDispatchTableMaxRepeat so that each repeat
      // marker still fits in a single SLEB128 byte.
      if (++repeat_count == kDispatchTableMaxRepeat) {
        Write(kDispatchTableMaxRepeat);
        repeat_count = 0;
      }
      continue;
    }
    // Emit any outstanding repeat count before handling the new code value.
    if (repeat_count > 0) {
      Write(repeat_count);
      repeat_count = 0;
    }
    previous_code = code;
    // The invalid entry can be repeated, but is never part of the recent list
    // since it already encodes to a single byte.
    if (code == Code::null()) {
      Write(0);
      continue;
    }
    // Check against the recent entries, and write an encoded reference to
    // the recent entry if found.
    intptr_t found_index = 0;
    for (; found_index < kDispatchTableRecentCount; found_index++) {
      if (recent[found_index] == code) break;
    }
    if (found_index < kDispatchTableRecentCount) {
      // Recent hits are encoded as negative values: index i becomes ~i,
      // i.e. -1 - i, matching the decoder in ReadDispatchTable.
      Write(~found_index);
      continue;
    }
    // We have a non-repeated, non-recent entry, so encode the reference ID of
    // the code object and emit that.
    auto const object_id = WriteRefId(code);
    // Make sure that this code object has an allocated reference ID.
    ASSERT(object_id != 0 && object_id != WriteRefId(Code::null()));
    // Use the index in the code cluster, not in the snapshot.
    auto const encoded = kDispatchTableIndexBase + (object_id - first_code_id);
    ASSERT(encoded <= compiler::target::kWordMax);
    Write(encoded);
    recent[recent_index] = code;
    recent_index = (recent_index + 1) & kDispatchTableRecentMask;
  }
  // Flush any repeat run that reaches the end of the table.
  if (repeat_count > 0) {
    Write(repeat_count);
  }
  dispatch_table_size_ = bytes_written() - bytes_before;
#endif  // defined(DART_PRECOMPILER)
}
void Serializer::PrintSnapshotSizes() {
#if !defined(DART_PRECOMPILED_RUNTIME)
@ -5030,19 +5167,37 @@ void Serializer::PrintSnapshotSizes() {
clusters_by_size.Add(cluster);
}
}
if (GetTextSize() != 0) {
intptr_t text_size = 0;
if (image_writer_ != nullptr) {
auto const text_object_count = image_writer_->GetTextObjectCount();
text_size = image_writer_->text_size();
intptr_t trampoline_count, trampoline_size;
image_writer_->GetTrampolineInfo(&trampoline_count, &trampoline_size);
auto const instructions_count = text_object_count - trampoline_count;
auto const instructions_size = text_size - trampoline_size;
clusters_by_size.Add(new (zone_) FakeSerializationCluster(
"(RO)Instructions", 0, GetTextSize()));
ImageWriter::TagObjectTypeAsReadOnly(zone_, "Instructions"),
instructions_count, instructions_size));
if (trampoline_size > 0) {
clusters_by_size.Add(new (zone_) FakeSerializationCluster(
ImageWriter::TagObjectTypeAsReadOnly(zone_, "Trampoline"),
trampoline_count, trampoline_size));
}
}
// A null dispatch table will serialize to a single byte.
if (dispatch_table_size_ > 1) {
// The dispatch_table_size_ will be 0 if the snapshot did not include a
// dispatch table (i.e., the VM snapshot). For a precompiled isolate
// snapshot, we always serialize at least _one_ byte for the DispatchTable.
if (dispatch_table_size_ > 0) {
const auto& dispatch_table_entries = Array::Handle(
zone_, isolate()->object_store()->dispatch_table_code_entries());
auto const entry_count =
dispatch_table_entries.IsNull() ? 0 : dispatch_table_entries.Length();
clusters_by_size.Add(new (zone_) FakeSerializationCluster(
"(RO)DispatchTable", isolate()->dispatch_table()->length(),
dispatch_table_size_));
"DispatchTable", entry_count, dispatch_table_size_));
}
clusters_by_size.Sort(CompareClusters);
double total_size =
static_cast<double>(bytes_written() + GetDataSize() + GetTextSize());
static_cast<double>(bytes_written() + GetDataSize() + text_size);
double cumulative_fraction = 0.0;
for (intptr_t i = 0; i < clusters_by_size.length(); i++) {
SerializationCluster* cluster = clusters_by_size[i];
@ -5056,6 +5211,7 @@ void Serializer::PrintSnapshotSizes() {
#endif // !defined(DART_PRECOMPILED_RUNTIME)
}
#if !defined(DART_PRECOMPILED_RUNTIME)
void Serializer::AddVMIsolateBaseObjects() {
// These objects are always allocated by Object::InitOnce, so they are not
// written into the snapshot.
@ -5199,6 +5355,23 @@ void Serializer::WriteIsolateSnapshot(intptr_t num_base_objects,
Push(*p);
}
const auto& dispatch_table_entries =
Array::Handle(zone_, object_store->dispatch_table_code_entries());
// We should only have a dispatch table in precompiled mode.
ASSERT(dispatch_table_entries.IsNull() || kind() == Snapshot::kFullAOT);
#if defined(DART_PRECOMPILER)
// We treat the dispatch table as a root object and trace the Code objects it
// references. Otherwise, a non-empty entry could be invalid on
// deserialization if the corresponding Code object was not reachable from the
// existing snapshot roots.
if (!dispatch_table_entries.IsNull()) {
for (intptr_t i = 0; i < dispatch_table_entries.Length(); i++) {
Push(dispatch_table_entries.At(i));
}
}
#endif
Serialize();
// Write roots.
@ -5206,12 +5379,35 @@ void Serializer::WriteIsolateSnapshot(intptr_t num_base_objects,
WriteRootRef(*p, kObjectStoreFieldNames[p - from]);
}
// Serialize dispatch table.
GrowableArray<RawCode*>* code_objects =
static_cast<CodeSerializationCluster*>(clusters_by_cid_[kCodeCid])
->discovered_objects();
dispatch_table_size_ = DispatchTable::Serialize(
this, isolate()->dispatch_table(), *code_objects);
// The dispatch table is serialized only for precompiled snapshots.
WriteDispatchTable(dispatch_table_entries);
#if defined(DART_PRECOMPILER)
// If any bytes were written for the dispatch table, add it to the profile.
if (dispatch_table_size_ > 0 && profile_writer_ != nullptr) {
// Grab an unused ref index for a unique object id for the dispatch table.
const auto dispatch_table_id = next_ref_index_++;
const V8SnapshotProfileWriter::ObjectId dispatch_table_snapshot_id(
V8SnapshotProfileWriter::kSnapshot, dispatch_table_id);
profile_writer_->AddRoot(dispatch_table_snapshot_id, "dispatch_table");
profile_writer_->SetObjectTypeAndName(dispatch_table_snapshot_id,
"DispatchTable", nullptr);
profile_writer_->AttributeBytesTo(dispatch_table_snapshot_id,
dispatch_table_size_);
if (!dispatch_table_entries.IsNull()) {
for (intptr_t i = 0; i < dispatch_table_entries.Length(); i++) {
auto const code = Code::RawCast(dispatch_table_entries.At(i));
if (code == Code::null()) continue;
const V8SnapshotProfileWriter::ObjectId code_id(
V8SnapshotProfileWriter::kSnapshot, WriteRefId(code));
profile_writer_->AttributeReferenceTo(
dispatch_table_snapshot_id,
{code_id, V8SnapshotProfileWriter::Reference::kElement, i});
}
}
}
#endif
#if defined(DEBUG)
Write<int32_t>(kSectionMarker);
@ -5377,6 +5573,57 @@ DeserializationCluster* Deserializer::ReadCluster() {
return NULL;
}
// Reconstructs the isolate's DispatchTable from the serialized form written
// by Serializer::WriteDispatchTable (see the encoding description above
// kDispatchTableSpecialEncodingBits). Only the precompiled runtime contains
// a dispatch table; elsewhere this is a no-op.
void Deserializer::ReadDispatchTable() {
#if defined(DART_PRECOMPILED_RUNTIME)
  const intptr_t length = ReadUnsigned();
  if (length == 0) return;

  // Not all Code objects may be in the code_order_table when instructions can
  // be deduplicated. Thus, we serialize the reference ID of the first code
  // object, from which we can get the reference ID for any code object.
  const intptr_t first_code_id = ReadUnsigned();

  auto const I = isolate();
  // Empty (encoded as 0) table slots are filled with the entry point of the
  // null-error stub rather than left as 0.
  auto code = I->object_store()->dispatch_table_null_error_stub();
  ASSERT(code != Code::null());
  uword null_entry = Code::EntryPointOf(code);

  auto const table = new DispatchTable(length);
  auto const array = table->array();
  uword value = 0;
  // Decoder state mirroring the encoder: a recent-values ring buffer and a
  // pending repeat count for run-length-encoded entries.
  uword recent[kDispatchTableRecentCount] = {0};
  intptr_t recent_index = 0;
  intptr_t repeat_count = 0;
  for (intptr_t i = 0; i < length; i++) {
    // While a repeat run is active, reuse the last decoded value without
    // reading from the stream.
    if (repeat_count > 0) {
      array[i] = value;
      repeat_count--;
      continue;
    }
    auto const encoded = Read<intptr_t>();
    if (encoded == 0) {
      // Empty (unused) entry.
      value = null_entry;
    } else if (encoded < 0) {
      // Negative: reference into the recent values buffer (index ~encoded).
      intptr_t r = ~encoded;
      ASSERT(r < kDispatchTableRecentCount);
      value = recent[r];
    } else if (encoded <= kDispatchTableMaxRepeat) {
      // Repeat the previous value `encoded` times; one copy is stored by the
      // fallthrough below, so `encoded - 1` remain for later iterations.
      repeat_count = encoded - 1;
    } else {
      // Cluster-relative code index: translate to a snapshot reference via
      // the first code object's ID and take that Code's entry point.
      intptr_t cluster_index = encoded - kDispatchTableIndexBase;
      code = Code::RawCast(Ref(first_code_id + cluster_index));
      value = Code::EntryPointOf(code);
      recent[recent_index] = value;
      recent_index = (recent_index + 1) & kDispatchTableRecentMask;
    }
    array[i] = value;
  }
  // A well-formed encoding never leaves a repeat run extending past the end.
  ASSERT(repeat_count == 0);
  I->set_dispatch_table(table);
#endif
}
RawApiError* Deserializer::VerifyImageAlignment() {
if (image_reader_ != nullptr) {
return image_reader_->VerifyAlignment();
@ -5776,11 +6023,8 @@ void Deserializer::ReadIsolateSnapshot(ObjectStore* object_store) {
*p = ReadRef();
}
// Deserialize dispatch table
const Array& code_array =
Array::Handle(zone_, object_store->code_order_table());
thread()->isolate()->set_dispatch_table(
DispatchTable::Deserialize(this, code_array));
// Deserialize dispatch table (when applicable)
ReadDispatchTable();
#if defined(DEBUG)
int32_t section_marker = Read<int32_t>();

View file

@ -246,36 +246,6 @@ class Serializer : public ThreadStackResource {
}
void Align(intptr_t alignment) { stream_.Align(alignment); }
private:
intptr_t WriteRefId(RawObject* object) {
intptr_t id = 0;
if (!object->IsHeapObject()) {
RawSmi* smi = Smi::RawCast(object);
id = smi_ids_.Lookup(smi)->id_;
if (id == 0) {
FATAL("Missing ref");
}
} else {
// The object id weak table holds image offsets for Instructions instead
// of ref indices.
ASSERT(!object->IsInstructions());
id = heap_->GetObjectId(object);
if (id == 0) {
if (object->IsCode() && !Snapshot::IncludesCode(kind_)) {
return WriteRefId(Object::null());
}
#if !defined(DART_PRECOMPILED_RUNTIME)
if (object->IsBytecode() && !Snapshot::IncludesBytecode(kind_)) {
return WriteRefId(Object::null());
}
#endif // !DART_PRECOMPILED_RUNTIME
FATAL("Missing ref");
}
}
return id;
}
public:
void WriteRootRef(RawObject* object, const char* name = nullptr) {
intptr_t id = WriteRefId(object);
WriteUnsigned(id);
@ -382,7 +352,8 @@ class Serializer : public ThreadStackResource {
uint32_t GetDataOffset(RawObject* object) const;
void TraceDataOffset(uint32_t offset);
intptr_t GetDataSize() const;
intptr_t GetTextSize() const;
void WriteDispatchTable(const Array& entries);
Snapshot::Kind kind() const { return kind_; }
intptr_t next_ref_index() const { return next_ref_index_; }
@ -390,6 +361,36 @@ class Serializer : public ThreadStackResource {
void DumpCombinedCodeStatistics();
private:
static const char* ReadOnlyObjectType(intptr_t cid);
  // Returns the snapshot reference ID previously assigned to |object|.
  // Smis are looked up in the smi ID table; heap objects are looked up in
  // the heap's object ID table. Code (and, outside the precompiled runtime,
  // Bytecode) objects that are not included in this snapshot kind resolve to
  // the reference ID of Object::null(). It is a fatal error to request the
  // ID of any other object that was never assigned one.
  intptr_t WriteRefId(RawObject* object) {
    intptr_t id = 0;
    if (!object->IsHeapObject()) {
      RawSmi* smi = Smi::RawCast(object);
      id = smi_ids_.Lookup(smi)->id_;
      if (id == 0) {
        FATAL("Missing ref");
      }
    } else {
      // The object id weak table holds image offsets for Instructions instead
      // of ref indices.
      ASSERT(!object->IsInstructions());
      id = heap_->GetObjectId(object);
      if (id == 0) {
        if (object->IsCode() && !Snapshot::IncludesCode(kind_)) {
          return WriteRefId(Object::null());
        }
#if !defined(DART_PRECOMPILED_RUNTIME)
        if (object->IsBytecode() && !Snapshot::IncludesBytecode(kind_)) {
          return WriteRefId(Object::null());
        }
#endif  // !DART_PRECOMPILED_RUNTIME
        FATAL("Missing ref");
      }
    }
    return id;
  }
Heap* heap_;
Zone* zone_;
Snapshot::Kind kind_;
@ -593,6 +594,8 @@ class Deserializer : public ThreadStackResource {
DeserializationCluster* ReadCluster();
void ReadDispatchTable();
intptr_t next_index() const { return next_ref_index_; }
Heap* heap() const { return heap_; }
Snapshot::Kind kind() const { return kind_; }

View file

@ -2,13 +2,14 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#if defined(DART_PRECOMPILER) && !defined(DART_PRECOMPILED_RUNTIME)
#if defined(DART_PRECOMPILER)
#include "vm/compiler/aot/dispatch_table_generator.h"
#include <memory>
#include "vm/compiler/frontend/kernel_translation_helper.h"
#include "vm/dispatch_table.h"
#include "vm/stub_code.h"
#include "vm/thread.h"
@ -94,7 +95,10 @@ class CidInterval {
class SelectorRow {
public:
SelectorRow(Zone* zone, TableSelector* selector)
: selector_(selector), class_ranges_(zone, 0), ranges_(zone, 0) {}
: selector_(selector),
class_ranges_(zone, 0),
ranges_(zone, 0),
code_(Code::Handle(zone)) {}
TableSelector* selector() const { return selector_; }
@ -123,7 +127,7 @@ class SelectorRow {
selector_->offset = offset;
}
void FillTable(ClassTable* class_table, DispatchTable* table);
void FillTable(ClassTable* class_table, const Array& entries);
private:
TableSelector* selector_;
@ -131,6 +135,7 @@ class SelectorRow {
GrowableArray<CidInterval> class_ranges_;
GrowableArray<Interval> ranges_;
Code& code_;
};
class RowFitter {
@ -231,12 +236,10 @@ bool SelectorRow::Finalize() {
return true;
}
void SelectorRow::FillTable(ClassTable* class_table, DispatchTable* table) {
void SelectorRow::FillTable(ClassTable* class_table, const Array& entries) {
// Define the entries in the table by going top-down, which means more
// specific ones will override more general ones.
Code& code = Code::Handle();
// Sort by depth.
struct IntervalSorter {
static int Compare(const CidInterval* a, const CidInterval* b) {
@ -252,9 +255,9 @@ void SelectorRow::FillTable(ClassTable* class_table, DispatchTable* table) {
const Interval& range = cid_range.range();
const Function* function = cid_range.function();
if (function != nullptr && function->HasCode()) {
code = function->CurrentCode();
code_ = function->CurrentCode();
for (classid_t cid = range.begin(); cid < range.end(); cid++) {
table->SetCodeAt(selector()->offset + cid, code);
entries.SetAt(selector()->offset + cid, code_);
}
}
}
@ -634,17 +637,16 @@ void DispatchTableGenerator::ComputeSelectorOffsets() {
table_size_ = fitter.TableSize();
}
DispatchTable* DispatchTableGenerator::BuildTable() {
// Allocate the dispatch table and fill it in.
DispatchTable* dispatch_table = new DispatchTable(table_size_);
RawArray* DispatchTableGenerator::BuildCodeArray() {
auto& entries = Array::Handle(zone_, Array::New(table_size_, Heap::kOld));
for (intptr_t i = 0; i < table_rows_.length(); i++) {
table_rows_[i]->FillTable(classes_, dispatch_table);
table_rows_[i]->FillTable(classes_, entries);
}
return dispatch_table;
entries.MakeImmutable();
return entries.raw();
}
} // namespace compiler
} // namespace dart
#endif // defined(DART_PRECOMPILER) && !defined(DART_PRECOMPILED_RUNTIME)
#endif // defined(DART_PRECOMPILER)

View file

@ -6,9 +6,10 @@
#define RUNTIME_VM_COMPILER_AOT_DISPATCH_TABLE_GENERATOR_H_
#include "vm/compiler/frontend/kernel_translation_helper.h"
#include "vm/dispatch_table.h"
#include "vm/object.h"
#if !defined(DART_PRECOMPILED_RUNTIME)
namespace dart {
class ClassTable;
@ -84,8 +85,9 @@ class DispatchTableGenerator {
// Find suitable selectors and compute offsets for them.
void Initialize(ClassTable* table);
// Build up the table.
DispatchTable* BuildTable();
// Build up an array of Code objects, used to serialize the information
// deserialized as a DispatchTable at runtime.
RawArray* BuildCodeArray();
private:
void ReadTableSelectorInfo();
@ -93,7 +95,7 @@ class DispatchTableGenerator {
void SetupSelectorRows();
void ComputeSelectorOffsets();
Zone* zone_;
Zone* const zone_;
ClassTable* classes_;
int32_t num_selectors_;
int32_t num_classes_;
@ -107,4 +109,6 @@ class DispatchTableGenerator {
} // namespace compiler
} // namespace dart
#endif // !defined(DART_PRECOMPILED_RUNTIME)
#endif // RUNTIME_VM_COMPILER_AOT_DISPATCH_TABLE_GENERATOR_H_

View file

@ -402,6 +402,19 @@ void Precompiler::DoCompileAll() {
}
TraceForRetainedFunctions();
if (FLAG_use_bare_instructions && FLAG_use_table_dispatch) {
// Build the entries used to serialize the dispatch table before
// dropping functions, as we may clear references to Code objects.
const auto& entries =
Array::Handle(Z, dispatch_table_generator_->BuildCodeArray());
I->object_store()->set_dispatch_table_code_entries(entries);
// Delete the dispatch table generator to ensure there's no attempt
// to add new entries after this point.
delete dispatch_table_generator_;
dispatch_table_generator_ = nullptr;
}
DropFunctions();
DropFields();
TraceTypesFromRetainedClasses();
@ -441,14 +454,6 @@ void Precompiler::DoCompileAll() {
ProgramVisitor::Dedup();
if (FLAG_use_bare_instructions && FLAG_use_table_dispatch) {
I->set_dispatch_table(dispatch_table_generator_->BuildTable());
// Delete the dispatch table generator while the current zone
// is still alive.
delete dispatch_table_generator_;
dispatch_table_generator_ = nullptr;
}
zone_ = NULL;
}

View file

@ -280,9 +280,13 @@ static_assert((1 << kWordSizeLog2) == kWordSize,
using ObjectAlignment = dart::ObjectAlignment<kWordSize, kWordSizeLog2>;
const intptr_t kSmiBits = kBitsPerWord - 2;
const intptr_t kSmiMax = (static_cast<intptr_t>(1) << kSmiBits) - 1;
const intptr_t kSmiMin = -(static_cast<intptr_t>(1) << kSmiBits);
constexpr word kWordMax = (static_cast<uword>(1) << (kBitsPerWord - 1)) - 1;
constexpr word kWordMin = -(static_cast<uword>(1) << (kBitsPerWord - 1));
constexpr uword kUwordMax = static_cast<word>(-1);
constexpr int kSmiBits = kBitsPerWord - 2;
constexpr word kSmiMax = (static_cast<uword>(1) << kSmiBits) - 1;
constexpr word kSmiMin = -(static_cast<uword>(1) << kSmiBits);
// Information about heap pages.
extern const word kPageSize;

View file

@ -4,159 +4,46 @@
#include "vm/dispatch_table.h"
#include "vm/clustered_snapshot.h"
#include "vm/hash_map.h"
#include "vm/object.h"
#include "vm/object_store.h"
#include "platform/assert.h"
namespace dart {
// The serialized format of the dispatch table is a sequence of variable-length
// integers (using the built-in variable-length integer encoding/decoding of
// the stream). Each encoded integer e is interpreted thus:
// -kRecentCount .. -1 Pick value from the recent values buffer at index -1-e.
// 0 Empty (unused) entry.
// 1 .. kMaxRepeat Repeat previous entry e times.
// kIndexBase or higher Pick entry point from the code object at index
// e-kIndexBase in the code array and also put it into
// the recent values buffer at the next index round-robin.
// Constants for serialization format. Chosen such that repeats and recent
// values are encoded as single bytes.
static const intptr_t kMaxRepeat = 63;
static const intptr_t kRecentCount = 64; // Must be a power of two.
static const intptr_t kRecentMask = kRecentCount - 1;
static const intptr_t kIndexBase = kMaxRepeat + 1;
uword DispatchTable::EntryPointFor(const Code& code) {
return code.EntryPoint();
intptr_t DispatchTable::OriginElement() {
#if defined(TARGET_ARCH_X64)
// Max negative byte offset / 8
return 16;
#elif defined(TARGET_ARCH_ARM)
// Max negative load offset / 4
return 1023;
#elif defined(TARGET_ARCH_ARM64)
// Max consecutive sub immediate value
return 4096;
#else
// No AOT on IA32
UNREACHABLE();
return 0;
#endif
}
void DispatchTable::SetCodeAt(intptr_t index, const Code& code) {
ASSERT(index >= 0 && index < length());
// The table is built with the same representation as it has at runtime, that
// is, table entries are function entry points. This representation assumes
// that the code will not move between table building and serialization.
// This property is upheld by the fact that the GC does not move code around.
array_[index] = EntryPointFor(code);
intptr_t DispatchTable::LargestSmallOffset() {
#if defined(TARGET_ARCH_X64)
// Origin + Max positive byte offset / 8
return 31;
#elif defined(TARGET_ARCH_ARM)
// Origin + Max positive load offset / 4
return 2046;
#elif defined(TARGET_ARCH_ARM64)
// Origin + Max consecutive add immediate value
return 8192;
#else
// No AOT on IA32
UNREACHABLE();
return 0;
#endif
}
intptr_t DispatchTable::Serialize(Serializer* serializer,
const DispatchTable* table,
const GrowableArray<RawCode*>& code_objects) {
const intptr_t bytes_before = serializer->bytes_written();
if (table != nullptr) {
table->Serialize(serializer, code_objects);
} else {
serializer->Write<uint32_t>(0);
}
return serializer->bytes_written() - bytes_before;
}
// Serializes this dispatch table with a simple compression scheme:
//  - a leading uint32 holds the table length,
//  - 0 encodes an entry whose stored value is 0 (unset),
//  - a negative value ~r is a back-reference to slot r of a small ring buffer
//    of recently emitted entry points,
//  - values in [1, kMaxRepeat] encode that many repetitions of the previous
//    entry,
//  - values >= kIndexBase encode an index (biased by kIndexBase) into
//    [code_objects], whose entry point is also pushed into the ring buffer.
void DispatchTable::Serialize(
    Serializer* serializer,
    const GrowableArray<RawCode*>& code_objects) const {
  Code& code = Code::Handle();

  // Map each distinct entry point to a 1-based index into [code_objects], so
  // the serialized form can refer to Code objects by index. Only the first
  // Code object with a given entry point is recorded (1-based so that a
  // Lookup miss, which returns 0, is distinguishable).
  IntMap<intptr_t> entry_to_index;
  for (intptr_t i = 0; i < code_objects.length(); i++) {
    code = code_objects[i];
    const uword entry = EntryPointFor(code);
    if (!entry_to_index.HasKey(entry)) {
      entry_to_index.Insert(entry, i + 1);
    }
  }

  uword prev_entry = 0;
  uword recent[kRecentCount] = {0};  // Ring buffer of recent entry points.
  intptr_t recent_index = 0;
  intptr_t repeat_count = 0;
  serializer->Write<uint32_t>(length());
  for (intptr_t i = 0; i < length(); i++) {
    const uword entry = array_[i];
    if (entry == prev_entry) {
      // Accumulate runs of identical entries; flush a run once it reaches the
      // largest encodable repeat count.
      if (++repeat_count == kMaxRepeat) {
        serializer->Write<uint32_t>(kMaxRepeat);
        repeat_count = 0;
      }
    } else {
      // Entry differs from its predecessor: flush any pending run first.
      if (repeat_count > 0) {
        serializer->Write<uint32_t>(repeat_count);
        repeat_count = 0;
      }
      if (entry == 0) {
        serializer->Write<uint32_t>(0);
      } else {
        // Prefer a short back-reference if this entry point was emitted
        // recently.
        bool found_recent = false;
        for (intptr_t r = 0; r < kRecentCount; r++) {
          if (recent[r] == entry) {
            serializer->Write<uint32_t>(~r);  // Negative: ring-buffer hit.
            found_recent = true;
            break;
          }
        }
        if (!found_recent) {
          // Fall back to an explicit index into [code_objects] (undo the
          // 1-based bias used in entry_to_index).
          intptr_t index = entry_to_index.Lookup(entry) - 1;
          ASSERT(index != -1);
          ASSERT(EntryPointFor(Code::Handle(code_objects[index])) == entry);
          serializer->Write<uint32_t>(kIndexBase + index);
          recent[recent_index] = entry;
          recent_index = (recent_index + 1) & kRecentMask;
        }
      }
    }
    prev_entry = entry;
  }
  // Flush a trailing run, if any.
  if (repeat_count > 0) {
    serializer->Write<uint32_t>(repeat_count);
  }
}
// Deserializes a dispatch table previously written by DispatchTable::Serialize
// (see that method for the encoding), resolving Code indices against
// [code_array]. Returns nullptr if no table was serialized (length 0).
DispatchTable* DispatchTable::Deserialize(Deserializer* deserializer,
                                          const Array& code_array) {
  const intptr_t length = deserializer->Read<uint32_t>();
  if (length == 0) {
    return nullptr;
  }

  DispatchTable* table = new DispatchTable(length);

  Code& code = Code::Handle();
  // Entries encoded as 0 resolve to the dispatch-table null-error stub.
  code =
      deserializer->isolate()->object_store()->dispatch_table_null_error_stub();
  uword null_entry = code.EntryPoint();

  uword value = 0;
  uword recent[kRecentCount] = {0};  // Mirrors the serializer's ring buffer.
  intptr_t recent_index = 0;
  intptr_t repeat_count = 0;
  for (intptr_t i = 0; i < length; i++) {
    if (repeat_count > 0) {
      // Still replaying a run: reuse the previous value.
      repeat_count--;
    } else {
      int32_t encoded = deserializer->Read<uint32_t>();
      if (encoded == 0) {
        value = null_entry;
      } else if (encoded < 0) {
        // Back-reference into the recent-entry ring buffer.
        intptr_t r = ~encoded;
        ASSERT(r < kRecentCount);
        value = recent[r];
      } else if (encoded <= kMaxRepeat) {
        // Run-length encoding: this iteration plus (encoded - 1) more copies
        // of the previous value.
        repeat_count = encoded - 1;
      } else {
        // Explicit index into [code_array], biased by kIndexBase.
        intptr_t index = encoded - kIndexBase;
        code ^= code_array.At(index);
        value = EntryPointFor(code);
        recent[recent_index] = value;
        recent_index = (recent_index + 1) & kRecentMask;
      }
    }
    table->array_[i] = value;
  }
  ASSERT(repeat_count == 0);

  return table;
}
// Returns the address inside the table's backing array that the dispatch
// table register should point at (OriginElement() entries in).
const uword* DispatchTable::ArrayOrigin() const {
  return array_.get() + OriginElement();
}
} // namespace dart

View file

@ -8,87 +8,30 @@
#include <memory>
#include "vm/globals.h"
#include "vm/growable_array.h"
namespace dart {
class Array;
class Code;
class Deserializer;
class RawCode;
class Serializer;
namespace compiler {
class DispatchTableGenerator;
}
// A table mapping selector offsets to function entry points, used for
// dispatch in precompiled code. The scraped diff left both the old inline
// definitions and the new out-of-line declarations in place; this keeps only
// the declaration form (the definitions live in the .cc file).
class DispatchTable {
 public:
  explicit DispatchTable(intptr_t length)
      : length_(length), array_(new uword[length]()) {}

  intptr_t length() const { return length_; }

  // The element of the dispatch table array to which the dispatch table
  // register points.
  static intptr_t OriginElement();

  // The largest offset that can use a more compact instruction sequence.
  static intptr_t LargestSmallOffset();

  // Dispatch table array pointer to put into the dispatch table register.
  const uword* ArrayOrigin() const;

  // Stores the entry point of [code] at [index].
  void SetCodeAt(intptr_t index, const Code& code);

  // Writes [table] (possibly null) to [serializer]; returns bytes written.
  static intptr_t Serialize(Serializer* serializer,
                            const DispatchTable* table,
                            const GrowableArray<RawCode*>& code_objects);

  // Reads a table written by Serialize; returns nullptr if none was written.
  static DispatchTable* Deserialize(Deserializer* deserializer,
                                    const Array& code_array);

 private:
  friend class compiler::DispatchTableGenerator;

  void Serialize(Serializer* serializer,
                 const GrowableArray<RawCode*>& code_objects) const;

  static uword EntryPointFor(const Code& code);

  uword* array() { return array_.get(); }

  intptr_t length_;
  std::unique_ptr<uword[]> array_;

  friend class Deserializer;  // For non-const array().

  DISALLOW_COPY_AND_ASSIGN(DispatchTable);
};

View file

@ -309,7 +309,7 @@ void Dwarf::WriteCompilationUnit() {
uint8_t* buffer = nullptr;
WriteStream stream(&buffer, ZoneReallocate, 64 * KB);
AssemblyCodeNamer namer(zone_);
SnapshotTextObjectNamer namer(zone_);
if (asm_stream_ != nullptr) {
#if defined(TARGET_OS_MACOS) || defined(TARGET_OS_MACOS_IOS)
@ -375,7 +375,7 @@ void Dwarf::WriteCompilationUnit() {
intptr_t last_code_index = codes_.length() - 1;
const Code& last_code = *(codes_[last_code_index]);
PrintNamedAddressWithOffset(
namer.AssemblyNameFor(last_code_index, last_code), last_code.Size());
namer.SnapshotNameFor(last_code_index, last_code), last_code.Size());
}
if (elf_ != nullptr) {
addr(elf_->NextMemoryOffset());
@ -438,7 +438,7 @@ void Dwarf::WriteAbstractFunctions() {
void Dwarf::WriteConcreteFunctions() {
Function& function = Function::Handle(zone_);
Script& script = Script::Handle(zone_);
AssemblyCodeNamer namer(zone_);
SnapshotTextObjectNamer namer(zone_);
for (intptr_t i = 0; i < codes_.length(); i++) {
const Code& code = *(codes_[i]);
RELEASE_ASSERT(!code.IsNull());
@ -473,7 +473,7 @@ void Dwarf::WriteConcreteFunctions() {
// DW_AT_low_pc
if (asm_stream_ != nullptr) {
const char* asm_name = namer.AssemblyNameFor(i, code);
const char* asm_name = namer.SnapshotNameFor(i, code);
// DW_AT_low_pc
PrintNamedAddress(asm_name);
// DW_AT_high_pc
@ -581,7 +581,7 @@ void Dwarf::WriteInliningNode(InliningNode* node,
intptr_t root_code_index,
intptr_t root_code_address,
const Script& parent_script,
AssemblyCodeNamer* namer) {
SnapshotTextObjectNamer* namer) {
RELEASE_ASSERT(elf_ == nullptr || root_code_address >= 0);
intptr_t file = LookupScript(parent_script);
intptr_t line = node->call_pos.value();
@ -604,7 +604,7 @@ void Dwarf::WriteInliningNode(InliningNode* node,
if (asm_stream_ != nullptr) {
const char* asm_name =
namer->AssemblyNameFor(root_code_index, *codes_[root_code_index]);
namer->SnapshotNameFor(root_code_index, *codes_[root_code_index]);
// DW_AT_low_pc
PrintNamedAddressWithOffset(asm_name, node->start_pc_offset);
// DW_AT_high_pc
@ -739,14 +739,14 @@ void Dwarf::WriteLines() {
Array& functions = Array::Handle(zone_);
GrowableArray<const Function*> function_stack(zone_, 8);
GrowableArray<TokenPosition> token_positions(zone_, 8);
AssemblyCodeNamer namer(zone_);
SnapshotTextObjectNamer namer(zone_);
for (intptr_t i = 0; i < codes_.length(); i++) {
const Code& code = *(codes_[i]);
const char* asm_name = nullptr;
if (asm_stream_ != nullptr) {
asm_name = namer.AssemblyNameFor(i, code);
asm_name = namer.SnapshotNameFor(i, code);
}
intptr_t current_code_address = -1;
@ -825,7 +825,7 @@ void Dwarf::WriteLines() {
} else {
u1(DW_LNS_advance_pc);
if (asm_stream_ != nullptr) {
const char* previous_asm_name = namer.AssemblyNameFor(
const char* previous_asm_name = namer.SnapshotNameFor(
previous_code_index, *codes_[previous_code_index]);
Print(".uleb128 %s - %s + %" Pd "\n", asm_name, previous_asm_name,
current_pc_offset - previous_pc_offset);
@ -876,9 +876,9 @@ void Dwarf::WriteLines() {
u1(DW_LNS_advance_pc);
if (asm_stream_ != nullptr) {
const char* last_asm_name =
namer.AssemblyNameFor(last_code_index, last_code);
namer.SnapshotNameFor(last_code_index, last_code);
ASSERT(previous_code_index >= 0);
const char* previous_asm_name = namer.AssemblyNameFor(
const char* previous_asm_name = namer.SnapshotNameFor(
previous_code_index, *codes_[previous_code_index]);
Print(".uleb128 %s - %s + %" Pd "\n", last_asm_name, previous_asm_name,
last_code.Size() - previous_pc_offset);

View file

@ -16,7 +16,7 @@ namespace dart {
class Elf;
class InliningNode;
class AssemblyCodeNamer;
class SnapshotTextObjectNamer;
struct ScriptIndexPair {
// Typedefs needed for the DirectChainedHashMap template.
@ -406,7 +406,7 @@ class Dwarf : public ZoneAllocated {
intptr_t root_code_index,
intptr_t root_code_offset,
const Script& parent_script,
AssemblyCodeNamer* namer);
SnapshotTextObjectNamer* namer);
void WriteLines();
const char* Deobfuscate(const char* cstr);

View file

@ -78,12 +78,16 @@ bool ObjectOffsetTrait::IsKeyEqual(Pair pair, Key key) {
}
#if !defined(DART_PRECOMPILED_RUNTIME)
// Constructs an ImageWriter from [t], which supplies both the heap and a zone
// for allocating the profile type-name strings. The scraped diff left the old
// Heap*-taking constructor head alongside the new body; this keeps only the
// Thread*-taking form.
ImageWriter::ImageWriter(Thread* t)
    : heap_(t->heap()),
      next_data_offset_(0),
      next_text_offset_(0),
      objects_(),
      instructions_(),
      // Pre-tag the fixed type names used for V8 snapshot profiles; text
      // sections are memory-mapped read-only, hence the "(RO) " tag.
      instructions_section_type_(
          TagObjectTypeAsReadOnly(t->zone(), "InstructionsSection")),
      instructions_type_(TagObjectTypeAsReadOnly(t->zone(), "Instructions")),
      trampoline_type_(TagObjectTypeAsReadOnly(t->zone(), "Trampoline")) {
  ResetOffsets();
}
@ -243,6 +247,42 @@ uint32_t ImageWriter::GetDataOffsetFor(RawObject* raw_object) {
return offset;
}
// Returns the number of objects (instructions and trampolines) that will be
// written to the text section.
intptr_t ImageWriter::GetTextObjectCount() const {
  return instructions_.length();
}
// Reports, via the out-parameters, how many trampolines will be written to
// the text section and their total size in bytes.
void ImageWriter::GetTrampolineInfo(intptr_t* count, intptr_t* size) const {
  ASSERT(count != nullptr && size != nullptr);
  *count = 0;
  *size = 0;
  for (auto const& data : instructions_) {
    if (data.trampoline_length == 0) {
      continue;  // Not a trampoline entry.
    }
    ++(*count);
    *size += data.trampoline_length;
  }
}
// Returns the object's class name, tagged as read-only for use in V8 snapshot
// profiles, or nullptr if there is no profile writer.
const char* ImageWriter::ObjectTypeForProfile(const Object& object) const {
  if (profile_writer_ == nullptr) return nullptr;
  ASSERT(IsROSpace());
  Thread* thread = Thread::Current();
  // Reuse the thread's cached handles instead of allocating fresh ones.
  REUSABLE_CLASS_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Class& klass = thread->ClassHandle();
  String& name = thread->StringHandle();
  klass = object.clazz();
  name = klass.UserVisibleName();
  auto const name_str = name.ToCString();
  return TagObjectTypeAsReadOnly(thread->zone(), name_str);
}
// Prefixes [type] with "(RO) " to mark, in V8 snapshot profiles, objects that
// reside in read-only memory-mapped segments. The result is zone-allocated.
const char* ImageWriter::TagObjectTypeAsReadOnly(Zone* zone, const char* type) {
  ASSERT(zone != nullptr && type != nullptr);
  return OS::SCreate(zone, "(RO) %s", type);
}
#if defined(DART_PRECOMPILER)
void ImageWriter::DumpInstructionStats() {
std::unique_ptr<CombinedCodeStatistics> instruction_stats(
@ -461,7 +501,7 @@ AssemblyImageWriter::AssemblyImageWriter(Thread* thread,
void* callback_data,
bool strip,
Elf* debug_elf)
: ImageWriter(thread->heap()),
: ImageWriter(thread),
assembly_stream_(512 * KB, callback, callback_data),
assembly_dwarf_(nullptr),
debug_dwarf_(nullptr) {
@ -537,37 +577,47 @@ static const char* NameOfStubIsolateSpecificStub(ObjectStore* object_store,
}
#endif // !defined(DART_PRECOMPILED_RUNTIME)
const char* AssemblyCodeNamer::AssemblyNameFor(intptr_t code_index,
const Code& code) {
const char* SnapshotTextObjectNamer::SnapshotNameFor(intptr_t code_index,
const Code& code) {
ASSERT(!code.IsNull());
const char* prefix = FLAG_precompiled_mode ? "Precompiled_" : "";
owner_ = code.owner();
if (owner_.IsNull()) {
insns_ = code.instructions();
const char* name = StubCode::NameOfStub(insns_.EntryPoint());
if (name != nullptr) {
return OS::SCreate(zone_, "Precompiled_Stub_%s", name);
return OS::SCreate(zone_, "%sStub_%s", prefix, name);
}
name = NameOfStubIsolateSpecificStub(store_, code);
ASSERT(name != nullptr);
return OS::SCreate(zone_, "Precompiled__%s", name);
return OS::SCreate(zone_, "%s_%s", prefix, name);
} else if (owner_.IsClass()) {
string_ = Class::Cast(owner_).Name();
const char* name = string_.ToCString();
EnsureAssemblerIdentifier(const_cast<char*>(name));
return OS::SCreate(zone_, "Precompiled_AllocationStub_%s_%" Pd, name,
return OS::SCreate(zone_, "%sAllocationStub_%s_%" Pd, prefix, name,
code_index);
} else if (owner_.IsAbstractType()) {
const char* name = namer_.StubNameForType(AbstractType::Cast(owner_));
return OS::SCreate(zone_, "Precompiled_%s", name);
return OS::SCreate(zone_, "%s%s", prefix, name);
} else if (owner_.IsFunction()) {
const char* name = Function::Cast(owner_).ToQualifiedCString();
EnsureAssemblerIdentifier(const_cast<char*>(name));
return OS::SCreate(zone_, "Precompiled_%s_%" Pd, name, code_index);
return OS::SCreate(zone_, "%s%s_%" Pd, prefix, name, code_index);
} else {
UNREACHABLE();
}
}
// Returns a zone-allocated symbol name for text object [data] at [index]:
// trampolines are named by index; everything else defers to the Code-based
// overload.
const char* SnapshotTextObjectNamer::SnapshotNameFor(
    intptr_t index,
    const ImageWriter::InstructionsData& data) {
  if (data.trampoline_bytes == nullptr) {
    return SnapshotNameFor(index, *data.code_);
  }
  return OS::SCreate(zone_, "Trampoline_%" Pd "", index);
}
void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
#if defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
@ -614,6 +664,10 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
WriteWordLiteralText(0);
}
// Only valid if bare_instruction_payloads is true.
const V8SnapshotProfileWriter::ObjectId instructions_section_id(
offset_space_, bare_instruction_payloads ? Image::kHeaderSize : -1);
if (bare_instruction_payloads) {
const intptr_t section_size = image_size - Image::kHeaderSize;
// Add the RawInstructionsSection header.
@ -633,15 +687,14 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
WriteWordLiteralText(instructions_length);
if (profile_writer_ != nullptr) {
const intptr_t offset = Image::kHeaderSize;
const intptr_t non_instruction_bytes =
compiler::target::InstructionsSection::HeaderSize();
profile_writer_->SetObjectTypeAndName({offset_space_, offset},
"InstructionsSection",
/*name=*/nullptr);
profile_writer_->AttributeBytesTo({offset_space_, offset},
profile_writer_->SetObjectTypeAndName(instructions_section_id,
instructions_section_type_,
instructions_symbol);
profile_writer_->AttributeBytesTo(instructions_section_id,
non_instruction_bytes);
profile_writer_->AddRoot({offset_space_, offset});
profile_writer_->AddRoot(instructions_section_id);
}
}
@ -654,7 +707,7 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
FrameUnwindPrologue();
PcDescriptors& descriptors = PcDescriptors::Handle(zone);
AssemblyCodeNamer namer(zone);
SnapshotTextObjectNamer namer(zone);
intptr_t text_offset = 0;
ASSERT(offset_space_ != V8SnapshotProfileWriter::kSnapshot);
@ -663,28 +716,37 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
const bool is_trampoline = data.trampoline_bytes != nullptr;
ASSERT((data.text_offset_ - instructions_[0].text_offset_) == text_offset);
if (bare_instruction_payloads && profile_writer_ != nullptr) {
const intptr_t instructions_sections_offset = Image::kHeaderSize;
const intptr_t offset = section_headers_size + text_offset;
profile_writer_->AttributeReferenceTo(
{offset_space_, instructions_sections_offset},
{{offset_space_, offset},
V8SnapshotProfileWriter::Reference::kElement,
text_offset});
intptr_t dwarf_index = i;
#if defined(DART_PRECOMPILER)
if (!is_trampoline && assembly_dwarf_ != nullptr) {
dwarf_index = assembly_dwarf_->AddCode(*data.code_);
}
#endif
const auto object_name = namer.SnapshotNameFor(dwarf_index, data);
if (profile_writer_ != nullptr) {
const auto object_offset = section_headers_size + text_offset;
const V8SnapshotProfileWriter::ObjectId object_id(offset_space_,
object_offset);
auto const type = is_trampoline ? trampoline_type_ : instructions_type_;
const intptr_t size = is_trampoline ? data.trampoline_length
: SizeInSnapshot(data.insns_->raw());
profile_writer_->SetObjectTypeAndName(object_id, type, object_name);
profile_writer_->AttributeBytesTo(object_id, size);
// If the object is wrapped in an InstructionSection, then add an
// element reference.
if (bare_instruction_payloads) {
profile_writer_->AttributeReferenceTo(
instructions_section_id,
{object_id, V8SnapshotProfileWriter::Reference::kElement,
text_offset});
}
}
if (is_trampoline) {
if (profile_writer_ != nullptr) {
const intptr_t offset = section_headers_size + text_offset;
profile_writer_->SetObjectTypeAndName({offset_space_, offset},
"Trampolines",
/*name=*/nullptr);
profile_writer_->AttributeBytesTo({offset_space_, offset},
data.trampline_length);
}
const auto start = reinterpret_cast<uword>(data.trampoline_bytes);
const auto end = start + data.trampline_length;
const auto end = start + data.trampoline_length;
text_offset += WriteByteSequence(start, end);
delete[] data.trampoline_bytes;
data.trampoline_bytes = nullptr;
@ -693,18 +755,9 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
const intptr_t instr_start = text_offset;
const Instructions& insns = *data.insns_;
const Code& code = *data.code_;
descriptors = data.code_->pc_descriptors();
if (profile_writer_ != nullptr) {
const intptr_t offset = section_headers_size + text_offset;
profile_writer_->SetObjectTypeAndName({offset_space_, offset},
"Instructions",
/*name=*/nullptr);
profile_writer_->AttributeBytesTo({offset_space_, offset},
SizeInSnapshot(insns.raw()));
}
const auto& code = *data.code_;
const auto& insns = *data.insns_;
descriptors = code.pc_descriptors();
const uword payload_start = insns.PayloadStart();
@ -746,23 +799,15 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
compiler::target::Instructions::HeaderSize());
}
intptr_t dwarf_index = i;
#if defined(DART_PRECOMPILER)
// Create a label for use by DWARF.
if (assembly_dwarf_ != nullptr) {
dwarf_index = assembly_dwarf_->AddCode(code);
}
#endif
auto const assembly_name = namer.AssemblyNameFor(dwarf_index, code);
#if defined(DART_PRECOMPILER)
if (debug_dwarf_ != nullptr) {
auto const payload_offset = section_headers_size + text_offset;
debug_dwarf_->AddCode(code, assembly_name, payload_offset);
debug_dwarf_->AddCode(code, object_name, payload_offset);
}
#endif
// 2. Write a label at the entry point.
// Linux's perf uses these labels.
assembly_stream_.Print("%s:\n", assembly_name);
assembly_stream_.Print("%s:\n", object_name);
{
// 3. Write from the payload start to payload end. For AOT snapshots
@ -1010,7 +1055,7 @@ BlobImageWriter::BlobImageWriter(Thread* thread,
intptr_t bss_base,
Elf* elf,
Dwarf* elf_dwarf)
: ImageWriter(thread->heap()),
: ImageWriter(thread),
instructions_blob_stream_(instructions_blob_buffer, alloc, initial_size),
elf_(elf),
elf_dwarf_(elf_dwarf),
@ -1035,8 +1080,11 @@ intptr_t BlobImageWriter::WriteByteSequence(uword start, uword end) {
void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
const bool bare_instruction_payloads =
FLAG_precompiled_mode && FLAG_use_bare_instructions;
const char* instructions_symbol =
vm ? "_kDartVmSnapshotInstructions" : "_kDartIsolateSnapshotInstructions";
auto const zone = Thread::Current()->zone();
#ifdef DART_PRECOMPILER
#if defined(DART_PRECOMPILER)
intptr_t segment_base = 0;
if (elf_ != nullptr) {
segment_base = elf_->NextMemoryOffset();
@ -1064,6 +1112,10 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
instructions_blob_stream_.WriteTargetWord(0);
}
// Only valid when bare_instructions_payloads is true.
const V8SnapshotProfileWriter::ObjectId instructions_section_id(
offset_space_, bare_instruction_payloads ? Image::kHeaderSize : -1);
if (bare_instruction_payloads) {
const intptr_t section_size = image_size - Image::kHeaderSize;
// Add the RawInstructionsSection header.
@ -1082,24 +1134,23 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
instructions_blob_stream_.WriteTargetWord(instructions_length);
if (profile_writer_ != nullptr) {
const intptr_t offset = Image::kHeaderSize;
const intptr_t non_instruction_bytes =
compiler::target::InstructionsSection::HeaderSize();
profile_writer_->SetObjectTypeAndName({offset_space_, offset},
"InstructionsSection",
/*name=*/nullptr);
profile_writer_->AttributeBytesTo({offset_space_, offset},
profile_writer_->SetObjectTypeAndName(instructions_section_id,
instructions_section_type_,
instructions_symbol);
profile_writer_->AttributeBytesTo(instructions_section_id,
non_instruction_bytes);
profile_writer_->AddRoot({offset_space_, offset});
profile_writer_->AddRoot(instructions_section_id);
}
}
intptr_t text_offset = 0;
#if defined(DART_PRECOMPILER)
PcDescriptors& descriptors = PcDescriptors::Handle();
AssemblyCodeNamer namer(Thread::Current()->zone());
auto& descriptors = PcDescriptors::Handle(zone);
#endif
SnapshotTextObjectNamer namer(zone);
NoSafepointScope no_safepoint;
for (intptr_t i = 0; i < instructions_.length(); i++) {
@ -1107,18 +1158,29 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
const bool is_trampoline = data.trampoline_bytes != nullptr;
ASSERT((data.text_offset_ - instructions_[0].text_offset_) == text_offset);
if (bare_instruction_payloads && profile_writer_ != nullptr) {
const intptr_t instructions_sections_offset = Image::kHeaderSize;
profile_writer_->AttributeReferenceTo(
{offset_space_, instructions_sections_offset},
{{offset_space_, instructions_blob_stream_.Position()},
V8SnapshotProfileWriter::Reference::kElement,
text_offset});
const auto object_name = namer.SnapshotNameFor(i, data);
if (profile_writer_ != nullptr) {
const V8SnapshotProfileWriter::ObjectId object_id(
offset_space_, instructions_blob_stream_.Position());
auto const type = is_trampoline ? trampoline_type_ : instructions_type_;
const intptr_t size = is_trampoline ? data.trampoline_length
: SizeInSnapshot(data.insns_->raw());
profile_writer_->SetObjectTypeAndName(object_id, type, object_name);
profile_writer_->AttributeBytesTo(object_id, size);
// If the object is wrapped in an InstructionSection, then add an
// element reference.
if (bare_instruction_payloads) {
profile_writer_->AttributeReferenceTo(
instructions_section_id,
{object_id, V8SnapshotProfileWriter::Reference::kElement,
text_offset});
}
}
if (is_trampoline) {
const auto start = reinterpret_cast<uword>(data.trampoline_bytes);
const auto end = start + data.trampline_length;
const auto end = start + data.trampoline_length;
text_offset += WriteByteSequence(start, end);
delete[] data.trampoline_bytes;
data.trampoline_bytes = nullptr;
@ -1127,8 +1189,7 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
const intptr_t instr_start = text_offset;
const Instructions& insns = *instructions_[i].insns_;
AutoTraceImage(insns, 0, &this->instructions_blob_stream_);
const auto& insns = *data.insns_;
const uword payload_start = insns.PayloadStart();
ASSERT(Utils::IsAligned(payload_start, sizeof(compiler::target::uword)));
@ -1191,13 +1252,12 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
#endif
#if defined(DART_PRECOMPILER)
const auto& code = *instructions_[i].code_;
auto const assembly_name = namer.AssemblyNameFor(i, code);
const auto& code = *data.code_;
if (elf_dwarf_ != nullptr) {
elf_dwarf_->AddCode(code, assembly_name, payload_offset);
elf_dwarf_->AddCode(code, object_name, payload_offset);
}
if (debug_dwarf_ != nullptr) {
debug_dwarf_->AddCode(code, assembly_name, payload_offset);
debug_dwarf_->AddCode(code, object_name, payload_offset);
}
// Don't patch the relocation if we're not generating ELF. The regular blobs
@ -1208,7 +1268,7 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
const intptr_t current_stream_position =
instructions_blob_stream_.Position();
descriptors = data.code_->pc_descriptors();
descriptors = code.pc_descriptors();
PcDescriptors::Iterator iterator(
descriptors, /*kind_mask=*/RawPcDescriptors::kBSSRelocation);
@ -1252,8 +1312,6 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
ASSERT_EQUAL(instructions_blob_stream_.bytes_written(), image_size);
#ifdef DART_PRECOMPILER
const char* instructions_symbol =
vm ? "_kDartVmSnapshotInstructions" : "_kDartIsolateSnapshotInstructions";
if (elf_ != nullptr) {
auto const segment_base2 =
elf_->AddText(instructions_symbol, instructions_blob_stream_.buffer(),

View file

@ -149,7 +149,7 @@ struct ImageWriterCommand {
class ImageWriter : public ValueObject {
public:
explicit ImageWriter(Heap* heap);
explicit ImageWriter(Thread* thread);
virtual ~ImageWriter() {}
void ResetOffsets() {
@ -166,12 +166,20 @@ class ImageWriter : public ValueObject {
// [ImageWriterCommand]s.
void PrepareForSerialization(GrowableArray<ImageWriterCommand>* commands);
bool IsROSpace() const {
return offset_space_ == V8SnapshotProfileWriter::kVmData ||
offset_space_ == V8SnapshotProfileWriter::kVmText ||
offset_space_ == V8SnapshotProfileWriter::kIsolateData ||
offset_space_ == V8SnapshotProfileWriter::kIsolateText;
}
int32_t GetTextOffsetFor(RawInstructions* instructions, RawCode* code);
uint32_t GetDataOffsetFor(RawObject* raw_object);
void Write(WriteStream* clustered_stream, bool vm);
intptr_t data_size() const { return next_data_offset_; }
intptr_t text_size() const { return next_text_offset_; }
intptr_t GetTextObjectCount() const;
void GetTrampolineInfo(intptr_t* count, intptr_t* size) const;
void DumpStatistics();
@ -215,6 +223,10 @@ class ImageWriter : public ValueObject {
marked_tags);
}
// Returns nullptr if there is no profile writer.
const char* ObjectTypeForProfile(const Object& object) const;
static const char* TagObjectTypeAsReadOnly(Zone* zone, const char* type);
protected:
void WriteROData(WriteStream* stream);
virtual void WriteText(WriteStream* clustered_stream, bool vm) = 0;
@ -230,16 +242,16 @@ class ImageWriter : public ValueObject {
raw_code_(code),
text_offset_(text_offset),
trampoline_bytes(nullptr),
trampline_length(0) {}
trampoline_length(0) {}
InstructionsData(uint8_t* trampoline_bytes,
intptr_t trampline_length,
intptr_t trampoline_length,
intptr_t text_offset)
: raw_insns_(nullptr),
raw_code_(nullptr),
text_offset_(text_offset),
trampoline_bytes(trampoline_bytes),
trampline_length(trampline_length) {}
trampoline_length(trampoline_length) {}
union {
RawInstructions* raw_insns_;
@ -252,7 +264,7 @@ class ImageWriter : public ValueObject {
intptr_t text_offset_;
uint8_t* trampoline_bytes;
intptr_t trampline_length;
intptr_t trampoline_length;
};
struct ObjectData {
@ -273,9 +285,13 @@ class ImageWriter : public ValueObject {
V8SnapshotProfileWriter::IdSpace offset_space_ =
V8SnapshotProfileWriter::kSnapshot;
V8SnapshotProfileWriter* profile_writer_ = nullptr;
const char* const instructions_section_type_;
const char* const instructions_type_;
const char* const trampoline_type_;
template <class T>
friend class TraceImageObjectScope;
friend class SnapshotTextObjectNamer; // For InstructionsData.
private:
DISALLOW_COPY_AND_ASSIGN(ImageWriter);
@ -293,43 +309,34 @@ class TraceImageObjectScope {
intptr_t section_offset,
const T* stream,
const Object& object)
: writer_(writer),
stream_(stream),
: writer_(ASSERT_NOTNULL(writer)),
stream_(ASSERT_NOTNULL(stream)),
section_offset_(section_offset),
start_offset_(stream_->Position() - section_offset) {
if (writer_->profile_writer_ != nullptr) {
Thread* thread = Thread::Current();
REUSABLE_CLASS_HANDLESCOPE(thread);
REUSABLE_STRING_HANDLESCOPE(thread);
Class& klass = thread->ClassHandle();
String& name = thread->StringHandle();
klass = object.clazz();
name = klass.UserVisibleName();
ASSERT(writer_->offset_space_ != V8SnapshotProfileWriter::kSnapshot);
writer_->profile_writer_->SetObjectTypeAndName(
{writer_->offset_space_, start_offset_}, name.ToCString(), nullptr);
}
}
start_offset_(stream_->Position() - section_offset),
object_(object) {}
~TraceImageObjectScope() {
if (writer_->profile_writer_ != nullptr) {
ASSERT(writer_->offset_space_ != V8SnapshotProfileWriter::kSnapshot);
writer_->profile_writer_->AttributeBytesTo(
{writer_->offset_space_, start_offset_},
stream_->Position() - section_offset_ - start_offset_);
}
if (writer_->profile_writer_ == nullptr) return;
ASSERT(writer_->IsROSpace());
writer_->profile_writer_->SetObjectTypeAndName(
{writer_->offset_space_, start_offset_},
writer_->ObjectTypeForProfile(object_), nullptr);
writer_->profile_writer_->AttributeBytesTo(
{writer_->offset_space_, start_offset_},
stream_->Position() - section_offset_ - start_offset_);
}
private:
ImageWriter* writer_;
const T* stream_;
intptr_t section_offset_;
intptr_t start_offset_;
ImageWriter* const writer_;
const T* const stream_;
const intptr_t section_offset_;
const intptr_t start_offset_;
const Object& object_;
};
class AssemblyCodeNamer {
class SnapshotTextObjectNamer {
public:
explicit AssemblyCodeNamer(Zone* zone)
explicit SnapshotTextObjectNamer(Zone* zone)
: zone_(zone),
owner_(Object::Handle(zone)),
string_(String::Handle(zone)),
@ -338,7 +345,9 @@ class AssemblyCodeNamer {
const char* StubNameForType(const AbstractType& type) const;
const char* AssemblyNameFor(intptr_t code_index, const Code& code);
const char* SnapshotNameFor(intptr_t code_index, const Code& code);
const char* SnapshotNameFor(intptr_t index,
const ImageWriter::InstructionsData& data);
private:
Zone* const zone_;

View file

@ -186,6 +186,7 @@ class ObjectPointerVisitor;
RW(Code, array_write_barrier_stub) \
R_(Code, megamorphic_miss_code) \
R_(Function, megamorphic_miss_function) \
RW(Array, dispatch_table_code_entries) \
RW(Array, code_order_table) \
RW(Array, obfuscation_map) \
RW(Class, ffi_pointer_class) \

View file

@ -575,7 +575,9 @@ class Thread : public ThreadState {
}
const uword* dispatch_table_array() const { return dispatch_table_array_; }
void set_dispatch_table_array(uword* array) { dispatch_table_array_ = array; }
void set_dispatch_table_array(const uword* array) {
dispatch_table_array_ = array;
}
static bool CanLoadFromThread(const Object& object);
static intptr_t OffsetFromThread(const Object& object);
@ -881,7 +883,7 @@ class Thread : public ThreadState {
RelaxedAtomic<uword> stack_limit_;
uword write_barrier_mask_;
Isolate* isolate_;
uword* dispatch_table_array_;
const uword* dispatch_table_array_;
uword top_;
uword end_;
// Offsets up to this point can all fit in a byte on X64. All of the above