[vm] Clean up interfaces/encoding for CompressedStackMaps.

Change-Id: Ie897f44fc02e1e9ae37f2625f3cb0ac4df1595d6
Cq-Include-Trybots: luci.dart.try:vm-kernel-precomp-linux-release-x64-try,vm-kernel-precomp-linux-release-simarm_x64-try,vm-kernel-precomp-mac-release-simarm64-try,vm-kernel-precomp-win-release-x64-try
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/135651
Reviewed-by: Martin Kustermann <kustermann@google.com>
This commit is contained in:
Teagan Strickland 2020-02-14 17:57:47 +00:00 committed by commit-bot@chromium.org
parent 467e82b668
commit 6d04a0db25
10 changed files with 176 additions and 104 deletions

View file

@ -6,6 +6,8 @@
#include "vm/compiler/compiler_state.h"
#include "vm/log.h"
#include "vm/object_store.h"
#include "vm/zone_text_buffer.h"
namespace dart {
@ -85,6 +87,38 @@ RawCompressedStackMaps* CompressedStackMapsBuilder::Finalize() const {
return CompressedStackMaps::NewInlined(encoded_bytes_);
}
// Iterates the entries of |maps|, reading stack map bits from |global_table|
// when |maps| stores only per-entry offsets into the global table.
CompressedStackMapsIterator::CompressedStackMapsIterator(
    const CompressedStackMaps& maps,
    const CompressedStackMaps& global_table)
    : maps_(maps),
      // If |maps| offloads its bits to the global table, payload bits are
      // read from |global_table|; otherwise everything is inline in |maps|.
      bits_container_(maps_.UsesGlobalTable() ? global_table : maps_) {
  // The global table itself is never iterated directly.
  ASSERT(!maps_.IsGlobalTable());
  // If |maps| uses the global table, the caller must have passed the actual
  // global table as |global_table|.
  ASSERT(!maps_.UsesGlobalTable() || bits_container_.IsGlobalTable());
}
// Convenience constructor: fetches the global table from the object store
// when needed, otherwise delegates with a null CompressedStackMaps.
CompressedStackMapsIterator::CompressedStackMapsIterator(
    const CompressedStackMaps& maps)
    : CompressedStackMapsIterator(
          maps,
          // Only look up the global table if the map will end up using it.
          // NOTE(review): Handle() presumably allocates in the current zone —
          // confirm that lifetime covers all uses of this iterator.
          maps.UsesGlobalTable() ? CompressedStackMaps::Handle(
                                       Thread::Current()
                                           ->isolate()
                                           ->object_store()
                                           ->canonicalized_stack_map_entries())
                                 : Object::null_compressed_stack_maps()) {}
// Copy constructor: duplicates the full decoding state of |it| so the copy
// can be advanced independently of the original.
CompressedStackMapsIterator::CompressedStackMapsIterator(
    const CompressedStackMapsIterator& it)
    : maps_(it.maps_),
      bits_container_(it.bits_container_),
      next_offset_(it.next_offset_),
      current_pc_offset_(it.current_pc_offset_),
      current_global_table_offset_(it.current_global_table_offset_),
      current_spill_slot_bit_count_(it.current_spill_slot_bit_count_),
      // Fixed: previously initialized from |it.current_spill_slot_bit_count_|
      // (copy-paste error), which corrupted the copied iterator's notion of
      // how many non-spill-slot bits each entry has.
      current_non_spill_slot_bit_count_(it.current_non_spill_slot_bit_count_),
      current_bits_offset_(it.current_bits_offset_) {}
// Decode unsigned integer in LEB128 format from the payload of |maps| and
// update |byte_index|.
uintptr_t CompressedStackMapsIterator::DecodeLEB128(
@ -187,6 +221,33 @@ void CompressedStackMapsIterator::LazyLoadGlobalTableEntry() {
current_bits_offset_ = offset;
}
// Renders every remaining entry of this iterator as one "0xPC: bits" line,
// joined by newlines, into a buffer allocated from |zone|. Iterates a copy,
// so this iterator's own position is untouched.
const char* CompressedStackMapsIterator::ToCString(Zone* zone) const {
  ZoneTextBuffer b(zone, 100);
  CompressedStackMapsIterator it(*this);
  // If we haven't loaded an entry yet, do so (but don't skip the current
  // one if we have!)
  if (!it.HasLoadedEntry() && !it.MoveNext()) return b.buffer();
  // Emit an empty separator before the first entry and "\n" thereafter,
  // instead of tracking a first-entry flag.
  const char* separator = "";
  do {
    b.AddString(separator);
    separator = "\n";
    b.Printf("0x%08x: ", it.pc_offset());
    for (intptr_t bit = 0, num_bits = it.Length(); bit < num_bits; bit++) {
      b.AddString(it.IsObject(bit) ? "1" : "0");
    }
  } while (it.MoveNext());
  return b.buffer();
}
const char* CompressedStackMapsIterator::ToCString() const {
return ToCString(Thread::Current()->zone());
}
RawExceptionHandlers* ExceptionHandlerList::FinalizeExceptionHandlers(
uword entry_point) const {
intptr_t num_handlers = Length();

View file

@ -66,17 +66,12 @@ class CompressedStackMapsBuilder : public ZoneAllocated {
class CompressedStackMapsIterator : public ValueObject {
public:
// We use the null value to represent CompressedStackMaps with no
// entries, so the constructor allows them.
// entries, so any CompressedStackMaps arguments to constructors can be null.
CompressedStackMapsIterator(const CompressedStackMaps& maps,
const CompressedStackMaps& global_table)
: maps_(maps),
bits_container_(maps_.UsesGlobalTable() ? global_table : maps_) {
ASSERT(!maps_.IsGlobalTable());
ASSERT(!maps_.UsesGlobalTable() || bits_container_.IsGlobalTable());
}
const CompressedStackMaps& global_table);
explicit CompressedStackMapsIterator(const CompressedStackMaps& maps);
explicit CompressedStackMapsIterator(const CompressedStackMaps& maps)
: CompressedStackMapsIterator(maps, CompressedStackMaps::Handle()) {}
explicit CompressedStackMapsIterator(const CompressedStackMapsIterator& it);
// Loads the next entry from [maps_], if any. If [maps_] is the null
// value, this always returns false.
@ -116,6 +111,9 @@ class CompressedStackMapsIterator : public ValueObject {
ASSERT(current_spill_slot_bit_count_ >= 0);
}
const char* ToCString(Zone* zone) const;
const char* ToCString() const;
private:
static uintptr_t DecodeLEB128(const CompressedStackMaps& data,
uintptr_t* byte_index);

View file

@ -750,6 +750,7 @@ class CodeSourceMap : public AllStatic {
// Target-architecture layout queries for CompressedStackMaps objects,
// used when cross-compiling (sizes are in target bytes, not host bytes).
class CompressedStackMaps : public AllStatic {
 public:
  // Size of the fixed-length portion that precedes the variable-length
  // payload data.
  static word HeaderSize();
  static word InstanceSize();
  static word NextFieldOffset();
};

View file

@ -389,6 +389,8 @@ static constexpr dart::compiler::target::word Code_InstanceSize = 96;
static constexpr dart::compiler::target::word CodeSourceMap_InstanceSize = 8;
static constexpr dart::compiler::target::word CompressedStackMaps_InstanceSize =
8;
static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
8;
static constexpr dart::compiler::target::word Context_InstanceSize = 12;
static constexpr dart::compiler::target::word Context_header_size = 12;
static constexpr dart::compiler::target::word ContextScope_InstanceSize = 12;
@ -852,6 +854,8 @@ static constexpr dart::compiler::target::word Code_InstanceSize = 176;
static constexpr dart::compiler::target::word CodeSourceMap_InstanceSize = 16;
static constexpr dart::compiler::target::word CompressedStackMaps_InstanceSize =
12;
static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
12;
static constexpr dart::compiler::target::word Context_InstanceSize = 24;
static constexpr dart::compiler::target::word Context_header_size = 24;
static constexpr dart::compiler::target::word ContextScope_InstanceSize = 16;
@ -1308,6 +1312,8 @@ static constexpr dart::compiler::target::word Code_InstanceSize = 96;
static constexpr dart::compiler::target::word CodeSourceMap_InstanceSize = 8;
static constexpr dart::compiler::target::word CompressedStackMaps_InstanceSize =
8;
static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
8;
static constexpr dart::compiler::target::word Context_InstanceSize = 12;
static constexpr dart::compiler::target::word Context_header_size = 12;
static constexpr dart::compiler::target::word ContextScope_InstanceSize = 12;
@ -1772,6 +1778,8 @@ static constexpr dart::compiler::target::word Code_InstanceSize = 176;
static constexpr dart::compiler::target::word CodeSourceMap_InstanceSize = 16;
static constexpr dart::compiler::target::word CompressedStackMaps_InstanceSize =
12;
static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
12;
static constexpr dart::compiler::target::word Context_InstanceSize = 24;
static constexpr dart::compiler::target::word Context_header_size = 24;
static constexpr dart::compiler::target::word ContextScope_InstanceSize = 16;
@ -2228,6 +2236,8 @@ static constexpr dart::compiler::target::word Code_InstanceSize = 76;
static constexpr dart::compiler::target::word CodeSourceMap_InstanceSize = 8;
static constexpr dart::compiler::target::word CompressedStackMaps_InstanceSize =
8;
static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
8;
static constexpr dart::compiler::target::word Context_InstanceSize = 12;
static constexpr dart::compiler::target::word Context_header_size = 12;
static constexpr dart::compiler::target::word ContextScope_InstanceSize = 12;
@ -2685,6 +2695,8 @@ static constexpr dart::compiler::target::word Code_InstanceSize = 144;
static constexpr dart::compiler::target::word CodeSourceMap_InstanceSize = 16;
static constexpr dart::compiler::target::word CompressedStackMaps_InstanceSize =
12;
static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
12;
static constexpr dart::compiler::target::word Context_InstanceSize = 24;
static constexpr dart::compiler::target::word Context_header_size = 24;
static constexpr dart::compiler::target::word ContextScope_InstanceSize = 16;
@ -3135,6 +3147,8 @@ static constexpr dart::compiler::target::word Code_InstanceSize = 76;
static constexpr dart::compiler::target::word CodeSourceMap_InstanceSize = 8;
static constexpr dart::compiler::target::word CompressedStackMaps_InstanceSize =
8;
static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
8;
static constexpr dart::compiler::target::word Context_InstanceSize = 12;
static constexpr dart::compiler::target::word Context_header_size = 12;
static constexpr dart::compiler::target::word ContextScope_InstanceSize = 12;
@ -3593,6 +3607,8 @@ static constexpr dart::compiler::target::word Code_InstanceSize = 144;
static constexpr dart::compiler::target::word CodeSourceMap_InstanceSize = 16;
static constexpr dart::compiler::target::word CompressedStackMaps_InstanceSize =
12;
static constexpr dart::compiler::target::word CompressedStackMaps_HeaderSize =
12;
static constexpr dart::compiler::target::word Context_InstanceSize = 24;
static constexpr dart::compiler::target::word Context_header_size = 24;
static constexpr dart::compiler::target::word ContextScope_InstanceSize = 16;
@ -4083,6 +4099,8 @@ static constexpr dart::compiler::target::word AOT_CodeSourceMap_InstanceSize =
8;
static constexpr dart::compiler::target::word
AOT_CompressedStackMaps_InstanceSize = 8;
static constexpr dart::compiler::target::word
AOT_CompressedStackMaps_HeaderSize = 8;
static constexpr dart::compiler::target::word AOT_Context_InstanceSize = 12;
static constexpr dart::compiler::target::word AOT_Context_header_size = 12;
static constexpr dart::compiler::target::word AOT_ContextScope_InstanceSize =
@ -4588,6 +4606,8 @@ static constexpr dart::compiler::target::word AOT_CodeSourceMap_InstanceSize =
16;
static constexpr dart::compiler::target::word
AOT_CompressedStackMaps_InstanceSize = 12;
static constexpr dart::compiler::target::word
AOT_CompressedStackMaps_HeaderSize = 12;
static constexpr dart::compiler::target::word AOT_Context_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Context_header_size = 24;
static constexpr dart::compiler::target::word AOT_ContextScope_InstanceSize =
@ -5098,6 +5118,8 @@ static constexpr dart::compiler::target::word AOT_CodeSourceMap_InstanceSize =
16;
static constexpr dart::compiler::target::word
AOT_CompressedStackMaps_InstanceSize = 12;
static constexpr dart::compiler::target::word
AOT_CompressedStackMaps_HeaderSize = 12;
static constexpr dart::compiler::target::word AOT_Context_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Context_header_size = 24;
static constexpr dart::compiler::target::word AOT_ContextScope_InstanceSize =
@ -5598,6 +5620,8 @@ static constexpr dart::compiler::target::word AOT_CodeSourceMap_InstanceSize =
8;
static constexpr dart::compiler::target::word
AOT_CompressedStackMaps_InstanceSize = 8;
static constexpr dart::compiler::target::word
AOT_CompressedStackMaps_HeaderSize = 8;
static constexpr dart::compiler::target::word AOT_Context_InstanceSize = 12;
static constexpr dart::compiler::target::word AOT_Context_header_size = 12;
static constexpr dart::compiler::target::word AOT_ContextScope_InstanceSize =
@ -6096,6 +6120,8 @@ static constexpr dart::compiler::target::word AOT_CodeSourceMap_InstanceSize =
16;
static constexpr dart::compiler::target::word
AOT_CompressedStackMaps_InstanceSize = 12;
static constexpr dart::compiler::target::word
AOT_CompressedStackMaps_HeaderSize = 12;
static constexpr dart::compiler::target::word AOT_Context_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Context_header_size = 24;
static constexpr dart::compiler::target::word AOT_ContextScope_InstanceSize =
@ -6599,6 +6625,8 @@ static constexpr dart::compiler::target::word AOT_CodeSourceMap_InstanceSize =
16;
static constexpr dart::compiler::target::word
AOT_CompressedStackMaps_InstanceSize = 12;
static constexpr dart::compiler::target::word
AOT_CompressedStackMaps_HeaderSize = 12;
static constexpr dart::compiler::target::word AOT_Context_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Context_header_size = 24;
static constexpr dart::compiler::target::word AOT_ContextScope_InstanceSize =

View file

@ -267,6 +267,7 @@
SIZEOF(Code, InstanceSize, RawCode) \
SIZEOF(CodeSourceMap, InstanceSize, RawCodeSourceMap) \
SIZEOF(CompressedStackMaps, InstanceSize, RawCompressedStackMaps) \
SIZEOF(CompressedStackMaps, HeaderSize, RawCompressedStackMaps) \
SIZEOF(Context, InstanceSize, RawContext) \
SIZEOF(Context, header_size, RawContext) \
SIZEOF(ContextScope, InstanceSize, RawContextScope) \

View file

@ -157,10 +157,8 @@ static intptr_t InstructionsSizeInSnapshot(RawInstructions* raw) {
#if defined(IS_SIMARM_X64)
// Returns the space a CompressedStackMaps object with |payload_size| payload
// bytes occupies in the snapshot, rounded up to target object alignment.
static intptr_t CompressedStackMapsSizeInSnapshot(intptr_t payload_size) {
  // Fixed: the merged diff left both the old size expression
  // (kWordSize + sizeof(uint32_t) + payload_size) and the new one in place,
  // producing a dangling discarded-value statement. Keep the HeaderSize()
  // form, which encapsulates the target header layout.
  const intptr_t unrounded_size_in_bytes =
      compiler::target::CompressedStackMaps::HeaderSize() + payload_size;
  return Utils::RoundUp(unrounded_size_in_bytes,
                        compiler::target::ObjectAlignment::kObjectAlignment);
}
@ -198,7 +196,8 @@ intptr_t ImageWriter::SizeInSnapshot(RawObject* raw_object) {
case kCompressedStackMapsCid: {
RawCompressedStackMaps* raw_maps =
static_cast<RawCompressedStackMaps*>(raw_object);
return CompressedStackMapsSizeInSnapshot(raw_maps->ptr()->payload_size());
auto const payload_size = CompressedStackMaps::PayloadSizeOf(raw_maps);
return CompressedStackMapsSizeInSnapshot(payload_size);
}
case kOneByteStringCid:
case kTwoByteStringCid: {
@ -397,18 +396,17 @@ void ImageWriter::WriteROData(WriteStream* stream) {
"host object alignment is not double target object alignment");
if (obj.IsCompressedStackMaps()) {
const CompressedStackMaps& map = CompressedStackMaps::Cast(obj);
auto const object_start = stream->Position();
// Header layout is the same between 32-bit and 64-bit architectures, but
// we need to recalculate the size in words.
const intptr_t payload_size = map.payload_size();
const intptr_t size_in_bytes =
CompressedStackMapsSizeInSnapshot(payload_size);
marked_tags = RawObject::SizeTag::update(size_in_bytes * 2, marked_tags);
stream->WriteTargetWord(marked_tags);
// We do not need to align the stream to a word boundary on 64-bit because
// sizeof(RawCompressedStackMaps) is 12, even there.
stream->WriteFixed<uint32_t>(map.raw()->ptr()->flags_and_size_);
ASSERT_EQUAL(stream->Position() - object_start,
compiler::target::CompressedStackMaps::HeaderSize());
stream->WriteBytes(map.raw()->ptr()->data(), payload_size);
stream->Align(compiler::target::ObjectAlignment::kObjectAlignment);
} else if (obj.IsString()) {

View file

@ -668,6 +668,7 @@ void Object::Init(Isolate* isolate) {
*null_type_arguments_ = TypeArguments::null();
*empty_type_arguments_ = TypeArguments::null();
*null_abstract_type_ = AbstractType::null();
*null_compressed_stack_maps_ = CompressedStackMaps::null();
// Initialize the empty and zero array handles to null_ in order to be able to
// check if the empty and zero arrays were allocated (RAW_NULL is not
@ -1136,6 +1137,8 @@ void Object::Init(Isolate* isolate) {
ASSERT(null_function_->IsFunction());
ASSERT(!null_type_arguments_->IsSmi());
ASSERT(null_type_arguments_->IsTypeArguments());
ASSERT(!null_compressed_stack_maps_->IsSmi());
ASSERT(null_compressed_stack_maps_->IsCompressedStackMaps());
ASSERT(!empty_array_->IsSmi());
ASSERT(empty_array_->IsArray());
ASSERT(!zero_array_->IsSmi());
@ -13487,8 +13490,11 @@ intptr_t CompressedStackMaps::Hashcode() const {
RawCompressedStackMaps* CompressedStackMaps::New(
const GrowableArray<uint8_t>& payload,
RawCompressedStackMaps::Kind kind) {
bool is_global_table,
bool uses_global_table) {
ASSERT(Object::compressed_stackmaps_class() != Class::null());
// We don't currently allow both flags to be true.
ASSERT(!is_global_table || !uses_global_table);
auto& result = CompressedStackMaps::Handle();
const uintptr_t payload_size = payload.length();
@ -13506,46 +13512,28 @@ RawCompressedStackMaps* CompressedStackMaps::New(
CompressedStackMaps::InstanceSize(payload_size), Heap::kOld);
NoSafepointScope no_safepoint;
result ^= raw;
result.set_payload_size(payload_size, kind);
result.StoreNonPointer(
&result.raw_ptr()->flags_and_size_,
RawCompressedStackMaps::GlobalTableBit::encode(is_global_table) |
RawCompressedStackMaps::UsesTableBit::encode(uses_global_table) |
RawCompressedStackMaps::SizeField::encode(payload_size));
auto cursor = result.UnsafeMutableNonPointer(result.raw_ptr()->data());
memcpy(cursor, payload.data(), payload.length()); // NOLINT
}
result.SetPayload(payload);
ASSERT(!result.IsGlobalTable() || !result.UsesGlobalTable());
return result.raw();
}
// Copies the bytes of |payload| into this object's payload area. The object
// must have been allocated with room for at least |payload.length()| bytes.
void CompressedStackMaps::SetPayload(
    const GrowableArray<uint8_t>& payload) const {
  const uintptr_t array_length = payload.length();
  ASSERT(array_length <= payload_size());
  // No safepoint may occur while we hold a raw interior pointer into the
  // object's data.
  NoSafepointScope no_safepoint;
  uint8_t* payload_start = UnsafeMutableNonPointer(raw_ptr()->data());
  for (uintptr_t i = 0; i < array_length; i++) {
    payload_start[i] = payload.At(i);
  }
}
// Formats this map's entries for debugging by delegating to the iterator's
// ToCString. Not valid on the global table itself.
const char* CompressedStackMaps::ToCString() const {
  ASSERT(!IsGlobalTable());
  auto const t = Thread::Current();
  auto zone = t->zone();
  const auto& global_table = CompressedStackMaps::Handle(
      zone, t->isolate()->object_store()->canonicalized_stack_map_entries());
  CompressedStackMapsIterator it(*this, global_table);
  // Fixed: the merged diff retained both the old inline printing loop
  // (ZoneTextBuffer + while loop + return b.buffer()) and the new
  // delegation, leaving an unreachable second return. Keep the delegation,
  // which produces the same "0xPC: bits" lines via the iterator.
  return it.ToCString(zone);
}
RawString* LocalVarDescriptors::GetName(intptr_t var_index) const {

View file

@ -413,6 +413,7 @@ class Object {
V(Instance, null_instance) \
V(Function, null_function) \
V(TypeArguments, null_type_arguments) \
V(CompressedStackMaps, null_compressed_stack_maps) \
V(TypeArguments, empty_type_arguments) \
V(Array, empty_array) \
V(Array, zero_array) \
@ -5444,10 +5445,14 @@ class CompressedStackMaps : public Object {
public:
static const intptr_t kHashBits = 30;
uintptr_t payload_size() const { return raw_ptr()->payload_size(); }
uintptr_t payload_size() const { return PayloadSizeOf(raw()); }
static uintptr_t PayloadSizeOf(const RawCompressedStackMaps* raw) {
return RawCompressedStackMaps::SizeField::decode(
raw->ptr()->flags_and_size_);
}
bool Equals(const CompressedStackMaps& other) const {
// Both the payload size and the kind of table must match.
// All of the table flags and payload size must match.
if (raw_ptr()->flags_and_size_ != other.raw_ptr()->flags_and_size_) {
return false;
}
@ -5461,7 +5466,7 @@ class CompressedStackMaps : public Object {
intptr_t Hashcode() const;
static intptr_t UnroundedSize(RawCompressedStackMaps* maps) {
return UnroundedSize(maps->ptr()->payload_size());
return UnroundedSize(CompressedStackMaps::PayloadSizeOf(maps));
}
static intptr_t UnroundedSize(intptr_t length) {
return sizeof(RawCompressedStackMaps) + length;
@ -5475,50 +5480,46 @@ class CompressedStackMaps : public Object {
return RoundedAllocationSize(UnroundedSize(length));
}
private:
static RawCompressedStackMaps* New(const GrowableArray<uint8_t>& bytes,
RawCompressedStackMaps::Kind kind);
bool UsesGlobalTable() const { return !IsNull() && UsesGlobalTable(raw()); }
static bool UsesGlobalTable(const RawCompressedStackMaps* raw) {
return RawCompressedStackMaps::UsesTableBit::decode(
raw->ptr()->flags_and_size_);
}
bool IsGlobalTable() const { return !IsNull() && IsGlobalTable(raw()); }
static bool IsGlobalTable(const RawCompressedStackMaps* raw) {
return RawCompressedStackMaps::GlobalTableBit::decode(
raw->ptr()->flags_and_size_);
}
static RawCompressedStackMaps* NewInlined(
const GrowableArray<uint8_t>& bytes) {
return New(bytes, RawCompressedStackMaps::kInlined);
return New(bytes, /*is_global_table=*/false, /*uses_global_table=*/false);
}
static RawCompressedStackMaps* NewUsingTable(
const GrowableArray<uint8_t>& bytes) {
return New(bytes, RawCompressedStackMaps::kUsesTable);
return New(bytes, /*is_global_table=*/false, /*uses_global_table=*/true);
}
static RawCompressedStackMaps* NewGlobalTable(
const GrowableArray<uint8_t>& bytes) {
return New(bytes, RawCompressedStackMaps::kGlobalTable);
return New(bytes, /*is_global_table=*/true, /*uses_global_table=*/false);
}
void set_payload_size(intptr_t payload_size,
RawCompressedStackMaps::Kind kind) const {
ASSERT(RawCompressedStackMaps::SizeField::is_valid(payload_size));
const uint32_t encoded_fields =
RawCompressedStackMaps::KindField::encode(kind) |
RawCompressedStackMaps::SizeField::encode(payload_size);
StoreNonPointer(&raw_ptr()->flags_and_size_, encoded_fields);
}
private:
static RawCompressedStackMaps* New(const GrowableArray<uint8_t>& bytes,
bool is_global_table,
bool uses_global_table);
bool UsesGlobalTable() const {
return !IsNull() && raw_ptr()->UsesGlobalTable();
}
bool IsGlobalTable() const { return !IsNull() && raw_ptr()->IsGlobalTable(); }
const uint8_t* Payload() const { return raw_ptr()->data(); }
void SetPayload(const GrowableArray<uint8_t>& payload) const;
uint8_t PayloadByte(uintptr_t offset) const {
ASSERT(offset >= 0 && offset < payload_size());
ASSERT(offset < payload_size());
return raw_ptr()->data()[offset];
}
FINAL_HEAP_OBJECT_IMPLEMENTATION(CompressedStackMaps, Object);
friend class Class;
friend class CompressedStackMapsBuilder;
friend class CompressedStackMapsIterator;
friend class ProgramVisitor;
friend class StackMapEntry;
friend class CompressedStackMapsIterator; // For PayloadByte
friend class StackMapEntry; // For PayloadByte
};
class ExceptionHandlers : public Object {

View file

@ -192,7 +192,7 @@ intptr_t RawObject::HeapSizeFromClass() const {
case kCompressedStackMapsCid: {
const RawCompressedStackMaps* maps =
reinterpret_cast<const RawCompressedStackMaps*>(this);
intptr_t length = maps->ptr()->payload_size();
intptr_t length = CompressedStackMaps::PayloadSizeOf(maps);
instance_size = CompressedStackMaps::InstanceSize(length);
break;
}
@ -565,7 +565,8 @@ VARIABLE_NULL_VISITOR(Instructions, Instructions::Size(raw_obj))
VARIABLE_NULL_VISITOR(InstructionsSection, InstructionsSection::Size(raw_obj))
VARIABLE_NULL_VISITOR(PcDescriptors, raw_obj->ptr()->length_)
VARIABLE_NULL_VISITOR(CodeSourceMap, raw_obj->ptr()->length_)
VARIABLE_NULL_VISITOR(CompressedStackMaps, raw_obj->ptr()->payload_size())
VARIABLE_NULL_VISITOR(CompressedStackMaps,
CompressedStackMaps::PayloadSizeOf(raw_obj))
VARIABLE_NULL_VISITOR(OneByteString, Smi::Value(raw_obj->ptr()->length_))
VARIABLE_NULL_VISITOR(TwoByteString, Smi::Value(raw_obj->ptr()->length_))
// Abstract types don't have their visitor called.

View file

@ -1752,12 +1752,13 @@ class RawCompressedStackMaps : public RawObject {
// The low bits determine the expected payload contents, as described below.
uint32_t flags_and_size_;
// Variable length data follows here. There are three types of
// CompressedStackMaps (CSM):
// Variable length data follows here. The contents of the payload depend on
// the type of CompressedStackMaps (CSM) being represented. There are three
// major types of CSM:
//
// 1) kind == kInlined: CSMs that include all information about the stack
// maps. The payload for these contain tightly packed entries with the
// following information:
// 1) GlobalTableBit = false, UsesTableBit = false: CSMs that include all
// information about the stack maps. The payload for these contain tightly
// packed entries with the following information:
//
// * A header containing the following three pieces of information:
// * An unsigned integer representing the PC offset as a delta from the
@ -1769,17 +1770,20 @@ class RawCompressedStackMaps : public RawObject {
// * The body containing the bits for the stack map. The length of the body
// in bits is the sum of the spill slot and non-spill slot bit counts.
//
// 2) kind == kUsesTable: CSMs where the majority of the stack map information
// has been offloaded and canonicalized into a global table. The payload
// contains tightly packed entries with the following information:
// 2) GlobalTableBit = false, UsesTableBit = true: CSMs where the majority of
// the stack map information has been offloaded and canonicalized into a
// global table. The payload contains tightly packed entries with the
// following information:
//
// * A header containing just an unsigned integer representing the PC offset
// delta as described above.
// * The body is just an unsigned integer containing the offset into the
// payload for the global table.
//
// 3) kind == kGlobalTable: A CSM implementing the global table. Here, the
// payload contains tightly packed entries with the following information:
// 3) GlobalTableBit = true, UsesTableBit = false: A CSM implementing the
// global table. Here, the payload contains tightly packed entries with
// the following information:
//
// * A header containing the following two pieces of information:
// * An unsigned integer representing the number of bits used for
// spill slot entries.
@ -1799,23 +1803,14 @@ class RawCompressedStackMaps : public RawObject {
uint8_t* data() { OPEN_ARRAY_START(uint8_t, uint8_t); }
const uint8_t* data() const { OPEN_ARRAY_START(uint8_t, uint8_t); }
enum Kind {
kInlined = 0b00,
kUsesTable = 0b01,
kGlobalTable = 0b10,
};
static const uintptr_t kKindBits = 2;
using KindField = BitField<uint32_t, Kind, 0, kKindBits>;
using SizeField = BitField<uint32_t, uint32_t, kKindBits, 32 - kKindBits>;
uint32_t payload_size() const { return SizeField::decode(flags_and_size_); }
bool UsesGlobalTable() const {
return KindField::decode(flags_and_size_) == kUsesTable;
}
bool IsGlobalTable() const {
return KindField::decode(flags_and_size_) == kGlobalTable;
}
class GlobalTableBit : public BitField<uint32_t, bool, 0, 1> {};
class UsesTableBit
: public BitField<uint32_t, bool, GlobalTableBit::kNextBit, 1> {};
class SizeField : public BitField<uint32_t,
uint32_t,
UsesTableBit::kNextBit,
sizeof(flags_and_size_) * kBitsPerByte -
UsesTableBit::kNextBit> {};
friend class ImageWriter;
};