[vm] Store relocated addresses for text sections in BSS.

The relocated addresses are populated during BSS::Initialize() for
natively loaded ELF snapshots, and by the non-native loader when it
is used. Putting this information in the BSS segment avoids having to
change the embedder interface, since we only need this information for
AOT snapshots. This also avoids depending on our ELF snapshot layout
to reverse-engineer the DSO base for ELF-compiled snapshots.

We now always print the DSO base for both the VM and isolate in
non-symbolic stack traces, not just for ELF-compiled snapshots. However,
we still only print the relocated addresses in individual stack frames
if we're guaranteed they match those in separately saved debugging
information.

Bug: https://github.com/dart-lang/sdk/issues/41880

Cq-Include-Trybots: luci.dart.try:vm-kernel-precomp-linux-release-x64-try,vm-kernel-precomp-linux-product-x64-try,vm-kernel-precomp-linux-debug-x64-try,vm-kernel-precomp-win-release-x64-try,vm-kernel-precomp-mac-release-simarm64-try
Change-Id: I4837262f78e6e73a32eb7e24ef7a68ccb8ec2669
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/148441
Commit-Queue: Tess Strickland <sstrickl@google.com>
Reviewed-by: Martin Kustermann <kustermann@google.com>
This commit is contained in:
Tess Strickland 2020-05-26 08:47:30 +00:00 committed by commit-bot@chromium.org
parent 1dd99d3a39
commit 826576dcfc
11 changed files with 189 additions and 168 deletions

View file

@ -6,6 +6,7 @@
#include <bin/file.h>
#include <platform/elf.h>
#include <platform/globals.h>
#include <vm/bss_relocs.h>
#include <vm/cpu.h>
#include <vm/virtual_memory.h>
@ -183,6 +184,9 @@ class LoadedElf {
/// corresponding symbol was not found, or if the dynamic symbol table could
/// not be decoded.
///
/// Has the side effect of initializing the relocated addresses for the text
/// sections corresponding to non-null output parameters in the BSS segment.
///
/// On failure, the error may be retrieved by 'error()'.
bool ResolveSymbols(const uint8_t** vm_data,
const uint8_t** vm_instrs,
@ -235,6 +239,8 @@ class LoadedElf {
const char* dynamic_string_table_ = nullptr;
const dart::elf::Symbol* dynamic_symbol_table_ = nullptr;
uword dynamic_symbol_count_ = 0;
uword* vm_bss_ = nullptr;
uword* isolate_bss_ = nullptr;
DISALLOW_COPY_AND_ASSIGN(LoadedElf);
};
@ -462,11 +468,21 @@ bool LoadedElf::ReadSections() {
dynamic_symbol_table_ = reinterpret_cast<const dart::elf::Symbol*>(
base_->start() + header.memory_offset);
dynamic_symbol_count_ = header.file_size / sizeof(dart::elf::Symbol);
} else if (strcmp(name, ".bss") == 0) {
auto const bss_size =
(BSS::kVmEntryCount + BSS::kIsolateEntryCount) * kWordSize;
CHECK_ERROR(header.memory_offset != 0, ".bss must be loaded.");
CHECK_ERROR(header.file_size >= bss_size,
".bss does not have enough space.");
vm_bss_ = reinterpret_cast<uword*>(base_->start() + header.memory_offset);
isolate_bss_ = vm_bss_ + BSS::kVmEntryCount;
// We set applicable BSS entries in ResolveSymbols().
}
}
CHECK_ERROR(dynamic_string_table_ != nullptr, "Couldn't find .dynstr.");
CHECK_ERROR(dynamic_symbol_table_ != nullptr, "Couldn't find .dynsym.");
CHECK_ERROR(vm_bss_ != nullptr, "Couldn't find .bss.");
return true;
}
@ -488,10 +504,22 @@ bool LoadedElf::ResolveSymbols(const uint8_t** vm_data,
output = vm_data;
} else if (strcmp(name, kVmSnapshotInstructionsAsmSymbol) == 0) {
output = vm_instrs;
if (output != nullptr) {
// Store the value of the symbol in the VM BSS, as it contains the
// address of the VM instructions section relative to the DSO base.
BSS::InitializeBSSEntry(BSS::Relocation::InstructionsRelocatedAddress,
sym.value, vm_bss_);
}
} else if (strcmp(name, kIsolateSnapshotDataAsmSymbol) == 0) {
output = isolate_data;
} else if (strcmp(name, kIsolateSnapshotInstructionsAsmSymbol) == 0) {
output = isolate_instrs;
if (output != nullptr) {
// Store the value of the symbol in the isolate BSS, as it contains the
// address of the isolate instructions section relative to the DSO base.
BSS::InitializeBSSEntry(BSS::Relocation::InstructionsRelocatedAddress,
sym.value, isolate_bss_);
}
}
if (output != nullptr) {

View file

@ -3,28 +3,41 @@
// BSD-style license that can be found in the LICENSE file.
#include "vm/bss_relocs.h"
#include "vm/native_symbol.h"
#include "vm/runtime_entry.h"
#include "vm/thread.h"
namespace dart {
static void InitializeBSSEntry(BSS::Relocation relocation,
uword function_address,
uword* bss_start) {
void BSS::InitializeBSSEntry(BSS::Relocation relocation,
uword new_value,
uword* bss_start) {
std::atomic<uword>* slot = reinterpret_cast<std::atomic<uword>*>(
&bss_start[BSS::RelocationIndex(relocation)]);
uword old_value = slot->load(std::memory_order_relaxed);
uword new_value = function_address;
if (!slot->compare_exchange_strong(old_value, new_value,
std::memory_order_relaxed)) {
RELEASE_ASSERT(old_value == new_value);
}
}
void BSS::Initialize(Thread* current, uword* bss_start) {
InitializeBSSEntry(BSS::Relocation::DRT_GetThreadForNativeCallback,
reinterpret_cast<uword>(DLRT_GetThreadForNativeCallback),
bss_start);
void BSS::Initialize(Thread* current, uword* bss_start, bool vm) {
auto const instructions = reinterpret_cast<uword>(
current->isolate_group()->source()->snapshot_instructions);
uword dso_base;
// For non-natively loaded snapshots, this is instead initialized in
// LoadedElf::ResolveSymbols().
if (NativeSymbolResolver::LookupSharedObject(instructions, &dso_base)) {
InitializeBSSEntry(Relocation::InstructionsRelocatedAddress,
instructions - dso_base, bss_start);
}
if (!vm) {
// Fill values at isolate-only indices.
InitializeBSSEntry(Relocation::DRT_GetThreadForNativeCallback,
reinterpret_cast<uword>(DLRT_GetThreadForNativeCallback),
bss_start);
}
}
} // namespace dart

View file

@ -12,16 +12,33 @@ class Thread;
class BSS : public AllStatic {
public:
// Entries found in both the VM and isolate BSS come first. Each has its own
// portion of the BSS segment, so just the indices are shared, not the values
// stored at the index.
enum class Relocation : intptr_t {
DRT_GetThreadForNativeCallback = 0,
NumRelocations = 1
InstructionsRelocatedAddress,
// End of shared entries.
DRT_GetThreadForNativeCallback,
// End of isolate-only entries.
};
static intptr_t RelocationIndex(Relocation reloc) {
static constexpr intptr_t kVmEntryCount =
static_cast<intptr_t>(Relocation::InstructionsRelocatedAddress) + 1;
static constexpr intptr_t kIsolateEntryCount =
static_cast<intptr_t>(Relocation::DRT_GetThreadForNativeCallback) + 1;
static constexpr intptr_t RelocationIndex(Relocation reloc) {
return static_cast<intptr_t>(reloc);
}
static void Initialize(Thread* current, uword* bss);
static void Initialize(Thread* current, uword* bss, bool vm);
// Currently only used externally by LoadedElf::ResolveSymbols() to set the
// relocated address without changing the embedder interface.
static void InitializeBSSEntry(BSS::Relocation relocation,
uword new_value,
uword* bss_start);
};
} // namespace dart

View file

@ -6647,6 +6647,19 @@ ApiErrorPtr FullSnapshotReader::ReadVMSnapshot() {
deserializer.ReadVMSnapshot();
#if defined(DART_PRECOMPILED_RUNTIME)
// Initialize entries in the VM portion of the BSS segment.
ASSERT(Snapshot::IncludesCode(kind_));
Image image(instructions_image_);
if (image.bss_offset() != 0) {
// The const cast is safe because we're translating from the start of the
// instructions (read-only) to the start of the BSS (read-write).
uword* const bss_start = const_cast<uword*>(reinterpret_cast<const uword*>(
instructions_image_ + image.bss_offset()));
BSS::Initialize(thread_, bss_start, /*vm=*/true);
}
#endif // defined(DART_PRECOMPILED_RUNTIME)
return ApiError::null();
}
@ -6714,7 +6727,7 @@ ApiErrorPtr FullSnapshotReader::ReadProgramSnapshot() {
}
}
// Initialize symbols in the BSS, if present.
// Initialize entries in the isolate portion of the BSS segment.
ASSERT(Snapshot::IncludesCode(kind_));
Image image(instructions_image_);
if (image.bss_offset() != 0) {
@ -6722,7 +6735,7 @@ ApiErrorPtr FullSnapshotReader::ReadProgramSnapshot() {
// instructions (read-only) to the start of the BSS (read-write).
uword* const bss_start = const_cast<uword*>(reinterpret_cast<const uword*>(
instructions_image_ + image.bss_offset()));
BSS::Initialize(thread_, bss_start);
BSS::Initialize(thread_, bss_start, /*vm=*/false);
}
#endif // defined(DART_PRECOMPILED_RUNTIME)

View file

@ -6520,24 +6520,33 @@ Dart_CreateAppAOTSnapshotAsElf(Dart_StreamingWriteCallback callback,
Dwarf* debug_dwarf =
generate_debug ? new (Z) Dwarf(Z, nullptr, debug_elf) : nullptr;
// Here, both VM and isolate will be compiled into a single snapshot.
// In assembly generation, each serialized text section gets a separate
// pointer into the BSS segment and BSS slots are created for each, since
// we may not serialize both VM and isolate. Here, we always serialize both,
// so make a BSS segment large enough for both, with the VM entries coming
// first.
auto const isolate_offset = BSS::kVmEntryCount * compiler::target::kWordSize;
auto const bss_size =
isolate_offset + BSS::kIsolateEntryCount * compiler::target::kWordSize;
// Note that the BSS section must come first because it cannot be placed in
// between any two non-writable segments, due to a bug in Jelly Bean's ELF
// loader. See also Elf::WriteProgramTable().
const intptr_t bss_base =
elf->AddBSSData("_kDartBSSData", sizeof(compiler::target::uword));
const intptr_t vm_bss_base = elf->AddBSSData("_kDartBSSData", bss_size);
const intptr_t isolate_bss_base = vm_bss_base + isolate_offset;
// Add the BSS section to the separately saved debugging information, even
// though there will be no code in it to relocate, since it precedes the
// .text sections and thus affects their virtual addresses.
if (debug_dwarf != nullptr) {
debug_elf->AddBSSData("_kDartBSSData", sizeof(compiler::target::uword));
debug_elf->AddBSSData("_kDartBSSData", bss_size);
}
BlobImageWriter vm_image_writer(T, &vm_snapshot_instructions_buffer,
ApiReallocate, kInitialSize, debug_dwarf,
bss_base, elf, elf_dwarf);
vm_bss_base, elf, elf_dwarf);
BlobImageWriter isolate_image_writer(T, &isolate_snapshot_instructions_buffer,
ApiReallocate, kInitialSize, debug_dwarf,
bss_base, elf, elf_dwarf);
isolate_bss_base, elf, elf_dwarf);
FullSnapshotWriter writer(Snapshot::kFullAOT, &vm_snapshot_data_buffer,
&isolate_snapshot_data_buffer, ApiReallocate,
&vm_image_writer, &isolate_image_writer);

View file

@ -642,22 +642,6 @@ Elf::Elf(Zone* zone, StreamingWriteStream* stream, bool strip)
AddSection(shstrtab_, ".shstrtab");
}
// The VM segment comes after the program header segment and BSS segments,
// both of which are a single page.
static constexpr uword kVmSnapshotOffset = 2 * Elf::kPageSize;
// Find the relocated base of the loaded ELF snapshot. Returns 0 if there is
// no loaded ELF snapshot.
uword Elf::SnapshotRelocatedBaseAddress(uword vm_start) {
// We can't be running from a loaded ELF snapshot if this is the case.
if (vm_start < kVmSnapshotOffset) return 0;
const Image vm_instructions_image(reinterpret_cast<const void*>(vm_start));
if (!vm_instructions_image.compiled_to_elf()) return 0;
return vm_start - kVmSnapshotOffset;
}
void Elf::AddSection(Section* section, const char* name) {
ASSERT(section_table_file_size_ < 0);
ASSERT(!shstrtab_->HasBeenFinalized());
@ -824,8 +808,6 @@ void Elf::Finalize() {
FinalizeProgramTable();
ComputeFileOffsets();
ASSERT(VerifySegmentOrder());
// Finally, write the ELF file contents.
WriteHeader();
WriteProgramTable();
@ -891,63 +873,6 @@ void Elf::ComputeFileOffsets() {
file_offset += section_table_file_size_;
}
bool Elf::VerifySegmentOrder() {
// We can only verify the segment order after FinalizeProgramTable(), since
// we assume that all segments have been added, including the two program
// table segments.
ASSERT(program_table_file_size_ > 0);
// Find the names and symbols for the .bss and .text segments.
auto const bss_section_name = shstrtab_->Lookup(".bss");
// For separate debugging information for assembly snapshots, no .bss section
// is added. However, we're only interested in strict segment orders when
// generating ELF snapshots, so only continue when there's a .bss section.
if (bss_section_name == -1) return true;
auto const text_section_name = shstrtab_->Lookup(".text");
ASSERT(text_section_name != -1);
auto const bss_symbol_name = dynstrtab_->Lookup("_kDartBSSData");
auto const vm_symbol_name =
dynstrtab_->Lookup(kVmSnapshotInstructionsAsmSymbol);
auto const isolate_symbol_name =
dynstrtab_->Lookup(kIsolateSnapshotInstructionsAsmSymbol);
ASSERT(bss_symbol_name != -1);
ASSERT(vm_symbol_name != -1);
ASSERT(isolate_symbol_name != -1);
auto const bss_symbol = dynsym_->FindSymbolWithNameIndex(bss_symbol_name);
auto const vm_symbol = dynsym_->FindSymbolWithNameIndex(vm_symbol_name);
auto const isolate_symbol =
dynsym_->FindSymbolWithNameIndex(isolate_symbol_name);
ASSERT(bss_symbol != nullptr);
ASSERT(vm_symbol != nullptr);
ASSERT(isolate_symbol != nullptr);
// Check that the first non-program table segments are in the expected order.
auto const bss_segment = segments_[2];
ASSERT_EQUAL(bss_segment->segment_type, elf::PT_LOAD);
ASSERT_EQUAL(bss_segment->section_name(), bss_section_name);
ASSERT_EQUAL(bss_segment->memory_offset(), bss_symbol->offset);
ASSERT_EQUAL(bss_segment->MemorySize(), bss_symbol->size);
auto const vm_segment = segments_[3];
ASSERT_EQUAL(vm_segment->segment_type, elf::PT_LOAD);
ASSERT_EQUAL(vm_segment->section_name(), text_section_name);
ASSERT_EQUAL(vm_segment->memory_offset(), vm_symbol->offset);
ASSERT_EQUAL(vm_segment->MemorySize(), vm_symbol->size);
auto const isolate_segment = segments_[4];
ASSERT_EQUAL(isolate_segment->segment_type, elf::PT_LOAD);
ASSERT_EQUAL(isolate_segment->section_name(), text_section_name);
ASSERT_EQUAL(isolate_segment->memory_offset(), isolate_symbol->offset);
ASSERT_EQUAL(isolate_segment->MemorySize(), isolate_symbol->size);
// Also make sure that the memory offset of the VM segment is as expected.
ASSERT_EQUAL(bss_segment->memory_offset(), kPageSize);
ASSERT(bss_segment->MemorySize() <= kPageSize);
ASSERT_EQUAL(vm_segment->memory_offset(), kVmSnapshotOffset);
return true;
}
void Elf::WriteHeader() {
#if defined(TARGET_ARCH_IS_32_BIT)
uint8_t size = elf::ELFCLASS32;

View file

@ -26,12 +26,6 @@ class Elf : public ZoneAllocated {
static const intptr_t kPageSize = 4096;
// Used by the non-symbolic stack frame printer to calculate the relocated
// base address of the loaded ELF snapshot given the start of the VM
// instructions. Only works for ELF snapshots written by Dart, not those
// compiled from assembly.
static uword SnapshotRelocatedBaseAddress(uword vm_start);
intptr_t NextMemoryOffset() const { return memory_offset_; }
intptr_t NextSectionIndex() const { return sections_.length(); }
intptr_t AddText(const char* name, const uint8_t* bytes, intptr_t size);
@ -92,7 +86,6 @@ class Elf : public ZoneAllocated {
void FinalizeProgramTable();
void ComputeFileOffsets();
bool VerifySegmentOrder();
void WriteHeader();
void WriteSectionTable();

View file

@ -906,10 +906,10 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
Align(Image::kBssAlignment);
assembly_stream_.Print("%s:\n", bss_symbol);
// Currently we only put one symbol in the data section, the address of
// DLRT_GetThreadForNativeCallback, which is populated when the snapshot is
// loaded.
WriteWordLiteralText(0);
auto const entry_count = vm ? BSS::kVmEntryCount : BSS::kIsolateEntryCount;
for (intptr_t i = 0; i < entry_count; i++) {
WriteWordLiteralText(0);
}
#endif
#if defined(TARGET_OS_LINUX) || defined(TARGET_OS_ANDROID) || \
@ -1095,10 +1095,12 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
instructions_blob_stream_.WriteTargetWord(image_size);
#if defined(DART_PRECOMPILER)
// Store the offset of the BSS section from the instructions section here.
// The lowest bit is set to indicate we compiled directly to ELF.
const word bss_offset = bss_base_ - segment_base;
// If not compiling to ELF (and thus no BSS segment), write 0.
const word bss_offset = elf_ != nullptr ? bss_base_ - segment_base : 0;
ASSERT_EQUAL(Utils::RoundDown(bss_offset, Image::kBssAlignment), bss_offset);
instructions_blob_stream_.WriteTargetWord(bss_offset | 0x1);
// Set the lowest bit if we are compiling to ELF.
const word compiled_to_elf = elf_ != nullptr ? 0x1 : 0x0;
instructions_blob_stream_.WriteTargetWord(bss_offset | compiled_to_elf);
#else
instructions_blob_stream_.WriteTargetWord(0); // No relocations.
#endif
@ -1277,12 +1279,12 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
insns.PayloadStart() + reloc_offset);
// Overwrite the relocation position in the instruction stream with the
// (positive) offset of the start of the payload from the start of the
// BSS segment plus the addend in the relocation.
instructions_blob_stream_.SetPosition(payload_offset + reloc_offset);
// offset of the BSS segment from the relocation position plus the
// addend in the relocation.
auto const text_offset = payload_offset + reloc_offset;
instructions_blob_stream_.SetPosition(text_offset);
const compiler::target::word offset =
bss_base_ - (segment_base + payload_offset + reloc_offset) + addend;
const compiler::target::word offset = bss_offset - text_offset + addend;
instructions_blob_stream_.WriteTargetWord(offset);
}

View file

@ -17,7 +17,9 @@ class NativeSymbolResolver : public AllStatic {
static void Init();
static void Cleanup();
static char* LookupSymbolName(uword pc, uword* start);
static bool LookupSharedObject(uword pc, uword* dso_base, char** dso_name);
static bool LookupSharedObject(uword pc,
uword* dso_base = nullptr,
char** dso_name = nullptr);
static void FreeSymbolName(char* name);
static void AddSymbols(const char* dso_name, void* buffer, size_t size);
};

View file

@ -23621,28 +23621,31 @@ StackTracePtr StackTrace::New(const Array& code_array,
// Prints the best representation(s) for the call address.
static void PrintNonSymbolicStackFrameBody(ZoneTextBuffer* buffer,
uword call_addr,
uword isolate_dso_base,
uword isolate_instructions,
uword vm_instructions) {
uword vm_instructions,
uword isolate_relocated_address) {
const Image vm_image(reinterpret_cast<const void*>(vm_instructions));
const Image isolate_image(
reinterpret_cast<const void*>(isolate_instructions));
if (isolate_image.contains(call_addr)) {
if (isolate_image.compiled_to_elf() && isolate_dso_base != 0) {
buffer->Printf(" virt %" Pp "", call_addr - isolate_dso_base);
auto const symbol_name = kIsolateSnapshotInstructionsAsmSymbol;
auto const offset = call_addr - isolate_instructions;
// Only print the relocated address of the call when we know the saved
// debugging information (if any) will have the same relocated address.
if (isolate_image.compiled_to_elf()) {
buffer->Printf(" virt %" Pp "", isolate_relocated_address + offset);
}
buffer->Printf(" %s+0x%" Px "", kIsolateSnapshotInstructionsAsmSymbol,
call_addr - isolate_instructions);
buffer->Printf(" %s+0x%" Px "", symbol_name, offset);
} else if (vm_image.contains(call_addr)) {
auto const offset = call_addr - vm_instructions;
// We currently don't print 'virt' entries for vm addresses, even if
// they were compiled to ELF, as we should never encounter these in
// non-symbolic stack traces (since stub addresses are stripped).
//
// In case they leak due to code issues elsewhere, we still print them as
// <vm symbol>+<offset>, just to distinguish from other cases.
buffer->Printf(" %s+0x%" Px "", kVmSnapshotInstructionsAsmSymbol,
call_addr - vm_instructions);
buffer->Printf(" %s+0x%" Px "", kVmSnapshotInstructionsAsmSymbol, offset);
} else {
// This case should never happen, since these are not addresses within the
// VM or app isolate instructions sections, so make it easy to notice.
@ -23703,6 +23706,16 @@ static void PrintSymbolicStackFrame(Zone* zone,
PrintSymbolicStackFrameBody(buffer, function_name, url, line, column);
}
// Find the relocated base of the given instructions section.
uword InstructionsRelocatedAddress(uword instructions_start) {
Image image(reinterpret_cast<const uint8_t*>(instructions_start));
auto const bss_start =
reinterpret_cast<const uword*>(instructions_start + image.bss_offset());
auto const index =
BSS::RelocationIndex(BSS::Relocation::InstructionsRelocatedAddress);
return bss_start[index];
}
const char* StackTrace::ToCString() const {
auto const T = Thread::Current();
auto const zone = T->zone();
@ -23721,38 +23734,10 @@ const char* StackTrace::ToCString() const {
T->isolate_group()->source()->snapshot_instructions);
auto const vm_instructions = reinterpret_cast<uword>(
Dart::vm_isolate()->group()->source()->snapshot_instructions);
uword isolate_dso_base;
if (!NativeSymbolResolver::LookupSharedObject(isolate_instructions,
&isolate_dso_base, nullptr)) {
// This isn't a natively loaded snapshot, so try to detect non-natively
// loaded compiled-to-ELF snapshots.
const Image vm_image(reinterpret_cast<const void*>(vm_instructions));
const Image isolate_image(
reinterpret_cast<const void*>(isolate_instructions));
if (vm_image.compiled_to_elf() && isolate_image.compiled_to_elf()) {
// If the VM and isolate were loaded from the same snapshot, then the
// isolate instructions will immediately follow the VM instructions in
// memory, and the VM section is always at a fixed offset from the DSO
// base in snapshots we generate.
const uword next_section_after_vm =
Utils::RoundUp(reinterpret_cast<uword>(vm_image.object_start()) +
vm_image.object_size(),
Elf::kPageSize);
if (isolate_instructions == next_section_after_vm) {
isolate_dso_base = Elf::SnapshotRelocatedBaseAddress(vm_instructions);
}
// If not, then we have no way of reverse-engineering the DSO base for
// the isolate without extending the embedder to return this information
// or encoding it somehow in the instructions image like the BSS offset.
//
// For the latter, the Image header size must match the HeapPage header
// size, and there's no remaining unused space in the Image header on
// 64-bit architectures. Thus, we'd have to increase the header size of
// both HeapPage and Image or create a special Object to put in the body
// of the Image to store extended header information.
}
}
auto const vm_relocated_address =
InstructionsRelocatedAddress(vm_instructions);
auto const isolate_relocated_address =
InstructionsRelocatedAddress(isolate_instructions);
if (FLAG_dwarf_stack_traces_mode) {
// The Dart standard requires the output of StackTrace.toString to include
// all pending activations with precise source locations (i.e., to expand
@ -23767,15 +23752,14 @@ const char* StackTrace::ToCString() const {
OSThread* thread = OSThread::Current();
buffer.Printf("pid: %" Pd ", tid: %" Pd ", name %s\n", OS::ProcessId(),
OSThread::ThreadIdToIntPtr(thread->id()), thread->name());
// Print the dso_base of the VM and isolate_instructions. We print both here
// as the VM and isolate may be loaded from different snapshot images.
buffer.Printf("isolate_dso_base: %" Px "",
isolate_instructions - isolate_relocated_address);
buffer.Printf(", vm_dso_base: %" Px "\n",
vm_instructions - vm_relocated_address);
buffer.Printf("isolate_instructions: %" Px "", isolate_instructions);
buffer.Printf(" vm_instructions: %" Px "", vm_instructions);
// Print the dso_base of the isolate_instructions, since printed stack
// traces won't include stub frames. If VM isolates ever start including
// Dart code, adjust this appropriately.
if (isolate_dso_base != 0) {
buffer.Printf(" isolate_dso_base: %" Px "", isolate_dso_base);
}
buffer.Printf("\n");
buffer.Printf(", vm_instructions: %" Px "\n", vm_instructions);
}
#endif
@ -23828,9 +23812,9 @@ const char* StackTrace::ToCString() const {
// This output is formatted like Android's debuggerd. Note debuggerd
// prints call addresses instead of return addresses.
buffer.Printf(" #%02" Pd " abs %" Pp "", frame_index, call_addr);
PrintNonSymbolicStackFrameBody(&buffer, call_addr, isolate_dso_base,
isolate_instructions,
vm_instructions);
PrintNonSymbolicStackFrameBody(
&buffer, call_addr, isolate_instructions, vm_instructions,
isolate_relocated_address);
frame_index++;
continue;
} else if (function.IsNull()) {
@ -23838,9 +23822,9 @@ const char* StackTrace::ToCString() const {
// retained, so instead print the static symbol + offset like the
// non-symbolic stack traces.
PrintSymbolicStackFrameIndex(&buffer, frame_index);
PrintNonSymbolicStackFrameBody(&buffer, call_addr, isolate_dso_base,
isolate_instructions,
vm_instructions);
PrintNonSymbolicStackFrameBody(
&buffer, call_addr, isolate_instructions, vm_instructions,
isolate_relocated_address);
frame_index++;
continue;
}

View file

@ -66,6 +66,17 @@ Future<void> checkStackTrace(String rawStack, Dwarf dwarf,
final virtualAddresses =
pcOffsets.map((o) => dwarf.virtualAddressOf(o)).toList();
// Some double-checks using other information in the non-symbolic stack trace.
final dsoBase = dsoBaseAddresses(rawLines).single;
final absolutes = absoluteAddresses(rawLines);
final relocatedAddresses = absolutes.map((a) => a - dsoBase);
final explicits = explicitVirtualAddresses(rawLines);
Expect.deepEquals(relocatedAddresses, virtualAddresses);
// Explicits will be empty if not generating ELF snapshots directly.
if (explicits.isNotEmpty) {
Expect.deepEquals(explicits, virtualAddresses);
}
final externalFramesInfo = <List<CallInfo>>[];
final allFramesInfo = <List<CallInfo>>[];
@ -247,3 +258,27 @@ List<String> extractCallStrings(List<List<CallInfo>> expectedCalls) {
}
return ret;
}
Iterable<int> parseUsingAddressRegExp(RegExp re, Iterable<String> lines) sync* {
for (final line in lines) {
final match = re.firstMatch(line);
if (match != null) {
yield int.parse(match.group(1), radix: 16);
}
}
}
final _absRE = RegExp(r'abs ([a-f\d]+)');
Iterable<int> absoluteAddresses(Iterable<String> lines) =>
parseUsingAddressRegExp(_absRE, lines);
final _virtRE = RegExp(r'virt ([a-f\d]+)');
Iterable<int> explicitVirtualAddresses(Iterable<String> lines) =>
parseUsingAddressRegExp(_virtRE, lines);
final _dsoBaseRE = RegExp(r'isolate_dso_base: ([a-f\d]+)');
Iterable<int> dsoBaseAddresses(Iterable<String> lines) =>
parseUsingAddressRegExp(_dsoBaseRE, lines);