LibJS: Lazily collect stack trace information

The previous implementation was calling `backtrace()` for every
function call, which is quite slow.

Instead, this implementation provides VM::stack_trace(), which unwinds
the native stack, maps the return addresses through
NativeExecutable::get_source_range, and combines the results with
source ranges from interpreted call frames.
Simon Wanner 2023-11-01 00:39:28 +01:00 committed by Andreas Kling
parent 77dc7c4d36
commit 68f4d21de2
11 changed files with 96 additions and 46 deletions
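
For readers skimming the diff, here is a minimal, self-contained sketch of the frame-pointer walk that VM::stack_trace() builds on. It is illustrative only and not part of the commit; the Frame struct and dump_return_addresses() are made-up names. It assumes x86-64 and code built with -fno-omit-frame-pointer, which is why that flag is added to both the GN and CMake builds below; each collected return address is then mapped to a bytecode location via NativeExecutable::get_source_range, as the diff shows.

    #include <cstdint>
    #include <cstdio>

    // Frame layout when frame pointers are kept: the saved caller frame
    // pointer, then the return address (x86-64).
    struct Frame {
        Frame* prev;
        uintptr_t return_address;
    };

    [[gnu::noinline]] static void dump_return_addresses()
    {
        auto* frame = static_cast<Frame*>(__builtin_frame_address(0));
        // Bound the walk with a fixed depth here; the real
        // VM::get_native_stack_trace() bounds it with the VM's StackInfo
        // (stack base/top) instead.
        for (int depth = 0; frame != nullptr && depth < 16; ++depth) {
            std::printf("#%d %p\n", depth, reinterpret_cast<void*>(frame->return_address));
            frame = frame->prev;
        }
    }

    int main()
    {
        dump_return_addresses();
    }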

@@ -6,6 +6,7 @@ shared_library("LibJS") {
# FIXME: Why does LibSyntax need to depend on WindowServer headers?
"//Userland",
]
cflags_cc = [ "-fno-omit-frame-pointer" ]
deps = [
"//AK",
"//Userland/Libraries/LibCore",

@@ -56,13 +56,6 @@ void Interpreter::visit_edges(Cell::Visitor& visitor)
}
}
Optional<InstructionStreamIterator const&> Interpreter::instruction_stream_iterator() const
{
if (m_current_executable && m_current_executable->native_executable())
return m_current_executable->native_executable()->instruction_stream_iterator(*m_current_executable);
return m_pc;
}
// 16.1.6 ScriptEvaluation ( scriptRecord ), https://tc39.es/ecma262/#sec-runtime-semantics-scriptevaluation
ThrowCompletionOr<Value> Interpreter::run(Script& script_record, JS::GCPtr<Environment> lexical_environment_override)
{
@@ -371,6 +364,8 @@ Interpreter::ValueAndFrame Interpreter::run_and_return_frame(Executable& executa
else
push_call_frame(make<CallFrame>(), executable.number_of_registers);
vm().execution_context_stack().last()->executable = &executable;
if (auto native_executable = executable.get_or_create_native_executable()) {
native_executable->run(vm());

@@ -78,7 +78,7 @@ public:
Executable& current_executable() { return *m_current_executable; }
Executable const& current_executable() const { return *m_current_executable; }
BasicBlock const& current_block() const { return *m_current_block; }
Optional<InstructionStreamIterator const&> instruction_stream_iterator() const;
Optional<InstructionStreamIterator const&> instruction_stream_iterator() const { return m_pc; }
void visit_edges(Cell::Visitor&);

@@ -271,3 +271,4 @@ target_link_libraries(LibJS PRIVATE LibCore LibCrypto LibFileSystem LibRegex Lib
if("${CMAKE_SYSTEM_PROCESSOR}" STREQUAL "x86_64")
target_link_libraries(LibJS PRIVATE LibX86)
endif()
target_compile_options(LibJS PRIVATE -fno-omit-frame-pointer)

@@ -12,15 +12,6 @@
#include <LibX86/Disassembler.h>
#include <sys/mman.h>
#if __has_include(<execinfo.h>)
# include <execinfo.h>
# define EXECINFO_BACKTRACE
#endif
#if defined(AK_OS_ANDROID) && (__ANDROID_API__ < 33)
# undef EXECINFO_BACKTRACE
#endif
namespace JS::JIT {
NativeExecutable::NativeExecutable(void* code, size_t size, Vector<BytecodeMapping> mapping)
@@ -159,32 +150,20 @@ BytecodeMapping const& NativeExecutable::find_mapping_entry(size_t native_offset
return m_mapping[nearby_index];
}
Optional<Bytecode::InstructionStreamIterator const&> NativeExecutable::instruction_stream_iterator([[maybe_unused]] Bytecode::Executable const& executable) const
Optional<UnrealizedSourceRange> NativeExecutable::get_source_range(Bytecode::Executable const& executable, FlatPtr address) const
{
#ifdef EXECINFO_BACKTRACE
void* buffer[10];
auto count = backtrace(buffer, 10);
auto start = bit_cast<FlatPtr>(m_code);
auto end = start + m_size;
for (auto i = 0; i < count; i++) {
auto address = bit_cast<FlatPtr>(buffer[i]);
if (address < start || address >= end)
continue;
// return address points after the call
// let's subtract 1 to make sure we don't hit the next bytecode
// (in practice that's not necessary, because our native_call() sequence continues)
auto offset = address - start - 1;
auto& entry = find_mapping_entry(offset);
if (entry.block_index < executable.basic_blocks.size()) {
auto const& block = *executable.basic_blocks[entry.block_index];
if (entry.bytecode_offset < block.size()) {
// This is rather clunky, but Interpreter::instruction_stream_iterator() gives out references, so we need to keep it alive.
m_instruction_stream_iterator = make<Bytecode::InstructionStreamIterator>(block.instruction_stream(), &executable, entry.bytecode_offset);
return *m_instruction_stream_iterator;
}
if (address < start || address >= end)
return {};
auto const& entry = find_mapping_entry(address - start - 1);
if (entry.block_index < executable.basic_blocks.size()) {
auto const& block = *executable.basic_blocks[entry.block_index];
if (entry.bytecode_offset < block.size()) {
auto iterator = Bytecode::InstructionStreamIterator { block.instruction_stream(), &executable, entry.bytecode_offset };
return iterator.source_range();
}
}
#endif
return {};
}

@@ -34,7 +34,7 @@ public:
void run(VM&) const;
void dump_disassembly(Bytecode::Executable const& executable) const;
BytecodeMapping const& find_mapping_entry(size_t native_offset) const;
Optional<Bytecode::InstructionStreamIterator const&> instruction_stream_iterator(Bytecode::Executable const& executable) const;
Optional<UnrealizedSourceRange> get_source_range(Bytecode::Executable const& executable, FlatPtr address) const;
ReadonlyBytes code_bytes() const { return { m_code, m_size }; }

@@ -75,13 +75,13 @@ ThrowCompletionOr<void> Error::install_error_cause(Value options)
void Error::populate_stack()
{
auto& vm = this->vm();
m_traceback.ensure_capacity(vm.execution_context_stack().size());
for (ssize_t i = vm.execution_context_stack().size() - 1; i >= 0; i--) {
auto context = vm.execution_context_stack()[i];
auto stack_trace = vm.stack_trace();
m_traceback.ensure_capacity(stack_trace.size());
for (auto& element : stack_trace) {
auto* context = element.execution_context;
UnrealizedSourceRange range = {};
if (context->instruction_stream_iterator.has_value())
range = context->instruction_stream_iterator->source_range();
if (element.source_range.has_value())
range = element.source_range.value();
TracebackFrame frame {
.function_name = context->function_name,
.source_range_storage = range,

@@ -51,9 +51,16 @@ public:
MarkedVector<Value> local_variables;
bool is_strict_mode { false };
RefPtr<Bytecode::Executable> executable;
// https://html.spec.whatwg.org/multipage/webappapis.html#skip-when-determining-incumbent-counter
// FIXME: Move this out of LibJS (e.g. by using the CustomData concept), as it's used exclusively by LibWeb.
size_t skip_when_determining_incumbent_counter { 0 };
};
struct StackTraceElement {
ExecutionContext* execution_context;
Optional<UnrealizedSourceRange> source_range;
};
}

@@ -15,6 +15,7 @@
#include <LibFileSystem/FileSystem.h>
#include <LibJS/AST.h>
#include <LibJS/Bytecode/Interpreter.h>
#include <LibJS/JIT/NativeExecutable.h>
#include <LibJS/Runtime/AbstractOperations.h>
#include <LibJS/Runtime/Array.h>
#include <LibJS/Runtime/BoundFunction.h>
@@ -1135,4 +1136,66 @@ void VM::pop_execution_context()
on_call_stack_emptied();
}
#if ARCH(X86_64)
struct [[gnu::packed]] NativeStackFrame {
NativeStackFrame* prev;
FlatPtr return_address;
};
#endif
Vector<FlatPtr> VM::get_native_stack_trace() const
{
Vector<FlatPtr> buffer;
#if ARCH(X86_64)
// Manually walk the stack, because backtrace() does not traverse through JIT frames.
auto* frame = bit_cast<NativeStackFrame*>(__builtin_frame_address(0));
while (bit_cast<FlatPtr>(frame) < m_stack_info.top() && bit_cast<FlatPtr>(frame) >= m_stack_info.base()) {
buffer.append(frame->return_address);
frame = frame->prev;
}
#endif
return buffer;
}
static Optional<UnrealizedSourceRange> get_source_range(ExecutionContext const* context, Vector<FlatPtr> const& native_stack)
{
// native function
if (!context->executable)
return {};
auto const* native_executable = context->executable->native_executable();
if (!native_executable) {
// Interpreter frame
if (context->instruction_stream_iterator.has_value())
return context->instruction_stream_iterator->source_range();
return {};
}
// JIT frame
for (auto address : native_stack) {
auto range = native_executable->get_source_range(*context->executable, address);
if (range.has_value()) {
auto realized = range->realize();
return range;
}
}
return {};
}
Vector<StackTraceElement> VM::stack_trace() const
{
auto native_stack = get_native_stack_trace();
Vector<StackTraceElement> stack_trace;
for (ssize_t i = m_execution_context_stack.size() - 1; i >= 0; i--) {
auto* context = m_execution_context_stack[i];
stack_trace.append({
.execution_context = context,
.source_range = get_source_range(context, native_stack).value_or({}),
});
}
return stack_trace;
}
}
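
Usage note (illustrative, not part of the commit): nothing above is realized into file/line/column until a consumer asks for it, which is what makes the collection lazy. A rough sketch of such a consumer follows, assuming the JS::VM, StackTraceElement and UnrealizedSourceRange types introduced in this commit; the filename()/start.line/start.column accessors follow LibJS's SourceRange and are an assumption here.

    #include <AK/Format.h>
    #include <LibJS/Runtime/VM.h>

    // Walk the lazily collected trace and realize source ranges on demand,
    // roughly what Error::populate_stack() does when building its traceback.
    static void dump_stack_trace(JS::VM& vm)
    {
        for (auto& element : vm.stack_trace()) {
            if (!element.source_range.has_value())
                continue;
            auto range = element.source_range->realize();
            dbgln("at {} ({}:{}:{})", element.execution_context->function_name,
                range.filename(), range.start.line, range.start.column);
        }
    }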

@@ -254,6 +254,8 @@ public:
// NOTE: This is meant as a temporary stopgap until everything is bytecode.
ThrowCompletionOr<Value> execute_ast_node(ASTNode const&);
Vector<StackTraceElement> stack_trace() const;
private:
using ErrorMessages = AK::Array<String, to_underlying(ErrorMessage::__Count)>;
@@ -277,6 +279,8 @@ private:
void set_well_known_symbols(WellKnownSymbols well_known_symbols) { m_well_known_symbols = move(well_known_symbols); }
Vector<FlatPtr> get_native_stack_trace() const;
HashMap<String, GCPtr<PrimitiveString>> m_string_cache;
HashMap<DeprecatedString, GCPtr<PrimitiveString>> m_deprecated_string_cache;

@@ -5,7 +5,7 @@ const stackSetter = stackDescriptor.set;
describe("getter - normal behavior", () => {
test("basic functionality", () => {
const stackFrames = [
/^ at .*Error \(.*\/Error\.prototype\.stack\.js:\d+:\d+\)$/,
/^ at .*Error$/,
/^ at .+\/Error\/Error\.prototype\.stack\.js:\d+:\d+$/,
/^ at test \(.+\/test-common.js:\d+:\d+\)$/,
/^ at .+\/Error\/Error\.prototype\.stack\.js:6:9$/,