[vm/ffi] Move ffi-callback related state from Thread to ObjectStore, move jit trampolines from Isolate to IsolateGroup

The ffi-callback related information on the [Thread] object is metadata
corresponding to ffi-callback-trampoline [Function] objects. There is
nothing thread or isolate specific about it.

Moving it away from [Thread] is needed because an [Isolate] can
have different [Thread] objects across its lifetime (see [0]): When
the stack of an isolate is empty, we now reserve the right to
recycle the [Thread]. If the isolate later runs again, it may
get a new [Thread] object.

This CL moves this information from [Thread] to the [ObjectStore]. In
addition we make the compiler be responsible for populating this
metadata - instead of doing this per-call site of
`Pointer.fromFunction()`. It will be preserved across snapshot writing
& snapshot reading (for AppJIT as well as AppAOT).

Similarly the JIT trampolines that are on Isolate aren't isolate
specific and can go to [IsolateGroup]. This simplifies doing the above
as the compiler can allocate those as well.

The effect is that the [Thread] object gets smaller, GC doesn't have to
visit the 2 slots per-thread. It comes at the expense of 2 more loads
when invoking the callback.

[0] https://dart-review.googlesource.com/c/sdk/+/297920

TEST=Regression test is vm/ffi{,_2}/invoke_callback_after_suspension_test

Change-Id: Ifde46a9f6e79819b5c0e359c3d3998d1d93b9b1e
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/303700
Reviewed-by: Daco Harkes <dacoharkes@google.com>
Reviewed-by: Liam Appelbe <liama@google.com>
Reviewed-by: Ryan Macnak <rmacnak@google.com>
Commit-Queue: Martin Kustermann <kustermann@google.com>
This commit is contained in:
Martin Kustermann 2023-05-17 11:23:28 +00:00 committed by Commit Queue
parent 4c20cedd30
commit 2d230aa0b5
30 changed files with 701 additions and 565 deletions

View file

@ -81,17 +81,6 @@ DEFINE_NATIVE_ENTRY(Ffi_pointerFromFunction, 1, 1) {
: function.EnsureHasCode());
ASSERT(!code.IsNull());
#if defined(TARGET_ARCH_IA32)
// On ia32, store the stack delta that we need to use when returning.
const intptr_t stack_return_delta =
function.FfiCSignatureReturnsStruct() && CallingConventions::kUsesRet4
? compiler::target::kWordSize
: 0;
#else
const intptr_t stack_return_delta = 0;
#endif
thread->SetFfiCallbackCode(function, code, stack_return_delta);
uword entry_point = code.EntryPoint();
// In JIT we use one more indirection:
@ -105,8 +94,9 @@ DEFINE_NATIVE_ENTRY(Ffi_pointerFromFunction, 1, 1) {
// is why we use the jit trampoline).
#if !defined(DART_PRECOMPILED_RUNTIME)
if (NativeCallbackTrampolines::Enabled()) {
entry_point = isolate->native_callback_trampolines()->TrampolineForId(
function.FfiCallbackId());
entry_point =
isolate->group()->native_callback_trampolines()->TrampolineForId(
function.FfiCallbackId());
}
#endif

View file

@ -12,11 +12,10 @@
namespace dart {
static HMODULE ntdll_module;
static decltype(
&::RtlAddGrowableFunctionTable) add_growable_function_table_func_ = nullptr;
static decltype(
&::RtlDeleteGrowableFunctionTable) delete_growable_function_table_func_ =
nullptr;
static decltype(&::RtlAddGrowableFunctionTable)
add_growable_function_table_func_ = nullptr;
static decltype(&::RtlDeleteGrowableFunctionTable)
delete_growable_function_table_func_ = nullptr;
const intptr_t kReservedUnwindingRecordsSizeBytes = 64;
intptr_t UnwindingRecordsPlatform::SizeInBytes() {

View file

@ -6462,14 +6462,29 @@ class ProgramDeserializationRoots : public DeserializationRoots {
}
d->heap()->old_space()->EvaluateAfterLoading();
const Array& units =
Array::Handle(isolate_group->object_store()->loading_units());
auto object_store = isolate_group->object_store();
const Array& units = Array::Handle(object_store->loading_units());
if (!units.IsNull()) {
LoadingUnit& unit = LoadingUnit::Handle();
unit ^= units.At(LoadingUnit::kRootId);
unit.set_base_objects(refs);
}
#if !defined(DART_PRECOMPILED_RUNTIME)
// The JIT trampolines are allocated at runtime and not part of the
// snapshot. So if the snapshot contains ffi-callback-trampolines we'll
// have to allocate corresponding JIT trampolines.
auto* const tramps = isolate_group->native_callback_trampolines();
const auto& ffi_callback_code =
GrowableObjectArray::Handle(object_store->ffi_callback_code());
if (!ffi_callback_code.IsNull()) {
const intptr_t len = ffi_callback_code.Length();
while (tramps->next_callback_id() < len) {
tramps->AllocateTrampoline();
}
}
#endif // !defined(DART_PRECOMPILED_RUNTIME)
// Setup native resolver for bootstrap impl.
Bootstrap::SetupNativeResolver();
}

View file

@ -30,6 +30,7 @@
#include "vm/compiler/compiler_pass.h"
#include "vm/compiler/compiler_state.h"
#include "vm/compiler/compiler_timings.h"
#include "vm/compiler/ffi/callback.h"
#include "vm/compiler/frontend/flow_graph_builder.h"
#include "vm/compiler/frontend/kernel_to_il.h"
#include "vm/compiler/jit/compiler.h"
@ -3433,6 +3434,11 @@ void PrecompileParsedFunctionHelper::FinalizeCompilation(
function.set_unoptimized_code(code);
function.AttachCode(code);
}
if (function.IsFfiTrampoline() &&
function.FfiCallbackTarget() != Function::null()) {
compiler::ffi::SetFfiCallbackCode(thread(), function, code);
}
}
// Generate allocation stubs referenced by AllocateObject instructions.

View file

@ -1748,7 +1748,11 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// handles.
// Load the code object.
__ LoadFromOffset(R0, THR, compiler::target::Thread::callback_code_offset());
__ LoadFromOffset(R0, THR, compiler::target::Thread::isolate_group_offset());
__ LoadFromOffset(R0, R0,
compiler::target::IsolateGroup::object_store_offset());
__ LoadFromOffset(R0, R0,
compiler::target::ObjectStore::ffi_callback_code_offset());
__ LoadFieldFromOffset(R0, R0,
compiler::target::GrowableObjectArray::data_offset());
__ LoadFieldFromOffset(CODE_REG, R0,

View file

@ -1635,7 +1635,11 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// handles.
// Load the code object.
__ LoadFromOffset(R0, THR, compiler::target::Thread::callback_code_offset());
__ LoadFromOffset(R0, THR, compiler::target::Thread::isolate_group_offset());
__ LoadFromOffset(R0, R0,
compiler::target::IsolateGroup::object_store_offset());
__ LoadFromOffset(R0, R0,
compiler::target::ObjectStore::ffi_callback_code_offset());
__ LoadCompressedFieldFromOffset(
R0, R0, compiler::target::GrowableObjectArray::data_offset());
__ LoadCompressedFieldFromOffset(

View file

@ -1288,7 +1288,12 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// Load the code object.
__ movl(EAX, compiler::Address(
THR, compiler::target::Thread::callback_code_offset()));
THR, compiler::target::Thread::isolate_group_offset()));
__ movl(EAX, compiler::Address(
EAX, compiler::target::IsolateGroup::object_store_offset()));
__ movl(EAX,
compiler::Address(
EAX, compiler::target::ObjectStore::ffi_callback_code_offset()));
__ movl(EAX, compiler::FieldAddress(
EAX, compiler::target::GrowableObjectArray::data_offset()));
__ movl(CODE_REG, compiler::FieldAddress(

View file

@ -1803,7 +1803,11 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// handles.
// Load the code object.
__ LoadFromOffset(A0, THR, compiler::target::Thread::callback_code_offset());
__ LoadFromOffset(A0, THR, compiler::target::Thread::isolate_group_offset());
__ LoadFromOffset(A0, A0,
compiler::target::IsolateGroup::object_store_offset());
__ LoadFromOffset(A0, A0,
compiler::target::ObjectStore::ffi_callback_code_offset());
__ LoadCompressedFieldFromOffset(
A0, A0, compiler::target::GrowableObjectArray::data_offset());
__ LoadCompressedFieldFromOffset(

View file

@ -1537,7 +1537,12 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// Load the code object.
__ movq(RAX, compiler::Address(
THR, compiler::target::Thread::callback_code_offset()));
THR, compiler::target::Thread::isolate_group_offset()));
__ movq(RAX, compiler::Address(
RAX, compiler::target::IsolateGroup::object_store_offset()));
__ movq(RAX,
compiler::Address(
RAX, compiler::target::ObjectStore::ffi_callback_code_offset()));
__ LoadCompressed(
RAX, compiler::FieldAddress(
RAX, compiler::target::GrowableObjectArray::data_offset()));

View file

@ -103,6 +103,95 @@ FunctionPtr NativeCallbackFunction(const FunctionType& c_signature,
return function.ptr();
}
static void EnsureFfiCallbackMetadata(Thread* thread, intptr_t callback_id) {
static constexpr intptr_t kInitialCallbackIdsReserved = 16;
auto object_store = thread->isolate_group()->object_store();
auto zone = thread->zone();
auto& code_array =
GrowableObjectArray::Handle(zone, object_store->ffi_callback_code());
if (code_array.IsNull()) {
code_array =
GrowableObjectArray::New(kInitialCallbackIdsReserved, Heap::kOld);
object_store->set_ffi_callback_code(code_array);
}
#if defined(TARGET_ARCH_IA32)
auto& stack_array =
TypedData::Handle(zone, object_store->ffi_callback_stack_return());
if (stack_array.IsNull()) {
stack_array = TypedData::New(kTypedDataInt8ArrayCid,
kInitialCallbackIdsReserved, Heap::kOld);
object_store->set_ffi_callback_stack_return(stack_array);
}
ASSERT(code_array.Length() <= stack_array.Length());
#endif
#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(DART_PRECOMPILER)
auto* const tramps = thread->isolate_group()->native_callback_trampolines();
ASSERT(code_array.Length() == tramps->next_callback_id());
#endif
if (code_array.Length() <= callback_id) {
// Ensure we've enough space in the arrays.
while (!(callback_id < code_array.Length())) {
code_array.Add(Code::null_object());
#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(DART_PRECOMPILER)
tramps->AllocateTrampoline();
#endif
}
#if defined(TARGET_ARCH_IA32)
if (callback_id >= stack_array.Length()) {
const int32_t capacity = stack_array.Length();
if (callback_id >= capacity) {
// Ensure both that we grow enough and an exponential growth strategy.
const int32_t new_capacity =
Utils::Maximum(callback_id + 1, capacity * 2);
stack_array = TypedData::Grow(stack_array, new_capacity);
object_store->set_ffi_callback_stack_return(stack_array);
}
}
#endif // defined(TARGET_ARCH_IA32)
}
#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(DART_PRECOMPILER)
// Verify invariants of the 3 arrays (still) hold.
ASSERT(code_array.Length() == tramps->next_callback_id());
#if defined(TARGET_ARCH_IA32)
ASSERT(code_array.Length() <= stack_array.Length());
#endif
#endif
ASSERT(callback_id < code_array.Length());
}
void SetFfiCallbackCode(Thread* thread,
const Function& ffi_trampoline,
const Code& code) {
auto zone = thread->zone();
const intptr_t callback_id = ffi_trampoline.FfiCallbackId();
EnsureFfiCallbackMetadata(thread, callback_id);
auto object_store = thread->isolate_group()->object_store();
const auto& code_array =
GrowableObjectArray::Handle(zone, object_store->ffi_callback_code());
code_array.SetAt(callback_id, code);
#if defined(TARGET_ARCH_IA32)
// On ia32, store the stack delta that we need to use when returning.
const intptr_t stack_return_delta =
ffi_trampoline.FfiCSignatureReturnsStruct() &&
CallingConventions::kUsesRet4
? compiler::target::kWordSize
: 0;
const auto& stack_delta_array =
TypedData::Handle(zone, object_store->ffi_callback_stack_return());
stack_delta_array.SetUint8(callback_id, stack_return_delta);
#endif // defined(TARGET_ARCH_IA32)
}
} // namespace ffi
} // namespace compiler

View file

@ -23,6 +23,14 @@ FunctionPtr NativeCallbackFunction(const FunctionType& c_signature,
const Function& dart_target,
const Instance& exceptional_return);
// Builds a mapping from `callback-id` to code object / ...
//
// This mapping is used when a ffi trampoline function is invoked in order to
// find its corresponding [Code] object as well as other metadata.
void SetFfiCallbackCode(Thread* thread,
const Function& ffi_trampoline,
const Code& code);
} // namespace ffi
} // namespace compiler

View file

@ -22,6 +22,7 @@
#include "vm/compiler/cha.h"
#include "vm/compiler/compiler_pass.h"
#include "vm/compiler/compiler_state.h"
#include "vm/compiler/ffi/callback.h"
#include "vm/compiler/frontend/flow_graph_builder.h"
#include "vm/compiler/frontend/kernel_to_il.h"
#include "vm/compiler/jit/jit_call_specializer.h"
@ -469,6 +470,12 @@ CodePtr CompileParsedFunctionHelper::FinalizeCompilation(
function.SetUsageCounter(0);
}
}
if (function.IsFfiTrampoline() &&
function.FfiCallbackTarget() != Function::null()) {
compiler::ffi::SetFfiCallbackCode(thread(), function, code);
}
return code.ptr();
}

View file

@ -1206,8 +1206,6 @@ class Thread : public AllStatic {
static word saved_stack_limit_offset();
static word unboxed_runtime_arg_offset();
static word callback_code_offset();
static word callback_stack_return_offset();
static word tsan_utils_offset();
static word jump_to_frame_entry_point_offset();
@ -1319,6 +1317,9 @@ class ObjectStore : public AllStatic {
static word string_type_offset();
static word type_type_offset();
static word ffi_callback_code_offset();
static word ffi_callback_stack_return_offset();
static word suspend_state_await_offset();
static word suspend_state_await_with_type_check_offset();
static word suspend_state_handle_exception_offset();

File diff suppressed because it is too large Load diff

View file

@ -195,6 +195,8 @@
FIELD(ObjectStore, record_field_names_offset) \
FIELD(ObjectStore, string_type_offset) \
FIELD(ObjectStore, type_type_offset) \
FIELD(ObjectStore, ffi_callback_code_offset) \
FIELD(ObjectStore, ffi_callback_stack_return_offset) \
FIELD(ObjectStore, suspend_state_await_offset) \
FIELD(ObjectStore, suspend_state_await_with_type_check_offset) \
FIELD(ObjectStore, suspend_state_handle_exception_offset) \
@ -340,8 +342,6 @@
FIELD(Thread, write_barrier_entry_point_offset) \
FIELD(Thread, write_barrier_mask_offset) \
COMPRESSED_ONLY(FIELD(Thread, heap_base_offset)) \
FIELD(Thread, callback_code_offset) \
FIELD(Thread, callback_stack_return_offset) \
FIELD(Thread, next_task_id_offset) \
FIELD(Thread, random_offset) \
FIELD(Thread, jump_to_frame_entry_point_offset) \

View file

@ -107,34 +107,34 @@ class StubCodeCompiler {
#if defined(TARGET_ARCH_X64)
static constexpr intptr_t kNativeCallbackTrampolineSize = 10;
#if defined(DART_COMPRESSED_POINTERS)
static constexpr intptr_t kNativeCallbackSharedStubSize = 225;
static constexpr intptr_t kNativeCallbackSharedStubSize = 236;
#else
static constexpr intptr_t kNativeCallbackSharedStubSize = 217;
static constexpr intptr_t kNativeCallbackSharedStubSize = 228;
#endif
static constexpr intptr_t kNativeCallbackTrampolineStackDelta = 2;
#elif defined(TARGET_ARCH_IA32)
static constexpr intptr_t kNativeCallbackTrampolineSize = 10;
static constexpr intptr_t kNativeCallbackSharedStubSize = 134;
static constexpr intptr_t kNativeCallbackSharedStubSize = 152;
static constexpr intptr_t kNativeCallbackTrampolineStackDelta = 4;
#elif defined(TARGET_ARCH_ARM)
static constexpr intptr_t kNativeCallbackTrampolineSize = 12;
static constexpr intptr_t kNativeCallbackSharedStubSize = 140;
static constexpr intptr_t kNativeCallbackSharedStubSize = 148;
static constexpr intptr_t kNativeCallbackTrampolineStackDelta = 4;
#elif defined(TARGET_ARCH_ARM64)
static constexpr intptr_t kNativeCallbackTrampolineSize = 12;
#if defined(DART_COMPRESSED_POINTERS)
static constexpr intptr_t kNativeCallbackSharedStubSize = 260;
static constexpr intptr_t kNativeCallbackSharedStubSize = 268;
#else
static constexpr intptr_t kNativeCallbackSharedStubSize = 236;
static constexpr intptr_t kNativeCallbackSharedStubSize = 244;
#endif
static constexpr intptr_t kNativeCallbackTrampolineStackDelta = 2;
#elif defined(TARGET_ARCH_RISCV32)
static constexpr intptr_t kNativeCallbackTrampolineSize = 8;
static constexpr intptr_t kNativeCallbackSharedStubSize = 230;
static constexpr intptr_t kNativeCallbackSharedStubSize = 238;
static constexpr intptr_t kNativeCallbackTrampolineStackDelta = 2;
#elif defined(TARGET_ARCH_RISCV64)
static constexpr intptr_t kNativeCallbackTrampolineSize = 8;
static constexpr intptr_t kNativeCallbackSharedStubSize = 202;
static constexpr intptr_t kNativeCallbackSharedStubSize = 210;
static constexpr intptr_t kNativeCallbackTrampolineStackDelta = 2;
#else
#error What architecture?

View file

@ -450,7 +450,11 @@ void StubCodeCompiler::GenerateJITCallbackTrampolines(
COMPILE_ASSERT(!IsArgumentRegister(R8));
// Load the code object.
__ LoadFromOffset(R5, THR, compiler::target::Thread::callback_code_offset());
__ LoadFromOffset(R5, THR, compiler::target::Thread::isolate_group_offset());
__ LoadFromOffset(R5, R5,
compiler::target::IsolateGroup::object_store_offset());
__ LoadFromOffset(R5, R5,
compiler::target::ObjectStore::ffi_callback_code_offset());
__ LoadFieldFromOffset(R5, R5,
compiler::target::GrowableObjectArray::data_offset());
__ ldr(R5, __ ElementAddressForRegIndex(

View file

@ -517,7 +517,11 @@ void StubCodeCompiler::GenerateJITCallbackTrampolines(
COMPILE_ASSERT(!IsCalleeSavedRegister(R10) && !IsArgumentRegister(R10));
// Load the code object.
__ LoadFromOffset(R10, THR, compiler::target::Thread::callback_code_offset());
__ LoadFromOffset(R10, THR, compiler::target::Thread::isolate_group_offset());
__ LoadFromOffset(R10, R10,
compiler::target::IsolateGroup::object_store_offset());
__ LoadFromOffset(R10, R10,
compiler::target::ObjectStore::ffi_callback_code_offset());
#if defined(DART_COMPRESSED_POINTERS)
// Partially setup HEAP_BITS for LoadCompressed[FieldFromOffset].
ASSERT(IsAbiPreservedRegister(HEAP_BITS)); // Need to save and restore.

View file

@ -264,7 +264,12 @@ void StubCodeCompiler::GenerateJITCallbackTrampolines(
// Load the target from the thread.
__ movl(ECX, compiler::Address(
THR, compiler::target::Thread::callback_code_offset()));
THR, compiler::target::Thread::isolate_group_offset()));
__ movl(ECX, compiler::Address(
ECX, compiler::target::IsolateGroup::object_store_offset()));
__ movl(ECX,
compiler::Address(
ECX, compiler::target::ObjectStore::ffi_callback_code_offset()));
__ movl(ECX, compiler::FieldAddress(
ECX, compiler::target::GrowableObjectArray::data_offset()));
__ movl(ECX, __ ElementAddressForRegIndex(
@ -293,9 +298,15 @@ void StubCodeCompiler::GenerateJITCallbackTrampolines(
// - ECX free
// Load the return stack delta from the thread.
__ movl(ECX,
compiler::Address(
THR, compiler::target::Thread::callback_stack_return_offset()));
__ movl(ECX, compiler::Address(
THR, compiler::target::Thread::isolate_group_offset()));
__ movl(ECX, compiler::Address(
ECX, compiler::target::IsolateGroup::object_store_offset()));
__ movl(
ECX,
compiler::Address(
ECX,
compiler::target::ObjectStore::ffi_callback_stack_return_offset()));
__ popl(EBX); // Compiler callback id.
__ movzxb(EBX, __ ElementAddressForRegIndex(
/*external=*/false,

View file

@ -375,7 +375,11 @@ void StubCodeCompiler::GenerateJITCallbackTrampolines(
COMPILE_ASSERT(!IsCalleeSavedRegister(T3) && !IsArgumentRegister(T3));
// Load the code object.
__ LoadFromOffset(T2, THR, compiler::target::Thread::callback_code_offset());
__ LoadFromOffset(T2, THR, compiler::target::Thread::isolate_group_offset());
__ LoadFromOffset(T2, T2,
compiler::target::IsolateGroup::object_store_offset());
__ LoadFromOffset(T2, T2,
compiler::target::ObjectStore::ffi_callback_code_offset());
__ LoadCompressedFieldFromOffset(
T2, T2, compiler::target::GrowableObjectArray::data_offset());
__ LoadCompressed(

View file

@ -461,7 +461,12 @@ void StubCodeCompiler::GenerateJITCallbackTrampolines(
// Load the target from the thread.
__ movq(TMP, compiler::Address(
THR, compiler::target::Thread::callback_code_offset()));
THR, compiler::target::Thread::isolate_group_offset()));
__ movq(TMP, compiler::Address(
TMP, compiler::target::IsolateGroup::object_store_offset()));
__ movq(TMP,
compiler::Address(
TMP, compiler::target::ObjectStore::ffi_callback_code_offset()));
__ LoadCompressed(
TMP, compiler::FieldAddress(
TMP, compiler::target::GrowableObjectArray::data_offset()));

View file

@ -70,7 +70,6 @@ void NativeCallbackTrampolines::AllocateTrampoline() {
#if !defined(PRODUCT)
const char* name = "FfiJitCallbackTrampolines";
ASSERT(!Thread::Current()->OwnsSafepoint());
if (CodeObservers::AreActive()) {
const auto& comments = CreateCommentsFrom(&assembler);
CodeObservers::NotifyAll(name,

View file

@ -335,6 +335,9 @@ IsolateGroup::IsolateGroup(std::shared_ptr<IsolateGroupSource> source,
start_time_micros_(OS::GetCurrentMonotonicMicros()),
is_system_isolate_group_(source->flags.is_system_isolate),
random_(),
#if !defined(DART_PRECOMPILED_RUNTIME)
native_callback_trampolines_(),
#endif
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
last_reload_timestamp_(OS::GetCurrentTimeMillis()),
reload_every_n_stack_overflow_checks_(FLAG_reload_every),
@ -1570,9 +1573,6 @@ Isolate::Isolate(IsolateGroup* isolate_group,
finalizers_(GrowableObjectArray::null()),
isolate_group_(isolate_group),
isolate_object_store_(new IsolateObjectStore()),
#if !defined(DART_PRECOMPILED_RUNTIME)
native_callback_trampolines_(),
#endif
isolate_flags_(0),
#if !defined(PRODUCT)
last_resume_timestamp_(OS::GetCurrentTimeMillis()),

View file

@ -420,6 +420,12 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
bool is_system_isolate_group() const { return is_system_isolate_group_; }
#if !defined(DART_PRECOMPILED_RUNTIME)
NativeCallbackTrampolines* native_callback_trampolines() {
return &native_callback_trampolines_;
}
#endif
// IsolateGroup-specific flag handling.
static void FlagsInitialize(Dart_IsolateFlags* api_flags);
void FlagsCopyTo(Dart_IsolateFlags* api_flags);
@ -831,6 +837,10 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
bool is_system_isolate_group_;
Random random_;
#if !defined(DART_PRECOMPILED_RUNTIME)
NativeCallbackTrampolines native_callback_trampolines_;
#endif
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
int64_t last_reload_timestamp_;
std::shared_ptr<IsolateGroupReloadContext> group_reload_context_;
@ -1059,12 +1069,6 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
return OFFSET_OF(Isolate, finalizers_);
}
#if !defined(DART_PRECOMPILED_RUNTIME)
NativeCallbackTrampolines* native_callback_trampolines() {
return &native_callback_trampolines_;
}
#endif
Dart_EnvironmentCallback environment_callback() const {
return environment_callback_;
}
@ -1543,10 +1547,6 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
IdleTimeHandler idle_time_handler_;
std::unique_ptr<IsolateObjectStore> isolate_object_store_;
#if !defined(DART_PRECOMPILED_RUNTIME)
NativeCallbackTrampolines native_callback_trampolines_;
#endif
#define ISOLATE_FLAG_BITS(V) \
V(ErrorsFatal) \
V(IsRunnable) \

View file

@ -205,6 +205,8 @@ class ObjectPointerVisitor;
RW(ObjectPool, global_object_pool) \
RW(Array, unique_dynamic_targets) \
RW(GrowableObjectArray, megamorphic_cache_table) \
RW(GrowableObjectArray, ffi_callback_code) \
RW(TypedData, ffi_callback_stack_return) \
RW(Code, build_generic_method_extractor_code) \
RW(Code, build_nongeneric_method_extractor_code) \
RW(Code, dispatch_table_null_error_stub) \

View file

@ -3844,6 +3844,41 @@ DEFINE_RAW_LEAF_RUNTIME_ENTRY(ExitSafepointIgnoreUnwindInProgress,
false,
&DFLRT_ExitSafepointIgnoreUnwindInProgress);
// Ensure that 'callback_id' refers to a valid callback.
//
// If "entry != 0", additionally checks that entry is inside the instructions
// of this callback.
//
// Aborts if any of these conditions fails.
static void VerifyCallbackIdMetadata(Thread* thread,
int32_t callback_id,
uword entry) {
NoSafepointScope _;
const GrowableObjectArrayPtr array =
thread->isolate_group()->object_store()->ffi_callback_code();
if (array == GrowableObjectArray::null()) {
FATAL("Cannot invoke callback on incorrect isolate.");
}
const SmiPtr length_smi = GrowableObjectArray::NoSafepointLength(array);
const intptr_t length = Smi::Value(length_smi);
if (callback_id < 0 || callback_id >= length) {
FATAL("Cannot invoke callback on incorrect isolate.");
}
if (entry != 0) {
CompressedObjectPtr* const code_array =
Array::DataOf(GrowableObjectArray::NoSafepointData(array));
const CodePtr code =
Code::RawCast(code_array[callback_id].Decompress(array.heap_base()));
if (!Code::ContainsInstructionAt(code, entry)) {
FATAL("Cannot invoke callback on incorrect isolate.");
}
}
}
// Not registered as a runtime entry because we can't use Thread to look it up.
static Thread* GetThreadForNativeCallback(uword callback_id,
uword return_address) {
@ -3867,7 +3902,7 @@ static Thread* GetThreadForNativeCallback(uword callback_id,
thread->set_execution_state(Thread::kThreadInVM);
thread->ExitSafepoint();
thread->VerifyCallbackIsolate(callback_id, return_address);
VerifyCallbackIdMetadata(thread, callback_id, return_address);
return thread;
}

View file

@ -72,8 +72,6 @@ Thread::Thread(bool is_vm_isolate)
resume_pc_(0),
execution_state_(kThreadInNative),
safepoint_state_(0),
ffi_callback_code_(GrowableObjectArray::null()),
ffi_callback_stack_return_(TypedData::null()),
api_top_scope_(nullptr),
double_truncate_round_supported_(
TargetCPUFeatures::double_truncate_round_supported() ? 1 : 0),
@ -346,8 +344,6 @@ void Thread::AssertEmptyThreadInvariants() {
if (active_stacktrace_.untag() != 0) {
ASSERT(field_table_values_ == nullptr);
ASSERT(global_object_pool_ == Object::null());
ASSERT(ffi_callback_code_ == Object::null());
ASSERT(ffi_callback_stack_return_ == Object::null());
#define CHECK_REUSABLE_HANDLE(object) ASSERT(object##_handle_->IsNull());
REUSABLE_HANDLE_LIST(CHECK_REUSABLE_HANDLE)
#undef CHECK_REUSABLE_HANDLE
@ -888,9 +884,6 @@ void Thread::VisitObjectPointers(ObjectPointerVisitor* visitor,
visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&active_exception_));
visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&active_stacktrace_));
visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&sticky_error_));
visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&ffi_callback_code_));
visitor->VisitPointer(
reinterpret_cast<ObjectPtr*>(&ffi_callback_stack_return_));
// Visit the api local scope as it has all the api local handles.
ApiLocalScope* scope = api_top_scope_;
@ -1378,8 +1371,6 @@ void Thread::ResetDartMutatorState(Isolate* isolate) {
is_unwind_in_progress_ = false;
field_table_values_ = nullptr;
ffi_callback_code_ = GrowableObjectArray::null();
ffi_callback_stack_return_ = TypedData::null();
ONLY_IN_PRECOMPILED(global_object_pool_ = ObjectPool::null());
ONLY_IN_PRECOMPILED(dispatch_table_array_ = nullptr);
}
@ -1433,111 +1424,4 @@ NoReloadScope::~NoReloadScope() {
#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
}
void Thread::EnsureFfiCallbackMetadata(intptr_t callback_id) {
static constexpr intptr_t kInitialCallbackIdsReserved = 16;
if (ffi_callback_code_ == GrowableObjectArray::null()) {
ffi_callback_code_ = GrowableObjectArray::New(kInitialCallbackIdsReserved);
}
#if defined(TARGET_ARCH_IA32)
if (ffi_callback_stack_return_ == TypedData::null()) {
ffi_callback_stack_return_ = TypedData::New(
kTypedDataInt8ArrayCid, kInitialCallbackIdsReserved, Heap::kOld);
}
#endif // defined(TARGET_ARCH_IA32)
const auto& code_array =
GrowableObjectArray::Handle(zone(), ffi_callback_code_);
#if !defined(DART_PRECOMPILED_RUNTIME)
auto* const tramps = isolate()->native_callback_trampolines();
#if defined(TARGET_ARCH_IA32)
auto& stack_array = TypedData::Handle(zone(), ffi_callback_stack_return_);
#endif
#endif
#if !defined(DART_PRECOMPILED_RUNTIME)
// Verify invariants of the 3 arrays hold.
ASSERT(code_array.Length() == tramps->next_callback_id());
#if defined(TARGET_ARCH_IA32)
ASSERT(code_array.Length() <= stack_array.Length());
#endif
#endif // !defined(DART_PRECOMPILED_RUNTIME)
if (code_array.Length() <= callback_id) {
// Ensure we've enough space in the 3 arrays.
while (!(callback_id < code_array.Length())) {
code_array.Add(Code::null_object());
#if !defined(DART_PRECOMPILED_RUNTIME)
tramps->AllocateTrampoline();
#endif
}
#if defined(TARGET_ARCH_IA32)
if (callback_id >= stack_array.Length()) {
const int32_t capacity = stack_array.Length();
if (callback_id >= capacity) {
// Ensure both that we grow enough and an exponential growth strategy.
const int32_t new_capacity =
Utils::Maximum(callback_id + 1, capacity * 2);
stack_array = TypedData::Grow(stack_array, new_capacity);
ffi_callback_stack_return_ = stack_array.ptr();
}
}
#endif // defined(TARGET_ARCH_IA32)
}
#if !defined(DART_PRECOMPILED_RUNTIME)
// Verify invariants of the 3 arrays (still) hold.
ASSERT(code_array.Length() == tramps->next_callback_id());
#if defined(TARGET_ARCH_IA32)
ASSERT(code_array.Length() <= stack_array.Length());
#endif
#endif
ASSERT(callback_id < code_array.Length());
}
void Thread::SetFfiCallbackCode(const Function& ffi_trampoline,
const Code& code,
intptr_t stack_return_delta) {
const intptr_t callback_id = ffi_trampoline.FfiCallbackId();
EnsureFfiCallbackMetadata(callback_id);
const auto& code_array =
GrowableObjectArray::Handle(zone(), ffi_callback_code_);
code_array.SetAt(callback_id, code);
#if defined(TARGET_ARCH_IA32)
const auto& stack_delta_array =
TypedData::Handle(zone(), ffi_callback_stack_return_);
stack_delta_array.SetUint8(callback_id, stack_return_delta);
#endif // defined(TARGET_ARCH_IA32)
}
void Thread::VerifyCallbackIsolate(int32_t callback_id, uword entry) {
NoSafepointScope _;
const GrowableObjectArrayPtr array = ffi_callback_code_;
if (array == GrowableObjectArray::null()) {
FATAL("Cannot invoke callback on incorrect isolate.");
}
const SmiPtr length_smi = GrowableObjectArray::NoSafepointLength(array);
const intptr_t length = Smi::Value(length_smi);
if (callback_id < 0 || callback_id >= length) {
FATAL("Cannot invoke callback on incorrect isolate.");
}
if (entry != 0) {
CompressedObjectPtr* const code_array =
Array::DataOf(GrowableObjectArray::NoSafepointData(array));
// RawCast allocates handles in ASSERTs.
const CodePtr code = static_cast<CodePtr>(
code_array[callback_id].Decompress(array.heap_base()));
if (!Code::ContainsInstructionAt(code, entry)) {
FATAL("Cannot invoke callback on incorrect isolate.");
}
}
}
} // namespace dart

View file

@ -457,13 +457,6 @@ class Thread : public ThreadState {
return OFFSET_OF(Thread, safepoint_state_);
}
static intptr_t callback_code_offset() {
return OFFSET_OF(Thread, ffi_callback_code_);
}
static intptr_t callback_stack_return_offset() {
return OFFSET_OF(Thread, ffi_callback_stack_return_);
}
// Tag state is maintained on transitions.
enum {
@ -1097,22 +1090,6 @@ class Thread : public ThreadState {
}
}
// Store 'code' for the native callback identified by 'callback_id'.
//
// Expands the callback code array as necessary to accommodate the callback
// ID.
void SetFfiCallbackCode(const Function& ffi_trampoline,
const Code& code,
intptr_t stack_return_delta);
// Ensure that 'callback_id' refers to a valid callback in this isolate.
//
// If "entry != 0", additionally checks that entry is inside the instructions
// of this callback.
//
// Aborts if any of these conditions fails.
void VerifyCallbackIsolate(int32_t callback_id, uword entry);
Thread* next() const { return next_; }
// Visit all object pointers.
@ -1283,8 +1260,6 @@ class Thread : public ThreadState {
* [UnwindErrorInProgressField]
*/
std::atomic<uword> safepoint_state_;
GrowableObjectArrayPtr ffi_callback_code_;
TypedDataPtr ffi_callback_stack_return_;
uword exit_through_ffi_ = 0;
ApiLocalScope* api_top_scope_;
uint8_t double_truncate_round_supported_;
@ -1453,10 +1428,6 @@ class Thread : public ThreadState {
// Thread needs to be at-safepoint.
static void FreeActiveThread(Thread* thread, bool bypass_safepoint);
// Ensures that we have allocated necessary thread-local data structures for
// [callback_id].
void EnsureFfiCallbackMetadata(intptr_t callback_id);
static void SetCurrent(Thread* current) { OSThread::SetCurrentTLS(current); }
#define REUSABLE_FRIEND_DECLARATION(name) \

View file

@ -0,0 +1,39 @@
// Copyright (c) 2023, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// SharedObjects=ffi_test_functions
import 'dart:async';
import 'dart:isolate';
import 'dart:ffi';
import 'callback_tests_utils.dart';
typedef SimpleAdditionType = Int32 Function(Int32, Int32);
int simpleAddition(int x, int y) {
print("simpleAddition($x, $y)");
return x + y;
}
void main() async {
// The main isolate is very special and cannot be suspended (due to having an
// active api scope throughout its lifetime), so we run this test in a helper
// isolate.
const int count = 50;
final futures = <Future>[];
for (int i = 0; i < count; ++i) {
futures.add(Isolate.run(() async {
// First make the callback pointer.
final callbackFunctionPointer =
Pointer.fromFunction<SimpleAdditionType>(simpleAddition, 0);
// Then cause suspension of [Thread].
await Future.delayed(const Duration(seconds: 1));
// Then make use of callback.
CallbackTest("SimpleAddition", callbackFunctionPointer).run();
}));
}
await Future.wait(futures);
}

View file

@ -0,0 +1,41 @@
// Copyright (c) 2023, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// @dart = 2.9
// SharedObjects=ffi_test_functions
import 'dart:async';
import 'dart:isolate';
import 'dart:ffi';
import 'callback_tests_utils.dart';
typedef SimpleAdditionType = Int32 Function(Int32, Int32);
int simpleAddition(int x, int y) {
print("simpleAddition($x, $y)");
return x + y;
}
void main() async {
// The main isolate is very special and cannot be suspended (due to having an
// active api scope throughout its lifetime), so we run this test in a helper
// isolate.
const int count = 50;
final futures = <Future>[];
for (int i = 0; i < count; ++i) {
futures.add(Isolate.run(() async {
// First make the callback pointer.
final callbackFunctionPointer =
Pointer.fromFunction<SimpleAdditionType>(simpleAddition, 0);
// Then cause suspension of [Thread].
await Future.delayed(const Duration(seconds: 1));
// Then make use of callback.
CallbackTest("SimpleAddition", callbackFunctionPointer).run();
}));
}
await Future.wait(futures);
}