Revert "[vm/ffi] Throw on returning Error in Handle"

This reverts commit d9c442bce8.

Reason for revert: https://github.com/flutter/flutter/issues/112726

Original change's description:
> [vm/ffi] Throw on returning `Error` in `Handle`
>
> Makes `Dart_Handle` FFI returns behave as the following snippet:
>
> ```
> Dart_Handle ExampleSnippet() {
>   Dart_Handle result;
>   if (Dart_IsError(result)) {
>     Dart_PropagateError(result);
>   }
>   return result;
> }
> ```
>
> Also makes FFI consistent with `Dart_NativeFunction`s, which
> automatically throw upon return if `Dart_SetReturnValue` has set the
> result to an error.
>
> `UnhandledExceptions` cannot flow out into Dart generated code. So,
> the implementation needs to be in `FfiCallInstr::EmitNativeCode`.
>
> Using `Dart_IsError` is slow compared to a machine code class id
> check. So, we should do the handle unwrapping and class id check in
> machine code.
>
> Unwrapping Handles in machine code is only safe when the GC is
> guaranteed not to run: either (1) in `kThreadInGenerated`, or (2) in
> `kThreadInNative`, but only when transitioned into a safepoint. So the
> handle cannot be unwrapped immediately after the FFI call in machine
> code; we first need to transition back to generated code.
>
> This means we need to transition again to native to do the actual
> `Dart_PropagateError` call. We can do so without the stub in JIT
> because we never return with normal control flow.
>
> Performance impact of this change is within benchmark noise in both
> JIT and AOT.
> Size impact is 42 bytes on x64, which is 10% in AOT and 12% in JIT.
>
> For more numbers see: go/dart-ffi-handle-error
>
> TEST=runtime/bin/ffi_test/ffi_test_functions_vmspecific.cc
> TEST=tests/ffi/vmspecific_handle_test.dart
>
> Closes: https://github.com/dart-lang/sdk/issues/49936
> Change-Id: Ie8fabeb6d53bc80689541bc4470cb37ee2200581
> Cq-Include-Trybots: luci.dart.try:vm-canary-linux-debug-try,vm-ffi-android-debug-arm64c-try,vm-ffi-android-debug-arm-try,vm-kernel-gcc-linux-try,vm-kernel-linux-debug-x64-try,vm-kernel-linux-debug-x64c-try,vm-kernel-msvc-windows-try,vm-kernel-optcounter-threshold-linux-release-x64-try,vm-kernel-precomp-asan-linux-release-x64-try,vm-kernel-precomp-linux-debug-x64-try,vm-kernel-reload-linux-debug-x64-try,vm-kernel-reload-rollback-linux-debug-x64-try,vm-kernel-tsan-linux-release-x64-try,vm-kernel-win-debug-x64-try,vm-kernel-win-debug-ia32-try,vm-precomp-ffi-qemu-linux-release-arm-try,vm-precomp-ffi-qemu-linux-release-riscv64-try,vm-kernel-linux-debug-ia32-try,vm-kernel-mac-release-arm64-try,vm-kernel-precomp-win-debug-x64c-try
> Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/261603
> Reviewed-by: Ryan Macnak <rmacnak@google.com>
> Reviewed-by: Martin Kustermann <kustermann@google.com>
> Commit-Queue: Daco Harkes <dacoharkes@google.com>
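
A note on the `Dart_NativeFunction` consistency mentioned in the quoted
description: the behavior being matched is easiest to see from the embedder
side. The sketch below is a minimal illustration using the public embedder
API; the function name and error message are hypothetical and not part of
this change or the SDK.

```
#include "dart_api.h"  // public Dart embedder API; include path depends on the embedding

// Hypothetical native function, for illustration only. When the handle given
// to Dart_SetReturnValue is an error, the VM throws that error into Dart as
// soon as this function returns -- the behavior the reverted change also gave
// to `Handle`-returning FFI calls.
static void ExampleNative(Dart_NativeArguments args) {
  Dart_Handle error = Dart_NewApiError("example failure");
  Dart_SetReturnValue(args, error);
}
```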

TBR=kustermann@google.com,rmacnak@google.com,dacoharkes@google.com,dart-scoped@luci-project-accounts.iam.gserviceaccount.com

Change-Id: I94cc63de16b54db2b0a4f92759c39a1e569b8e63
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Cq-Include-Trybots: luci.dart.try:vm-canary-linux-debug-try,vm-ffi-android-debug-arm64c-try,vm-ffi-android-debug-arm-try,vm-kernel-gcc-linux-try,vm-kernel-linux-debug-x64-try,vm-kernel-linux-debug-x64c-try,vm-kernel-msvc-windows-try,vm-kernel-optcounter-threshold-linux-release-x64-try,vm-kernel-precomp-asan-linux-release-x64-try,vm-kernel-precomp-linux-debug-x64-try,vm-kernel-reload-linux-debug-x64-try,vm-kernel-reload-rollback-linux-debug-x64-try,vm-kernel-tsan-linux-release-x64-try,vm-kernel-win-debug-x64-try,vm-kernel-win-debug-ia32-try,vm-precomp-ffi-qemu-linux-release-arm-try,vm-precomp-ffi-qemu-linux-release-riscv64-try,vm-kernel-linux-debug-ia32-try,vm-kernel-mac-release-arm64-try,vm-kernel-precomp-win-debug-x64c-try
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/262270
Reviewed-by: Zach Anderson <zra@google.com>
Reviewed-by: Liam Appelbe <liama@google.com>
Commit-Queue: Zach Anderson <zra@google.com>
Author: Zach Anderson, 2022-09-30 21:51:59 +00:00 (committed by Commit Queue)
Parent: cb45aa8a93
Commit: 2742d90033
19 changed files with 876 additions and 1092 deletions


@@ -1290,7 +1290,8 @@ class Handlex20 extends FfiBenchmarkBase {
//
// Main driver.
//
void main(List<String> args) {
void main() {
final benchmarks = [
() => Uint8x01(),
() => Uint8x01(isLeaf: true),
@@ -1337,12 +1338,7 @@ void main(List<String> args) {
() => Handlex10(),
() => Handlex20(),
];
final filter = args.length == 1 ? args[0] : null;
for (final benchmark in benchmarks) {
final b = benchmark();
final run = (filter != null) ? b.name.contains(filter) : true;
if (run) {
b.report();
}
benchmark().report();
}
}


@@ -1279,7 +1279,8 @@ class Handlex20 extends FfiBenchmarkBase {
//
// Main driver.
//
void main(List<String> args) {
void main() {
final benchmarks = [
() => Uint8x01(),
() => Uint8x01(isLeaf: true),
@@ -1326,12 +1327,7 @@ void main(List<String> args) {
() => Handlex10(),
() => Handlex20(),
];
final filter = args.length == 1 ? args[0] : null;
for (final benchmark in benchmarks) {
final b = benchmark();
final run = (filter != null) ? b.name.contains(filter) : true;
if (run) {
b.report();
}
benchmark().report();
}
}


@@ -1065,13 +1065,6 @@ DART_EXPORT int64_t PropagateErrorWithoutHandle(Dart_Handle (*callback)()) {
return 0;
}
DART_EXPORT Dart_Handle ThrowOnReturnOfError(Dart_Handle (*callback)()) {
Dart_Handle result = callback();
const bool is_error = Dart_IsError(result);
printf("ThrowOnReturnOfError is_error %s\n", is_error ? "true" : "false");
return result;
}
DART_EXPORT Dart_Handle TrueHandle() {
return Dart_True();
}


@@ -304,20 +304,15 @@ inline bool IsInternalOnlyClassId(intptr_t index) {
return index <= kLastInternalOnlyCid;
}
// Make sure this function is updated when new Error types are added.
static const ClassId kFirstErrorCid = kErrorCid;
static const ClassId kLastErrorCid = kUnwindErrorCid;
COMPILE_ASSERT(kFirstErrorCid == kErrorCid &&
kApiErrorCid == kFirstErrorCid + 1 &&
kLanguageErrorCid == kFirstErrorCid + 2 &&
kUnhandledExceptionCid == kFirstErrorCid + 3 &&
kUnwindErrorCid == kFirstErrorCid + 4 &&
kLastErrorCid == kUnwindErrorCid &&
// Change if needed for detecting a new error added at the end.
kLastInternalOnlyCid == kLastErrorCid);
inline bool IsErrorClassId(intptr_t index) {
return (index >= kFirstErrorCid && index <= kLastErrorCid);
// Make sure this function is updated when new Error types are added.
COMPILE_ASSERT(kApiErrorCid == kErrorCid + 1 &&
kLanguageErrorCid == kErrorCid + 2 &&
kUnhandledExceptionCid == kErrorCid + 3 &&
kUnwindErrorCid == kErrorCid + 4 &&
// Change if needed for detecting a new error added at the end.
kLastInternalOnlyCid == kUnwindErrorCid);
return (index >= kErrorCid && index <= kUnwindErrorCid);
}
inline bool IsNumberClassId(intptr_t index) {
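
The hunk above reverts `IsErrorClassId` to its pre-change form. In either
form, the `COMPILE_ASSERT` matters because the scheme depends on the `Error`
class ids being contiguous: membership then reduces to a single range
comparison, which the assembler `RangeCheck` tests further down exercise in
machine code. A standalone sketch of that idea, using illustrative constants
rather than the VM's real class ids:

```
#include <cstdint>

// Illustrative values only; the real cids come from the VM's class id tables.
constexpr intptr_t kFirstErrorCidExample = 4;
constexpr intptr_t kLastErrorCidExample = 8;  // must stay contiguous with the first

// With a contiguous [first, last] range, membership needs one unsigned
// comparison: (cid - first) wraps to a large unsigned value when cid < first.
// The VM source writes this as two signed comparisons and relies on the
// compiler (or the assembler's RangeCheck helper) to reduce it.
inline bool IsErrorClassIdExample(intptr_t cid) {
  return static_cast<uintptr_t>(cid - kFirstErrorCidExample) <=
         static_cast<uintptr_t>(kLastErrorCidExample - kFirstErrorCidExample);
}
```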


@@ -7581,7 +7581,7 @@ ASSEMBLER_TEST_RUN(StoreReleaseLoadAcquire1024, test) {
static void RangeCheck(Assembler* assembler, Register value, Register temp) {
const Register return_reg = CallingConventions::kReturnReg;
Label in_range;
__ RangeCheck(value, temp, kFirstErrorCid, kLastErrorCid,
__ RangeCheck(value, temp, kErrorCid, kUnwindErrorCid,
AssemblerBase::kIfInRange, &in_range);
__ LoadImmediate(return_reg, Immediate(0));
__ Ret();
@@ -7631,7 +7631,7 @@ ASSEMBLER_TEST_GENERATE(RangeCheckWithTempReturnValue, assembler) {
const Register temp = CallingConventions::ArgumentRegisters[1];
const Register return_reg = CallingConventions::kReturnReg;
Label in_range;
__ RangeCheck(value, temp, kFirstErrorCid, kLastErrorCid,
__ RangeCheck(value, temp, kErrorCid, kUnwindErrorCid,
AssemblerBase::kIfInRange, &in_range);
__ Bind(&in_range);
__ mov(return_reg, value);


@@ -3852,7 +3852,7 @@ ASSEMBLER_TEST_GENERATE(StoreIntoObject, assembler) {
static void RangeCheck(Assembler* assembler, Register value, Register temp) {
const Register return_reg = CallingConventions::kReturnReg;
Label in_range;
__ RangeCheck(value, temp, kFirstErrorCid, kLastErrorCid,
__ RangeCheck(value, temp, kErrorCid, kUnwindErrorCid,
AssemblerBase::kIfInRange, &in_range);
__ LoadImmediate(return_reg, Immediate(0));
__ Ret();
@@ -3902,7 +3902,7 @@ ASSEMBLER_TEST_GENERATE(RangeCheckWithTempReturnValue, assembler) {
const Register temp = CallingConventions::ArgumentRegisters[1];
const Register return_reg = CallingConventions::kReturnReg;
Label in_range;
__ RangeCheck(value, temp, kFirstErrorCid, kLastErrorCid,
__ RangeCheck(value, temp, kErrorCid, kUnwindErrorCid,
AssemblerBase::kIfInRange, &in_range);
__ Bind(&in_range);
__ mov(return_reg, Operand(value));


@@ -5010,7 +5010,7 @@ IMMEDIATE_TEST(AddrImmEAXByte,
static void RangeCheck(Assembler* assembler, Register value, Register temp) {
const Register return_reg = CallingConventions::kReturnReg;
Label in_range;
__ RangeCheck(value, temp, kFirstErrorCid, kLastErrorCid,
__ RangeCheck(value, temp, kErrorCid, kUnwindErrorCid,
AssemblerBase::kIfInRange, &in_range);
__ movl(return_reg, Immediate(0));
__ ret();
@@ -5063,7 +5063,7 @@ ASSEMBLER_TEST_GENERATE(RangeCheckWithTempReturnValue, assembler) {
const Register return_reg = CallingConventions::kReturnReg;
Label in_range;
__ movl(value, compiler::Address(ESP, 4));
__ RangeCheck(value, temp, kFirstErrorCid, kLastErrorCid,
__ RangeCheck(value, temp, kErrorCid, kUnwindErrorCid,
AssemblerBase::kIfInRange, &in_range);
__ Bind(&in_range);
__ movl(return_reg, value);


@@ -6648,7 +6648,7 @@ TEST_ENCODING(intptr_t, CI4SPNImm)
static void RangeCheck(Assembler* assembler, Register value, Register temp) {
const Register return_reg = CallingConventions::kReturnReg;
Label in_range;
__ RangeCheck(value, temp, kFirstErrorCid, kLastErrorCid,
__ RangeCheck(value, temp, kErrorCid, kUnwindErrorCid,
AssemblerBase::kIfInRange, &in_range);
__ LoadImmediate(return_reg, 0);
__ Ret();
@@ -6698,7 +6698,7 @@ ASSEMBLER_TEST_GENERATE(RangeCheckWithTempReturnValue, assembler) {
const Register temp = CallingConventions::ArgumentRegisters[1];
const Register return_reg = CallingConventions::kReturnReg;
Label in_range;
__ RangeCheck(value, temp, kFirstErrorCid, kLastErrorCid,
__ RangeCheck(value, temp, kErrorCid, kUnwindErrorCid,
AssemblerBase::kIfInRange, &in_range);
__ Bind(&in_range);
__ MoveRegister(return_reg, value);


@@ -6318,7 +6318,7 @@ ASSEMBLER_TEST_RUN(MoveByteRunTest, test) {
static void RangeCheck(Assembler* assembler, Register value, Register temp) {
const Register return_reg = CallingConventions::kReturnReg;
Label in_range;
__ RangeCheck(value, temp, kFirstErrorCid, kLastErrorCid,
__ RangeCheck(value, temp, kErrorCid, kUnwindErrorCid,
AssemblerBase::kIfInRange, &in_range);
__ movq(return_reg, Immediate(0));
__ ret();
@@ -6368,7 +6368,7 @@ ASSEMBLER_TEST_GENERATE(RangeCheckWithTempReturnValue, assembler) {
const Register temp = CallingConventions::kArg2Reg;
const Register return_reg = CallingConventions::kReturnReg;
Label in_range;
__ RangeCheck(value, temp, kFirstErrorCid, kLastErrorCid,
__ RangeCheck(value, temp, kErrorCid, kUnwindErrorCid,
AssemblerBase::kIfInRange, &in_range);
__ Bind(&in_range);
__ movq(return_reg, value);


@@ -1528,37 +1528,6 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ blx(temp1);
}
if (marshaller_.IsHandle(compiler::ffi::kResultIndex)) {
__ Comment("Check Dart_Handle for Error.");
compiler::Label not_error;
ASSERT(temp1 != CallingConventions::kReturnReg);
ASSERT(saved_fp_or_sp != CallingConventions::kReturnReg);
__ ldr(temp1,
compiler::Address(CallingConventions::kReturnReg,
compiler::target::LocalHandle::ptr_offset()));
__ LoadClassId(temp1, temp1);
__ RangeCheck(temp1, saved_fp_or_sp, kFirstErrorCid, kLastErrorCid,
compiler::AssemblerBase::kIfNotInRange, &not_error);
// Slow path, use the stub to propagate error, to save on code-size.
__ Comment("Slow path: call Dart_PropagateError through stub.");
ASSERT(CallingConventions::ArgumentRegisters[0] ==
CallingConventions::kReturnReg);
__ ldr(temp1,
compiler::Address(
THR, compiler::target::Thread::
call_native_through_safepoint_entry_point_offset()));
__ ldr(branch, compiler::Address(
THR, kPropagateErrorRuntimeEntry.OffsetFromThread()));
__ blx(temp1);
#if defined(DEBUG)
// We should never return with normal controlflow from this.
__ bkpt(0);
#endif
__ Bind(&not_error);
}
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode) {


@@ -1405,35 +1405,6 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ blr(temp1);
}
if (marshaller_.IsHandle(compiler::ffi::kResultIndex)) {
__ Comment("Check Dart_Handle for Error.");
compiler::Label not_error;
__ ldr(temp1,
compiler::Address(CallingConventions::kReturnReg,
compiler::target::LocalHandle::ptr_offset()));
__ LoadClassId(temp1, temp1);
__ RangeCheck(temp1, temp2, kFirstErrorCid, kLastErrorCid,
compiler::AssemblerBase::kIfNotInRange, &not_error);
// Slow path, use the stub to propagate error, to save on code-size.
__ Comment("Slow path: call Dart_PropagateError through stub.");
ASSERT(CallingConventions::ArgumentRegisters[0] ==
CallingConventions::kReturnReg);
__ ldr(temp1,
compiler::Address(
THR, compiler::target::Thread::
call_native_through_safepoint_entry_point_offset()));
__ ldr(branch, compiler::Address(
THR, kPropagateErrorRuntimeEntry.OffsetFromThread()));
__ blr(temp1);
#if defined(DEBUG)
// We should never return with normal controlflow from this.
__ brk(0);
#endif
__ Bind(&not_error);
}
// Refresh pinned registers values (inc. write barrier mask and null
// object).
__ RestorePinnedRegisters();
@@ -3693,8 +3664,8 @@ LocationSummary* BoxInt64Instr::MakeLocationSummary(Zone* zone,
object_store->allocate_mint_without_fpu_regs_stub()
->untag()
->InVMIsolateHeap();
const bool shared_slow_path_call =
SlowPathSharingSupported(opt) && !stubs_in_vm_isolate;
const bool shared_slow_path_call = SlowPathSharingSupported(opt) &&
!stubs_in_vm_isolate;
LocationSummary* summary = new (zone) LocationSummary(
zone, kNumInputs, kNumTemps,
ValueFitsSmi() ? LocationSummary::kNoCall


@@ -1117,34 +1117,6 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// Calls EAX within a safepoint and clobbers EBX.
ASSERT(branch == EAX);
__ call(temp);
if (marshaller_.IsHandle(compiler::ffi::kResultIndex)) {
__ Comment("Check Dart_Handle for Error.");
compiler::Label not_error;
__ movl(temp,
compiler::Address(CallingConventions::kReturnReg,
compiler::target::LocalHandle::ptr_offset()));
__ LoadClassId(temp, temp);
__ RangeCheck(temp, kNoRegister, kFirstErrorCid, kLastErrorCid,
compiler::AssemblerBase::kIfNotInRange, &not_error);
// Slow path, use the stub to propagate error, to save on code-size.
__ Comment("Slow path: call Dart_PropagateError through stub.");
__ movl(temp,
compiler::Address(
THR, compiler::target::Thread::
call_native_through_safepoint_entry_point_offset()));
__ pushl(CallingConventions::kReturnReg);
__ movl(EAX, compiler::Address(
THR, kPropagateErrorRuntimeEntry.OffsetFromThread()));
__ call(temp);
#if defined(DEBUG)
// We should never return with normal controlflow from this.
__ int3();
#endif
__ Bind(&not_error);
}
}
// Restore the stack when a struct by value is returned into memory pointed


@@ -1629,38 +1629,6 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ jalr(temp1);
}
if (marshaller_.IsHandle(compiler::ffi::kResultIndex)) {
__ Comment("Check Dart_Handle for Error.");
ASSERT(temp1 != CallingConventions::kReturnReg);
ASSERT(temp2 != CallingConventions::kReturnReg);
compiler::Label not_error;
__ LoadFromOffset(
temp1,
compiler::Address(CallingConventions::kReturnReg,
compiler::target::LocalHandle::ptr_offset()));
__ LoadClassId(temp1, temp1);
__ RangeCheck(temp1, temp2, kFirstErrorCid, kLastErrorCid,
compiler::AssemblerBase::kIfNotInRange, &not_error);
// Slow path, use the stub to propagate error, to save on code-size.
__ Comment("Slow path: call Dart_PropagateError through stub.");
ASSERT(CallingConventions::ArgumentRegisters[0] ==
CallingConventions::kReturnReg);
__ lx(temp1,
compiler::Address(
THR, compiler::target::Thread::
call_native_through_safepoint_entry_point_offset()));
__ lx(target, compiler::Address(
THR, kPropagateErrorRuntimeEntry.OffsetFromThread()));
__ jalr(temp1);
#if defined(DEBUG)
// We should never return with normal controlflow from this.
__ ebreak();
#endif
__ Bind(&not_error);
}
// Refresh pinned registers values (inc. write barrier mask and null
// object).
__ RestorePinnedRegisters();


@@ -1331,34 +1331,6 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ movq(RBX, target_address);
__ call(temp);
}
if (marshaller_.IsHandle(compiler::ffi::kResultIndex)) {
__ Comment("Check Dart_Handle for Error.");
compiler::Label not_error;
__ movq(temp,
compiler::Address(CallingConventions::kReturnReg,
compiler::target::LocalHandle::ptr_offset()));
__ LoadClassId(temp, temp);
__ RangeCheck(temp, kNoRegister, kFirstErrorCid, kLastErrorCid,
compiler::AssemblerBase::kIfNotInRange, &not_error);
// Slow path, use the stub to propagate error, to save on code-size.
__ Comment("Slow path: call Dart_PropagateError through stub.");
__ movq(temp,
compiler::Address(
THR, compiler::target::Thread::
call_native_through_safepoint_entry_point_offset()));
__ movq(RBX, compiler::Address(
THR, kPropagateErrorRuntimeEntry.OffsetFromThread()));
__ movq(CallingConventions::kArg1Reg, CallingConventions::kReturnReg);
__ call(temp);
#if defined(DEBUG)
// We should never return with normal controlflow from this.
__ int3();
#endif
__ Bind(&not_error);
}
}
// Pass the `saved_fp` reg. as a temp to clobber since we're done with it.

File diff suppressed because it is too large.


@@ -3902,32 +3902,6 @@ DEFINE_RAW_LEAF_RUNTIME_ENTRY(
false /* is_float */,
reinterpret_cast<RuntimeFunction>(&DLRT_AllocateHandle));
// Enables reusing `Dart_PropagateError` from `FfiCallInstr`.
// `Dart_PropagateError` requires the native state and transitions into the VM.
// So the flow is:
// - FfiCallInstr (slow path)
// - TransitionGeneratedToNative
// - DLRT_PropagateError (this)
// - Dart_PropagateError
// - TransitionNativeToVM
// - Throw
extern "C" void DLRT_PropagateError(Dart_Handle handle) {
CHECK_STACK_ALIGNMENT;
TRACE_RUNTIME_CALL("PropagateError %p", handle);
ASSERT(Thread::Current()->execution_state() == Thread::kThreadInNative);
ASSERT(Dart_IsError(handle));
Dart_PropagateError(handle);
// We should never exit through normal control flow.
UNREACHABLE();
}
// Not a leaf-function, throws error.
DEFINE_RAW_LEAF_RUNTIME_ENTRY(
PropagateError,
1,
false /* is_float */,
reinterpret_cast<RuntimeFunction>(&DLRT_PropagateError));
#if defined(USING_MEMORY_SANITIZER)
#define MSAN_UNPOISON_RANGE reinterpret_cast<RuntimeFunction>(&__msan_unpoison)
#define MSAN_UNPOISON_PARAM \
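
The removed `DLRT_PropagateError` above is a thin wrapper over the public
`Dart_PropagateError`, which never returns when handed an error handle; that
property is what allowed the reverted slow path to skip any normal return
sequence. A minimal embedder-side sketch of that contract follows; the helper
name is hypothetical and not part of the SDK.

```
#include "dart_api.h"  // public Dart embedder API

// Hypothetical helper, for illustration only. It must run on a thread that
// entered native code from Dart (e.g. inside a native or FFI call); when the
// handle is an error, Dart_PropagateError unwinds back into Dart and this
// function never returns through normal control flow.
static Dart_Handle PropagateIfError(Dart_Handle handle) {
  if (Dart_IsError(handle)) {
    Dart_PropagateError(handle);  // does not return for error handles
  }
  return handle;
}
```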


@@ -107,7 +107,6 @@ namespace dart {
V(ApiLocalScope*, EnterHandleScope, Thread*) \
V(void, ExitHandleScope, Thread*) \
V(LocalHandle*, AllocateHandle, ApiLocalScope*) \
V(void, PropagateError, Dart_Handle) \
V(void, MsanUnpoison, void*, size_t) \
V(void, MsanUnpoisonParam, size_t) \
V(void, TsanLoadAcquire, void*) \


@@ -446,10 +446,10 @@ class Thread : public ThreadState {
enum {
// Always true in generated state.
kDidNotExit = 0,
// The VM exited the generated state through FFI.
// The VM did exit the generated state through FFI.
// This can be true in both native and VM state.
kExitThroughFfi = 1,
// The VM exited the generated state through a runtime call.
// The VM exited the generated state through FFI.
// This can be true in both native and VM state.
kExitThroughRuntimeCall = 2,
};


@@ -23,7 +23,6 @@ void main() {
testNull();
testDeepRecursive();
testNoHandlePropagateError();
testThrowOnReturnOfError();
}
void testHandle() {
@@ -225,21 +224,6 @@ void testNoHandlePropagateError() {
bool throws = false;
try {
final result = propagateErrorWithoutHandle(exceptionHandleCallbackPointer);
print(result.runtimeType);
print(result);
} catch (e) {
throws = true;
print("caught ($e, ${e.runtimeType})");
Expect.isTrue(identical(someException, e));
}
Expect.isTrue(throws);
}
void testThrowOnReturnOfError() {
bool throws = false;
try {
final result = autoPropagateErrorInHandle(exceptionHandleCallbackPointer);
print(result.runtimeType);
print(result);
} catch (e) {
throws = true;
@@ -281,8 +265,3 @@ final propagateErrorWithoutHandle = testLibrary.lookupFunction<
Int64 Function(Pointer<NativeFunction<Handle Function()>>),
int Function(Pointer<NativeFunction<Handle Function()>>)>(
"PropagateErrorWithoutHandle");
final autoPropagateErrorInHandle = testLibrary.lookupFunction<
Handle Function(Pointer<NativeFunction<Handle Function()>>),
Object Function(
Pointer<NativeFunction<Handle Function()>>)>("ThrowOnReturnOfError");